diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index dbebfe8f6e..3f8fe65065 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -33,6 +33,17 @@
 # Change characteristics
 - Is this a breaking change (a change in existing functionality)? YES/NO
 - Does this change require a documentation update? YES/NO
+- Does this change require an update to any of the following submodules? YES/NO (If YES, please add a link to any PRs that are pending.)
+  - [ ] EMC verif-global
+  - [ ] GDAS
+  - [ ] GFS-utils
+  - [ ] GSI
+  - [ ] GSI-monitor
+  - [ ] GSI-utils
+  - [ ] UFS-utils
+  - [ ] UFS-weather-model
+  - [ ] wxflow
+
 
 # How has this been tested?
 <!-- Please list any test you conducted, including the machine.
diff --git a/.github/workflows/ci_unit_tests.yaml b/.github/workflows/ci_unit_tests.yaml
new file mode 100644
index 0000000000..6dbc7ee52c
--- /dev/null
+++ b/.github/workflows/ci_unit_tests.yaml
@@ -0,0 +1,64 @@
+name: CI Unit Tests
+on: [pull_request, push, workflow_dispatch]
+
+jobs:
+
+  ci_pytest:
+      runs-on: ubuntu-latest
+      name: Run unit tests on CI system
+      permissions:
+        checks: write
+
+      steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11.8
+
+      - name: Install dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y perl libxml-libxml-perl libxml-libxslt-perl libdatetime-perl
+          python -m pip install --upgrade pip
+          pip install pytest
+          pip install wxflow
+          pip install wget
+
+      - name: Cache Rocoto Install
+        uses: actions/cache@v4
+        with:
+          path: ~/rocoto
+          key: ${{ runner.os }}-rocoto-${{ hashFiles('**/ci_unit_tests.yaml') }}
+
+      - name: Install Rocoto
+        run: |
+          if [ ! -d "$HOME/rocoto/bin" ]; then
+            git clone https://github.com/christopherwharrop/rocoto.git $HOME/rocoto
+            cd $HOME/rocoto
+            ./INSTALL
+          fi
+          echo "$HOME/rocoto/bin" >> $GITHUB_PATH
+
+      - name: Run tests
+        shell: bash
+        run: |
+          sudo mkdir -p /scratch1/NCEPDEV
+          cd $GITHUB_WORKSPACE/sorc
+          git submodule update --init
+          ./link_workflow.sh
+          cd $GITHUB_WORKSPACE/ci/scripts/tests
+          ln -s ../wxflow
+
+          pytest -v --junitxml $GITHUB_WORKSPACE/ci/scripts/tests/test-results.xml
+
+
+      - name: Publish Test Results
+        if: always()
+        uses: EnricoMi/publish-unit-test-result-action@v2
+        with:
+          files: ci/scripts/tests/test-results.xml
+          job_summary: true
+          comment_mode: off
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 3113c31149..b04c85688b 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -57,7 +57,7 @@ jobs:
         if-no-files-found: ignore
 
     - name: Comment ReadDocs Link in PR
-      if: github.event_name == 'pull_request'
+      if: ${{ github.event_name == 'pull_request' }}
       uses: actions/github-script@v6
       with:
         script: |
diff --git a/.gitignore b/.gitignore
index 047313a32f..861346a494 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,32 +30,44 @@ fix/chem
 fix/cice
 fix/cpl
 fix/datm
-fix/gldas
 fix/gdas
 fix/gsi
 fix/lut
 fix/mom6
 fix/orog
-fix/reg2grb2
+fix/orog_nest
 fix/sfc_climo
 fix/ugwd
+fix/ugwd_nest
 fix/verif
 fix/wave
 
 # Ignore parm file symlinks
 #--------------------------
-parm/config/config.base
-parm/gldas
+parm/gdas/aero
+parm/gdas/atm
+parm/gdas/io
+parm/gdas/ioda
+parm/gdas/snow
+parm/gdas/soca
+parm/gdas/jcb-gdas
+parm/gdas/jcb-algorithms
 parm/monitor
 parm/post/AEROSOL_LUTS.dat
 parm/post/nam_micro_lookup.dat
 parm/post/optics_luts_DUST.dat
 parm/post/gtg.config.gfs
 parm/post/gtg_imprintings.txt
+parm/post/optics_luts_DUST_nasa.dat
+parm/post/optics_luts_NITR_nasa.dat
 parm/post/optics_luts_SALT.dat
+parm/post/optics_luts_SALT_nasa.dat
 parm/post/optics_luts_SOOT.dat
+parm/post/optics_luts_SOOT_nasa.dat
 parm/post/optics_luts_SUSO.dat
+parm/post/optics_luts_SUSO_nasa.dat
 parm/post/optics_luts_WASO.dat
+parm/post/optics_luts_WASO_nasa.dat
 parm/post/params_grib2_tbl_new
 parm/post/post_tag_gfs128
 parm/post/post_tag_gfs65
@@ -77,6 +89,9 @@ parm/post/postcntrl_gfs_wafs.xml
 parm/post/postcntrl_gfs_wafs_anl.xml
 parm/post/postxconfig-NT-GEFS-ANL.txt
 parm/post/postxconfig-NT-GEFS-F00.txt
+parm/post/postxconfig-NT-GEFS-F00-aerosol.txt
+parm/post/postxconfig-NT-GEFS-WAFS.txt
+parm/post/postxconfig-NT-GEFS-aerosol.txt
 parm/post/postxconfig-NT-GEFS.txt
 parm/post/postxconfig-NT-GFS-ANL.txt
 parm/post/postxconfig-NT-GFS-F00-TWO.txt
@@ -90,7 +105,16 @@ parm/post/postxconfig-NT-GFS-WAFS.txt
 parm/post/postxconfig-NT-GFS.txt
 parm/post/postxconfig-NT-gefs-aerosol.txt
 parm/post/postxconfig-NT-gefs-chem.txt
+parm/post/ocean.csv
+parm/post/ice.csv
+parm/post/ocnicepost.nml.jinja2
 parm/ufs/noahmptable.tbl
+parm/ufs/model_configure.IN
+parm/ufs/model_configure_nest.IN
+parm/ufs/MOM_input_*.IN
+parm/ufs/MOM6_data_table.IN
+parm/ufs/ice_in.IN
+parm/ufs/ufs.configure.*.IN
 parm/wafs
 
 # Ignore sorc and logs folders from externals
@@ -123,7 +147,6 @@ sorc/radmon_bcor.fd
 sorc/radmon_time.fd
 sorc/rdbfmsua.fd
 sorc/recentersigp.fd
-sorc/reg2grb2.fd
 sorc/supvit.fd
 sorc/syndat_getjtbul.fd
 sorc/syndat_maksynrc.fd
@@ -133,45 +156,43 @@ sorc/tocsbufr.fd
 sorc/upp.fd
 sorc/vint.fd
 sorc/webtitle.fd
+sorc/ocnicepost.fd
 
 # Ignore scripts from externals
 #------------------------------
 # jobs symlinks
-jobs/JGFS_ATMOS_WAFS
-jobs/JGFS_ATMOS_WAFS_BLENDING
-jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
-jobs/JGFS_ATMOS_WAFS_GCIP
-jobs/JGFS_ATMOS_WAFS_GRIB2
-jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
 # scripts symlinks
 scripts/exemcsfc_global_sfc_prep.sh
-scripts/exgfs_atmos_wafs_blending.sh
-scripts/exgfs_atmos_wafs_blending_0p25.sh
-scripts/exgfs_atmos_wafs_gcip.sh
-scripts/exgfs_atmos_wafs_grib.sh
-scripts/exgfs_atmos_wafs_grib2.sh
-scripts/exgfs_atmos_wafs_grib2_0p25.sh
+scripts/exgdas_global_marine_analysis_ecen.py
+scripts/exglobal_prep_ocean_obs.py
 # ush symlinks
+ush/bufr2ioda_insitu_profile_argo.py
+ush/bufr2ioda_insitu_profile_bathy.py
+ush/bufr2ioda_insitu_profile_glider.py
+ush/bufr2ioda_insitu_profile_marinemammal.py
+ush/bufr2ioda_insitu_profile_tesac.py
+ush/bufr2ioda_insitu_profile_xbtctd.py
+ush/bufr2ioda_insitu_surface_altkob.py
+ush/bufr2ioda_insitu_surface_trkob.py
 ush/chgres_cube.sh
 ush/emcsfc_ice_blend.sh
 ush/emcsfc_snow.sh
-ush/exglobal_prep_ocean_obs.py
 ush/fix_precip.sh
 ush/fv3gfs_driver_grid.sh
 ush/fv3gfs_filter_topo.sh
 ush/fv3gfs_make_grid.sh
 ush/fv3gfs_make_orog.sh
+ush/gen_bufr2ioda_json.py
+ush/gen_bufr2ioda_yaml.py
+ush/bufr2ioda_insitu_profile*.py
+ush/bufr2ioda_insitu_surface*.py
 ush/global_chgres.sh
 ush/global_chgres_driver.sh
 ush/global_cycle.sh
 ush/global_cycle_driver.sh
 ush/jediinc2fv3.py
-ush/mkwfsgbl.sh
 ush/ufsda
-ush/wafs_blending.sh
-ush/wafs_grib2.regrid.sh
-ush/wafs_intdsk.sh
-ush/finddate.sh
+ush/soca
 ush/make_NTC_file.pl
 ush/make_ntc_bull.pl
 ush/make_tif.sh
@@ -188,3 +209,8 @@ versions/run.ver
 ush/python/wxflow
 workflow/wxflow
 ci/scripts/wxflow
+
+# jcb checkout and symlinks
+ush/python/jcb
+workflow/jcb
+ci/scripts/jcb
diff --git a/.gitmodules b/.gitmodules
index c9dff9f5e2..5c9e569243 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,28 +1,28 @@
 [submodule "sorc/ufs_model.fd"]
-	path = sorc/ufs_model.fd
-	url = https://github.com/NOAA-GSL/ufs-weather-model
-	ignore = dirty
+  path = sorc/ufs_model.fd
+  url = https://github.com/ufs-community/ufs-weather-model
+  ignore = dirty
 [submodule "sorc/wxflow"]
-	path = sorc/wxflow
-	url = https://github.com/NOAA-EMC/wxflow
+  path = sorc/wxflow
+  url = https://github.com/NOAA-EMC/wxflow
 [submodule "sorc/gfs_utils.fd"]
-	path = sorc/gfs_utils.fd
-	url = https://github.com/kayeekayee/gfs-utils.git
+  path = sorc/gfs_utils.fd
+  url = https://github.com/NOAA-EMC/gfs-utils
 [submodule "sorc/ufs_utils.fd"]
-	path = sorc/ufs_utils.fd
-	url = https://github.com/NOAA-GSL/UFS_UTILS.git
+  path = sorc/ufs_utils.fd
+  url = https://github.com/ufs-community/UFS_UTILS.git
 [submodule "sorc/verif-global.fd"]
-	path = sorc/verif-global.fd
-	url = https://github.com/NOAA-EMC/EMC_verif-global.git
+  path = sorc/verif-global.fd
+  url = https://github.com/NOAA-EMC/EMC_verif-global.git
 [submodule "sorc/gsi_enkf.fd"]
-	path = sorc/gsi_enkf.fd
-	url = https://github.com/NOAA-EMC/GSI.git
+  path = sorc/gsi_enkf.fd
+  url = https://github.com/NOAA-EMC/GSI.git
 [submodule "sorc/gdas.cd"]
-	path = sorc/gdas.cd
-	url = https://github.com/NOAA-EMC/GDASApp.git
+  path = sorc/gdas.cd
+  url = https://github.com/NOAA-EMC/GDASApp.git
 [submodule "sorc/gsi_utils.fd"]
-	path = sorc/gsi_utils.fd
-	url = https://github.com/NOAA-EMC/GSI-Utils.git
+  path = sorc/gsi_utils.fd
+  url = https://github.com/NOAA-EMC/GSI-Utils.git
 [submodule "sorc/gsi_monitor.fd"]
-	path = sorc/gsi_monitor.fd
-	url = https://github.com/NOAA-EMC/GSI-Monitor.git
+  path = sorc/gsi_monitor.fd
+  url = https://github.com/NOAA-EMC/GSI-Monitor.git
diff --git a/.shellcheckrc b/.shellcheckrc
index 6d540ba17f..67fabfe157 100644
--- a/.shellcheckrc
+++ b/.shellcheckrc
@@ -14,3 +14,6 @@ disable=SC1091
 
 # Disable -p -m only applies to deepest directory
 disable=SC2174
+
+# Disable warning of functions in test statements
+disable=SC2310
diff --git a/INFO b/INFO
index 61373fcc33..5a48c7a20c 100644
--- a/INFO
+++ b/INFO
@@ -1,3 +1,14 @@
+09-17-24
+========
+Update develop branch, gsl_ufs_dev
+ - based on gsl_ufs_dev from KaYee's fork
+   global-workflow: 07Aug24, 37c53ac [develop_07Aug2024_37c53ac]
+   UFS: 19Jul24, c127601
+   FV3: 19Jul24, 2527c11
+   UPP: 23Apr24, be0410e
+   CCPP-PHYSICS: 19Jul24, 46df080
+   UFS_UTILS: 26Jun24, 3ef2e6b
+
 07-31-24
 ========
   Added alpha_fd coefficient for Beljaars TOFD in GSL drag
diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
new file mode 100644
index 0000000000..ae86e33c66
--- /dev/null
+++ b/ci/Jenkinsfile
@@ -0,0 +1,310 @@
+def Machine = 'none'
+def machine = 'none'
+def CUSTOM_WORKSPACE = 'none'
+def caseList = ''
+// Location of the custom workspaces for each machine in the CI system. They are persistent for each iteration of the PR.
+def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea']
+def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI']
+def repo_url = 'git@github.com:NOAA-EMC/global-workflow.git'
+def STATUS = 'Passed'
+
+pipeline {
+
+    agent { label 'built-in' }
+
+    options {
+        skipDefaultCheckout()
+        parallelsAlwaysFailFast()
+    }
+
+    stages { // This initial stage is used to get the Machine name from the GitHub labels on the PR
+             // which is used to designate the Nodes in the Jenkins Controller by the agent label
+             // Each Jenkins Node is connected to said machine via a Java agent over an SSH tunnel
+             // no op 2
+
+        stage('1. Get Machine') {
+            agent { label 'built-in' }
+            steps {
+                script {
+
+                    def causes = currentBuild.rawBuild.getCauses()
+                    def isSpawnedFromAnotherJob = causes.any { cause ->
+                        cause instanceof hudson.model.Cause.UpstreamCause
+                    }
+
+                    def run_nodes = []
+                    if (isSpawnedFromAnotherJob) {
+                        echo "machine being set to value passed to this spawned job"
+                        echo "passed machine: ${params.machine}"
+                        machine = params.machine
+                    } else {
+                        echo "This is parent job so getting list of nodes matching labels:"
+                        for (label in pullRequest.labels) {
+                            if (label.matches("CI-(.*?)-Ready")) {
+                                def machine_name = label.split('-')[1].toString().toLowerCase()
+                                jenkins.model.Jenkins.get().computers.each { c ->
+                                    if (c.node.selfLabel.name == NodeName[machine_name]) {
+                                        run_nodes.add(c.node.selfLabel.name)
+                                    }
+                                }
+                            }
+                        }
+                        // Spawning all the jobs on the nodes matching the labels
+                        if (run_nodes.size() > 1) {
+                            run_nodes.init().each { node ->
+                                def machine_name = node.split('-')[0].toLowerCase()
+                                echo "Spawning job on node: ${node} with machine name: ${machine_name}"
+                                build job: "/global-workflow/EMC-Global-Pipeline/PR-${env.CHANGE_ID}", parameters: [
+                                    string(name: 'machine', value: machine_name),
+                                    string(name: 'Node', value: node) ],
+                                    wait: false
+                            }
+                            machine = run_nodes.last().split('-')[0].toLowerCase()
+                            echo "Running parent job: ${machine}"
+                        } else {
+                            machine = run_nodes[0].split('-')[0].toLowerCase()
+                            echo "Running only the parent job: ${machine}"
+                        }
+                    }
+                }
+            }
+        }
+
+        stage('2. Get Common Workspace') {
+            agent { label NodeName[machine].toLowerCase() }
+            steps {
+                script {
+                    Machine = machine[0].toUpperCase() + machine.substring(1)
+                    echo "Getting Common Workspace for ${Machine}"
+                    ws("${custom_workspace[machine]}/${env.CHANGE_ID}") {
+                        properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
+                        CUSTOM_WORKSPACE = "${WORKSPACE}"
+                        sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*")
+                        sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """)
+                    }
+                    echo "Building and running on ${Machine} in directory ${CUSTOM_WORKSPACE}"
+                }
+            }
+        }
+
+        stage('3. Build System') {
+            matrix {
+                agent { label NodeName[machine].toLowerCase() }
+                axes {
+                    axis {
+                        name 'system'
+                        values 'gfs', 'gefs'
+                    }
+                }
+                stages {
+                    stage('build system') {
+                        steps {
+                           catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
+                            script {
+                                def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to build the system on per system basis under the custome workspace for each buile system
+                                sh(script: "mkdir -p ${HOMEgfs}")
+                                ws(HOMEgfs) {
+                                    if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs
+                                        sh(script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore
+                                        checkout scm
+                                        dir('sorc') {
+                                            sh(script: './link_workflow.sh')
+                                        }
+                                    } else {
+                                        try {
+                                            echo "Checking out the code for ${system} on ${Machine} using scm in ${HOMEgfs}"
+                                            checkout scm
+                                        } catch (Exception e) {
+                                            if (env.CHANGE_ID) {
+                                                sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine}: ${e.getMessage()}" """)
+                                            }
+                                            STATUS = 'Failed'
+                                            error("Failed to checkout: ${e.getMessage()}")
+                                        }
+                                        def gist_url = ""
+                                        def error_logs = ""
+                                        def error_logs_message = ""
+                                        def builds_file = readYaml file: 'ci/cases/yamls/build.yaml'
+                                        def build_args_list = builds_file['builds']
+                                        def build_args = build_args_list[system].join(' ').trim().replaceAll('null', '')
+                                        dir("${HOMEgfs}/sorc") {
+                                            try {
+                                                sh(script: "${build_args}")
+                                            } catch (Exception error_build) {
+                                                echo "Failed to build system: ${error_build.getMessage()}"
+                                                if ( fileExists("logs/error.logs") ) {
+                                                    def fileContent = readFile 'logs/error.logs'
+                                                    def lines = fileContent.readLines()
+                                                    for (line in lines) {
+                                                        echo "archiving: ${line}"
+                                                        if (fileExists("${line}") && readFile("${line}").length() > 0 ){
+                                                            try {
+                                                                archiveArtifacts artifacts: "${line}", fingerprint: true
+                                                                error_logs = error_logs + "${HOMEgfs}/sorc/${line} "
+                                                                error_logs_message = error_logs_message + "${HOMEgfs}/sorc/${line}\n"
+                                                            }
+                                                            catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" }
+                                                        }
+                                                    }
+                                                    try {
+                                                        sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
+                                                        gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
+                                                        sh(script:  """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
+                                                    } catch (Exception error_comment) {
+                                                        echo "Failed to comment on PR: ${error_comment.getMessage()}"
+                                                    }
+                                                    STATUS = 'Failed'
+                                                    error("Failed to build system on ${Machine}")
+                                                }
+                                            }
+                                            sh(script: './link_workflow.sh')
+                                            //sh(script: "echo ${HOMEgfs} > BUILT_semaphor")
+                                        }
+                                    }
+                                    if (env.CHANGE_ID && system == 'gfs') {
+                                        try {
+                                            sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Running" --remove-label "CI-${Machine}-Building" """)
+                                        } catch (Exception e) {
+                                            echo "Failed to update label from Buildng to Running: ${e.getMessage()}"
+                                        }
+                                    }
+                                    if (system == 'gfs') {
+                                        caseList = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
+                                    }
+                               }
+                           }
+                          }
+                        }
+                    }
+                }
+            }
+        }
+
+        stage('4. Run Tests') {
+            when {
+                expression { STATUS != 'Failed' }
+            }
+            matrix {
+                agent { label NodeName[machine].toLowerCase() }
+                axes {
+                    axis {
+                        name 'Case'
+                        // TODO: add dynamic list of cases from env vars (needs additional plugins)
+                        values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA'
+                    }
+                }
+                stages {
+
+                    stage('Create Experiments') {
+                        when {
+                            expression { return caseList.contains(Case) }
+                        }
+                        steps {
+                            catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
+                                script {
+                                    sh(script: "sed -n '/{.*}/!p' ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml > ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp")
+                                    def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp"
+                                    system = yaml_case.experiment.system
+                                    def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}"   // local HOMEgfs is used to populate the XML on a per-system basis
+                                    env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
+                                    try {
+                                        error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml", returnStdout: true).trim()
+                                    } catch (Exception error_create) {
+                                        sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experment on ${Machine}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """)
+                                        error("Case ${Case} failed to create experment directory")
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    stage('Run Experiments') {
+                        when {
+                            expression { return caseList.contains(Case) }
+                        }
+                        steps {
+                            script {
+                                HOMEgfs = "${CUSTOM_WORKSPACE}/gfs"  // common HOMEgfs is used to launch the scripts that run the experiments
+                                def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${Case}", returnStdout: true).trim()
+                                def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
+                                sh(script: " rm -f ${error_file}")
+                                try {
+                                    sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} ${system}")
+                                } catch (Exception error_experiment) {
+                                    sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
+                                    ws(CUSTOM_WORKSPACE) {
+                                        def error_logs = ""
+                                        def error_logs_message = ""
+                                        if (fileExists(error_file)) {
+                                            def fileContent = readFile error_file
+                                            def lines = fileContent.readLines()
+                                            for (line in lines) {
+                                                echo "archiving: ${line}"
+                                                if (fileExists("${CUSTOM_WORKSPACE}/${line}") && readFile("${CUSTOM_WORKSPACE}/${line}").length() > 0) {
+                                                    try {
+                                                        archiveArtifacts artifacts: "${line}", fingerprint: true
+                                                        error_logs = error_logs + "${CUSTOM_WORKSPACE}/${line} "
+                                                        error_logs_message = error_logs_message + "${CUSTOM_WORKSPACE}/${line}\n"
+                                                    } catch (Exception error_arch) {
+                                                        echo "Failed to archive error log ${line}: ${error_arch.getMessage()}"
+                                                    }
+                                                }
+                                            }
+                                            try {
+                                                gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
+                                                sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
+                                                sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}")
+                                            } catch (Exception error_comment) {
+                                                echo "Failed to comment on PR: ${error_comment.getMessage()}"
+                                            }
+                                        } else {
+                                            echo "No error logs found for failed cases in $CUSTOM_WORKSPACE/RUNTESTS/${pslot}_error.logs"
+                                        }
+                                        STATUS = 'Failed'
+                                        try {
+                                            sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
+                                            sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} in\n\\`${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}\\`" """)
+                                        } catch (Exception e) {
+                                            echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
+                                        }
+                                        echo "Failed to run experiments ${Case} on ${Machine}"
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        stage( '5. FINALIZE' ) {
+            agent { label NodeName[machine].toLowerCase() }
+            steps {
+                script {
+                    sh(script: """
+                        labels=\$(${GH} pr view ${env.CHANGE_ID} --repo ${repo_url} --json labels --jq '.labels[].name')
+                        for label in \$labels; do
+                            if [[ "\$label" == *"${Machine}"* ]]; then
+                                ${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "\$label"
+                            fi
+                        done
+                    """, returnStatus: true)
+                    sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
+                    if (fileExists("${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log")) {
+                        sh(script: """echo "**CI ${STATUS}** ${Machine} at <br>Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true)
+                        sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body-file ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log  """, returnStatus: true)
+                    }
+                    if (STATUS == 'Passed') {
+                        try {
+                            sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/*")
+                        } catch (Exception e) {
+                            echo "Failed to remove custom work directory ${CUSTOM_WORKSPACE} on ${Machine}: ${e.getMessage()}"
+                        }
+                    } else {
+                        echo "Failed to build and run Global-workflow in ${CUSTOM_WORKSPACE} and on ${Machine}"
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml
new file mode 100644
index 0000000000..d97c9567e9
--- /dev/null
+++ b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml
@@ -0,0 +1,18 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: S2S
+  resdetatmos: 384
+  resdetocean: 0.25
+  nens: 0
+  gfs_cyc: 4
+  start: cold
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  idate: 2021063000
+  edate: 2021070306
+  icsdir: /scratch1/NCEPDEV/climate/Jessica.Meixner/cycling/IC_2021063000_V2
+  yaml: {{ HOMEgfs }}/ci/cases/gfsv17/ocnanal.yaml
diff --git a/ci/cases/gfsv17/ocnanal.yaml b/ci/cases/gfsv17/ocnanal.yaml
new file mode 100644
index 0000000000..a2d7363c18
--- /dev/null
+++ b/ci/cases/gfsv17/ocnanal.yaml
@@ -0,0 +1,27 @@
+defaults:
+  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+
+base:
+  DOIAU: "YES"
+  DO_JEDIATMVAR: "NO"
+  DO_JEDIATMENS: "NO"
+  DO_JEDIOCNVAR: "YES"
+  DO_JEDISNOWDA: "NO"
+  DO_MERGENSST: "NO"
+  DOHYBVAR: "NO"
+  DO_FIT2OBS: "YES"
+  DO_VERFOZN: "YES"
+  DO_VERFRAD: "YES"
+  DO_VRFY_OCEANDA: "NO"
+  FHMAX_GFS: 240
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
+
+ocnanal:
+  SOCA_INPUT_FIX_DIR: {{ FIXgfs }}/gdas/soca/1440x1080x75/soca
+  SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+  SOCA_NINNER: 100
+
+prepoceanobs:
+  SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+  OBSPREP_YAML: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obsprep/obsprep_config.yaml
+  DMPDIR: /scratch1/NCEPDEV/da/common/
diff --git a/ci/cases/hires/C1152_S2SW.yaml b/ci/cases/hires/C1152_S2SW.yaml
new file mode 100644
index 0000000000..29e1f00bbe
--- /dev/null
+++ b/ci/cases/hires/C1152_S2SW.yaml
@@ -0,0 +1,14 @@
+experiment:
+  system: gfs
+  mode: forecast-only
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: S2SW
+  resdetatmos: 1152
+  resdetocean: 0.25
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  idate: 2019120300
+  edate: 2019120300
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/hires/C768_S2SW.yaml b/ci/cases/hires/C768_S2SW.yaml
new file mode 100644
index 0000000000..ad314bb75b
--- /dev/null
+++ b/ci/cases/hires/C768_S2SW.yaml
@@ -0,0 +1,14 @@
+experiment:
+  system: gfs
+  mode: forecast-only
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: S2SW
+  resdetatmos: 768
+  resdetocean: 0.25
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  idate: 2019120300
+  edate: 2019120300
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/pr/C48_ATM.yaml b/ci/cases/pr/C48_ATM.yaml
index 39412e8aeb..79706556e6 100644
--- a/ci/cases/pr/C48_ATM.yaml
+++ b/ci/cases/pr/C48_ATM.yaml
@@ -10,4 +10,4 @@ arguments:
   expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
   idate: 2021032312
   edate: 2021032312
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/pr/C48_S2SW.yaml b/ci/cases/pr/C48_S2SW.yaml
index 2aba42f562..6367564514 100644
--- a/ci/cases/pr/C48_S2SW.yaml
+++ b/ci/cases/pr/C48_S2SW.yaml
@@ -11,4 +11,4 @@ arguments:
   expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
   idate: 2021032312
   edate: 2021032312
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/pr/C48_S2SWA_gefs.yaml b/ci/cases/pr/C48_S2SWA_gefs.yaml
index d68360bf44..a924b416c3 100644
--- a/ci/cases/pr/C48_S2SWA_gefs.yaml
+++ b/ci/cases/pr/C48_S2SWA_gefs.yaml
@@ -15,4 +15,7 @@ arguments:
   expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
   idate: 2021032312
   edate: 2021032312
-  yaml: {{ HOMEgfs }}/ci/platforms/gefs_ci_defaults.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gefs_ci_defaults.yaml
+
+skip_ci_on_hosts:
+  - wcoss2
diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
new file mode 100644
index 0000000000..fd056cf895
--- /dev/null
+++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
@@ -0,0 +1,23 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: S2S
+  resdetatmos: 48
+  resdetocean: 5.0
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500
+  idate: 2021032412
+  edate: 2021032418
+  nens: 0
+  gfs_cyc: 0
+  start: warm
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_gfs_defaults_ci.yaml
+
+skip_ci_on_hosts:
+  - wcoss2
+  - orion
+  - hercules
diff --git a/ci/cases/pr/C96C48_hybatmDA.yaml b/ci/cases/pr/C96C48_hybatmDA.yaml
index be35283cff..d08374d4e0 100644
--- a/ci/cases/pr/C96C48_hybatmDA.yaml
+++ b/ci/cases/pr/C96C48_hybatmDA.yaml
@@ -16,4 +16,4 @@ arguments:
   nens: 2
   gfs_cyc: 1
   start: cold
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml
new file mode 100644
index 0000000000..d1556dc1d0
--- /dev/null
+++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml
@@ -0,0 +1,24 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: ATM
+  resdetatmos: 96
+  resensatmos: 48
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48
+  idate: 2024022318
+  edate: 2024022400
+  nens: 2
+  gfs_cyc: 1
+  start: warm
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
+
+skip_ci_on_hosts:
+  - hera
+  - orion
+  - hercules
+
diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml
index dee1525d80..8a89ff25ec 100644
--- a/ci/cases/pr/C96_atm3DVar.yaml
+++ b/ci/cases/pr/C96_atm3DVar.yaml
@@ -14,4 +14,7 @@ arguments:
   nens: 0
   gfs_cyc: 1
   start: cold
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
+
+skip_ci_on_hosts:
+  - wcoss2
diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml
new file mode 100644
index 0000000000..994d3ef3a0
--- /dev/null
+++ b/ci/cases/pr/C96_atm3DVar_extended.yaml
@@ -0,0 +1,22 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: ATM
+  resdetatmos: 96
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48
+  idate: 2021122018
+  edate: 2021122118
+  nens: 0
+  gfs_cyc: 4
+  start: cold
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_extended_ci.yaml
+
+skip_ci_on_hosts:
+  - hera
+  - orion
+  - hercules
diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml
new file mode 100644
index 0000000000..7e22955a37
--- /dev/null
+++ b/ci/cases/pr/C96_atmaerosnowDA.yaml
@@ -0,0 +1,21 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: ATMA
+  resdetatmos: 96
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48
+  idate: 2021122012
+  edate: 2021122100
+  nens: 0
+  gfs_cyc: 1
+  start: cold
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
+
+skip_ci_on_hosts:
+  - orion
+  - hercules
diff --git a/ci/cases/weekly/C384C192_hybatmda.yaml b/ci/cases/weekly/C384C192_hybatmda.yaml
index a4eae7d9a1..131ada95d5 100644
--- a/ci/cases/weekly/C384C192_hybatmda.yaml
+++ b/ci/cases/weekly/C384C192_hybatmda.yaml
@@ -16,4 +16,4 @@ arguments:
   nens: 2
   gfs_cyc: 1
   start: cold
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/weekly/C384_S2SWA.yaml b/ci/cases/weekly/C384_S2SWA.yaml
index 8e2c043a4c..7bbdc44671 100644
--- a/ci/cases/weekly/C384_S2SWA.yaml
+++ b/ci/cases/weekly/C384_S2SWA.yaml
@@ -6,9 +6,9 @@ arguments:
   pslot: {{ 'pslot' | getenv }}
   app: S2SWA
   resdetatmos: 384
-  resdetocean: 0.5
+  resdetocean: 0.25
   comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
   expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
   idate: 2016070100
   edate: 2016070100
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/weekly/C384_atm3DVar.yaml b/ci/cases/weekly/C384_atm3DVar.yaml
index 479d731b25..40487f3b47 100644
--- a/ci/cases/weekly/C384_atm3DVar.yaml
+++ b/ci/cases/weekly/C384_atm3DVar.yaml
@@ -16,4 +16,4 @@ arguments:
   nens: 0
   gfs_cyc: 1
   start: cold
-  yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml
diff --git a/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
new file mode 100644
index 0000000000..6d978e25ef
--- /dev/null
+++ b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
@@ -0,0 +1,5 @@
+defaults:
+  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+base:
+  DO_JEDISNOWDA: "YES"
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
diff --git a/ci/cases/yamls/build.yaml b/ci/cases/yamls/build.yaml
new file mode 100644
index 0000000000..2b2938719f
--- /dev/null
+++ b/ci/cases/yamls/build.yaml
@@ -0,0 +1,3 @@
+builds:
+ - gefs: './build_all.sh -kw'
+ - gfs: './build_all.sh -kgu'
diff --git a/ci/platforms/gefs_ci_defaults.yaml b/ci/cases/yamls/gefs_ci_defaults.yaml
similarity index 63%
rename from ci/platforms/gefs_ci_defaults.yaml
rename to ci/cases/yamls/gefs_ci_defaults.yaml
index dfb1626cdd..05a97edd90 100644
--- a/ci/platforms/gefs_ci_defaults.yaml
+++ b/ci/cases/yamls/gefs_ci_defaults.yaml
@@ -1,4 +1,4 @@
 defaults:
   !INC {{ HOMEgfs }}/parm/config/gefs/yaml/defaults.yaml
 base:
-  ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }}
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
diff --git a/ci/platforms/gfs_defaults_ci.yaml b/ci/cases/yamls/gfs_defaults_ci.yaml
similarity index 63%
rename from ci/platforms/gfs_defaults_ci.yaml
rename to ci/cases/yamls/gfs_defaults_ci.yaml
index b66be2a366..d09f78b8b8 100644
--- a/ci/platforms/gfs_defaults_ci.yaml
+++ b/ci/cases/yamls/gfs_defaults_ci.yaml
@@ -1,4 +1,4 @@
 defaults:
   !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
 base:
-  ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }}
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml
new file mode 100644
index 0000000000..42ee612f3a
--- /dev/null
+++ b/ci/cases/yamls/gfs_extended_ci.yaml
@@ -0,0 +1,13 @@
+defaults:
+  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+
+base:
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
+  DO_GOES: "YES"
+  DO_BUFRSND: "YES"
+  DO_GEMPAK: "YES"
+  DO_AWIPS: "NO"
+  DO_NPOESS: "YES"
+  DO_GENESIS_FSU: "NO"
+  FHMAX_GFS: 384
+  FHMAX_HF_GFS: 120
diff --git a/ci/cases/yamls/soca_gfs_defaults_ci.yaml b/ci/cases/yamls/soca_gfs_defaults_ci.yaml
new file mode 100644
index 0000000000..3d75cc911a
--- /dev/null
+++ b/ci/cases/yamls/soca_gfs_defaults_ci.yaml
@@ -0,0 +1,5 @@
+defaults:
+  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+base:
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
+  DO_JEDIOCNVAR: "YES"
diff --git a/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
new file mode 100644
index 0000000000..c4fa54dcc8
--- /dev/null
+++ b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml
@@ -0,0 +1,20 @@
+defaults:
+  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+base:
+  DOIAU: "NO"
+  DO_JEDIATMVAR: "YES"
+  DO_JEDIATMENS: "YES"
+  ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
+atmanl:
+  LAYOUT_X_ATMANL: 4
+  LAYOUT_Y_ATMANL: 4
+atmensanl:
+  LAYOUT_X_ATMENSANL: 4
+  LAYOUT_Y_ATMENSANL: 4
+esfc:
+  DONST: "NO"
+nsst:
+  NST_MODEL: "1"
+sfcanl:
+  DONST: "NO"
+
diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera
index 76a6a08670..6d3e43c820 100644
--- a/ci/platforms/config.hera
+++ b/ci/platforms/config.hera
@@ -2,7 +2,9 @@
 
 export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT
 export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR
-export STMP="/scratch1/NCEPDEV/stmp2/${USER}"
-export SLURM_ACCOUNT=nems
+export HPC_ACCOUNT=nems
 export max_concurrent_cases=5
 export max_concurrent_pr=4
+
+export JENKINS_AGENT_LANUCH_DIR=/scratch1/NCEPDEV/global/Terry.McGuinness/Jenkins
+export JENKINS_WORK_DIR=/scratch1/NCEPDEV/global/Terry.McGuinness
diff --git a/ci/platforms/config.hercules b/ci/platforms/config.hercules
index e5a638a827..5329adae49 100644
--- a/ci/platforms/config.hercules
+++ b/ci/platforms/config.hercules
@@ -2,7 +2,9 @@
 
 export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/HERCULES
 export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR
-export STMP="/work2/noaa/stmp/${USER}"
-export SLURM_ACCOUNT=nems
+export HPC_ACCOUNT=nems
 export max_concurrent_cases=5
 export max_concurrent_pr=4
+
+export JENKINS_AGENT_LANUCH_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS/AGENT_mterry
+export JENKINS_WORK_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS
diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion
index 3ddd05c034..5171373127 100644
--- a/ci/platforms/config.orion
+++ b/ci/platforms/config.orion
@@ -2,7 +2,9 @@
 
 export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION
 export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR
-export STMP="/work2/noaa/stmp/${USER}"
-export SLURM_ACCOUNT=nems
+export HPC_ACCOUNT=nems
 export max_concurrent_cases=5
 export max_concurrent_pr=4
+
+export JENKINS_AGENT_LANUCH_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS/AGENT_mterry
+export JENKINS_WORK_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS
diff --git a/ci/platforms/config.wcoss2 b/ci/platforms/config.wcoss2
new file mode 100644
index 0000000000..7a840ad2f8
--- /dev/null
+++ b/ci/platforms/config.wcoss2
@@ -0,0 +1,7 @@
+#!/usr/bin/bash
+
+export GFS_CI_ROOT=/lfs/h2/emc/global/noscrub/globalworkflow.ci/GFS_CI_ROOT
+export ICSDIR_ROOT=/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR
+export HPC_ACCOUNT=GFS-DEV
+export max_concurrent_cases=5
+export max_concurrent_pr=4
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index cda2d4e9f2..04dd92f4a6 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -8,23 +8,22 @@ set -eux
 #                     to run from within a cron job in the CI Managers account
 #####################################################################################
 
-ROOT_DIR="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+HOMEgfs="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
 scriptname=$(basename "${BASH_SOURCE[0]}")
 echo "Begin ${scriptname} at $(date -u)" || true
 export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
 
-GH=${HOME}/bin/gh
-REPO_URL="https://github.com/NOAA-EMC/global-workflow.git"
+REPO_URL=${REPO_URL:-"git@github.com:NOAA-EMC/global-workflow.git"}
 
 #########################################################################
 #  Set up runtime environment varibles for accounts on supproted machines
 #########################################################################
 
-source "${ROOT_DIR}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules)
+  hera | orion | hercules | wcoss2)
    echo "Running Automated Testing on ${MACHINE_ID}"
-   source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
+   source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
    ;;
  *)
    echo "Unsupported platform. Exiting with error."
@@ -32,12 +31,24 @@ case ${MACHINE_ID} in
    ;;
 esac
 set +x
-source "${ROOT_DIR}/ush/module-setup.sh"
-source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh"
-module use "${ROOT_DIR}/modulefiles"
+export HOMEgfs
+source "${HOMEgfs}/ush/module-setup.sh"
+source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh"
+module use "${HOMEgfs}/modulefiles"
 module load "module_gwsetup.${MACHINE_ID}"
 module list
+# Load machine-specific modules for CI (currently only wcoss2)
+if [[ "${MACHINE_ID}" == "wcoss2" ]]; then
+  module load "module_gwci.${MACHINE_ID}"
+fi
 set -x
+if ! command -v gh > /dev/null; then
+   GH="${HOME}/bin/gh"
+else
+   GH=$(command -v gh)
+fi
+export GH
+
 rocotostat=$(command -v rocotostat)
 if [[ -z ${rocotostat+x} ]]; then
   echo "rocotostat not found on system"
@@ -57,7 +68,7 @@ pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
 
 pr_list=""
 if [[ -f "${pr_list_dbfile}" ]]; then
-  pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display | grep -v Failed | grep Running | awk '{print $1}') || true
+  pr_list=$("${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Running) || true
 fi
 if [[ -z "${pr_list+x}" ]]; then
   echo "no PRs open and ready to run cases on .. exiting"
@@ -89,13 +100,13 @@ for pr in ${pr_list}; do
     sed -i "1 i\`\`\`" "${output_ci}"
     sed -i "1 i\All CI Test Cases Passed on ${MACHINE_ID^}:" "${output_ci}"
     "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+    "${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
     # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms
     weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}"  --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true
     if [[ -n "${weekly_labels}" ]]; then
-      num_platforms=$(find "${ROOT_DIR}/ci/platforms" -type f -name "config.*" | wc -l)
+      num_platforms=$(find "${HOMEgfs}/ci/platforms" -type f -name "config.*" | wc -l)
       passed=0
-      for platforms in "${ROOT_DIR}"/ci/platforms/config.*; do
+      for platforms in "${HOMEgfs}"/ci/platforms/config.*; do
         machine=$(basename "${platforms}" | cut -d. -f2)
         if [[ "${weekly_labels}" == *"CI-${machine^}-Passed"* ]]; then
           ((passed=passed+1))
@@ -122,31 +133,40 @@ for pr in ${pr_list}; do
     if [[ ! -f "${db}" ]]; then
        continue
     fi
-    rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true
-    num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true
-    num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true
-    # num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true
-    echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true
-    num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true
-    if [[ ${num_failed} -ne 0 ]]; then
-      "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed"
-      error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
-      {
-       echo "Experiment ${pslot}  *** FAILED *** on ${MACHINE_ID^}"
-       echo "Experiment ${pslot}  with ${num_failed} tasks failed at $(date +'%D %r')" || true
-       echo "Error logs:"
-       echo "${error_logs}"
-      } >> "${output_ci}"
-      sed -i "1 i\`\`\`" "${output_ci}"
-      "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
-      "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
-      for kill_cases in "${pr_dir}/RUNTESTS/"*; do
-         pslot=$(basename "${kill_cases}")
-         cancel_slurm_jobs "${pslot}"
-      done
-      break
+
+    set +e
+    rocoto_state="$("${HOMEgfs}/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}")"
+    rocoto_error=$?
+    rm -f "${output_ci_single}"
+    if [[ "${rocoto_error}" -ne 0 ]]; then
+        "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed"
+        if [[ "${rocoto_state}" == "STALLED" ]]; then
+          # shellcheck disable=SC2312
+          "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body "Experiment ${pslot} **${rocoto_state}** on ${MACHINE_ID^} at $(date +'%D %r')"
+          "${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+          cancel_all_batch_jobs "${pr_dir}/RUNTESTS"
+          exit "${rocoto_error}"
+        fi
+        error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
+        # shellcheck disable=SC2086
+        ${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo "PR_${pr}" > /dev/null
+        # shellcheck disable=SC2086
+        gist_url="$("${HOMEgfs}/ci/scripts/utils/publish_logs.py" --file ${error_logs} --gist "PR_${pr}")"
+        {
+          echo "Experiment ${pslot} **${rocoto_state}** on ${MACHINE_ID^} at $(date +'%D %r')" || true
+          echo ""
+          echo "Error logs:"
+          echo "\`\`\`"
+          echo "${error_logs}"
+          echo "\`\`\`"
+          echo "Follow link here to view the contents of the above file(s): [(link)](${gist_url})"
+        } >> "${output_ci_single}"
+        "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
+        "${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+        cancel_all_batch_jobs "${pr_dir}/RUNTESTS"
+        exit "${rocoto_error}"
     fi
-    if [[ "${num_done}" -eq  "${num_cycles}" ]]; then
+    if [[ "${rocoto_state}" == "DONE" ]]; then
       #Remove Experment cases that completed successfully
       rm -Rf "${pslot_dir}"
       rm -Rf "${pr_dir}/RUNTESTS/COMROOT/${pslot}"
@@ -155,8 +175,7 @@ for pr in ${pr_list}; do
       DATE=$(date +'%D %r')
       echo "Experiment ${pslot} **SUCCESS** on ${MACHINE_ID^} at ${DATE}" >> "${output_ci_single}"
       echo "Experiment ${pslot} *** SUCCESS *** at ${DATE}" >> "${output_ci}"
-      "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
-
+      # "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
     fi
   done
 done
diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh
index 798c98bf50..3cef7fc230 100755
--- a/ci/scripts/clone-build_ci.sh
+++ b/ci/scripts/clone-build_ci.sh
@@ -74,7 +74,7 @@ set +e
 source "${HOMEgfs}/ush/module-setup.sh"
 export BUILD_JOBS=8
 rm -rf log.build
-./build_all.sh -gu  >> log.build 2>&1
+./build_all.sh -guk  >> log.build 2>&1
 build_status=$?
 
 DATE=$(date +'%D %r')
@@ -83,6 +83,7 @@ if [[ ${build_status} != 0 ]]; then
     echo "Build: *** FAILED ***"
     echo "Build: Failed at ${DATE}"
     cat "${PWD}/log.build"
+    cat "${PWD}/logs/error.logs"
   } >> "${outfile}"
   exit "${build_status}"
 else
diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh
index 5fc13ea524..0f53ebff6f 100755
--- a/ci/scripts/driver.sh
+++ b/ci/scripts/driver.sh
@@ -14,11 +14,7 @@ set -eux
 # It then is ready to run a suite of regression tests with various configurations
 #######################################################################################
 
-#################################################################
-# TODO using static build for GitHub CLI until fixed in HPC-Stack
-#################################################################
-export GH=${HOME}/bin/gh
-export REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"}
+export REPO_URL=${REPO_URL:-"git@github.com:NOAA-EMC/global-workflow.git"}
 
 ################################################################
 # Setup the reletive paths to scripts and PS4 for better logging
@@ -34,7 +30,7 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
 
 source "${ROOT_DIR}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules)
+  hera | orion | hercules | wcoss2)
     echo "Running Automated Testing on ${MACHINE_ID}"
     source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
     ;;
@@ -47,12 +43,25 @@ esac
 ######################################################
 # setup runtime env for correct python install and git
 ######################################################
+HOMEgfs=${ROOT_DIR}
+export HOMEgfs
 set +x
 source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh"
 source "${ROOT_DIR}/ush/module-setup.sh"
 module use "${ROOT_DIR}/modulefiles"
 module load "module_gwsetup.${MACHINE_ID}"
+# Load machine-specific modules for CI (currently only wcoss2)
+if [[ "${MACHINE_ID}" == "wcoss2" ]]; then
+  module load "module_gwci.${MACHINE_ID}"
+fi
 set -x
+unset HOMEgfs
+if ! command -v gh > /dev/null; then
+   GH="${HOME}/bin/gh"
+else
+   GH=$(command -v gh)
+fi
+export GH
 
 ############################################################
 # query repo and get list of open PRs with tags {machine}-CI
@@ -61,14 +70,14 @@ set -x
 pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
 
 if [[ ! -f "${pr_list_dbfile}" ]]; then
-  "${ROOT_DIR}/ci/scripts/pr_list_database.py" --create --dbfile "${pr_list_dbfile}"
+  "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --create --dbfile "${pr_list_dbfile}"
 fi
 
 pr_list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" --state "open" | awk '{print $1}') || true
 
 for pr in ${pr_list}; do
   pr_dir="${GFS_CI_ROOT}/PR/${pr}"
-  db_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}")
+  db_list=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}")
   output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_single.log"
   #############################################################
   # Check if a Ready labeled PR has changed back from once set
@@ -79,7 +88,7 @@ for pr in ${pr_list}; do
   if [[ "${db_list}" == *"already is in list"* ]]; then
     # Get the the PID and HOST of the driver.sh cron job
     # that is stored int he CI database for this PR
-    driver_ID=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display "${pr}" | awk '{print $4}') || true
+    driver_ID=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display "${pr}" | awk '{print $4}') || true
     driver_PID=$(echo "${driver_ID}" | cut -d":" -f1) || true
     driver_HOST=$(echo "${driver_ID}" | cut -d":" -f2) || true
     host_name=$(hostname -s)
@@ -92,10 +101,12 @@ for pr in ${pr_list}; do
     if [[ "${driver_PID}" -ne 0 ]]; then
       echo "Driver PID: ${driver_PID} no longer running this build having it killed"
       if [[ "${driver_HOST}" == "${host_name}"  ]]; then
-        # shellcheck disable=SC2312
-        pstree -A -p "${driver_PID}" | grep -Pow "(?<=\()[0-9]+(?=\))" | xargs kill
+        pstree_out="$(pstree -A -p "${driver_PID}")"
+        if [[ -n "${pstree_out}" ]]; then
+           # shellcheck disable=SC2312
+           echo -e "${pstree_out}" | grep -Pow "(?<=\()[0-9]+(?=\))" | xargs kill
+        fi
       else
-        # shellcheck disable=SC2312
         ssh "${driver_HOST}" 'pstree -A -p "${driver_PID}" | grep -Eow "[0-9]+" | xargs kill'
       fi
       {
@@ -110,22 +121,25 @@ for pr in ${pr_list}; do
     else
       for case in ${experiments}; do
         case_name=$(basename "${case}")
-        cancel_slurm_jobs "${case_name}"
+        cancel_batch_jobs "${case_name}"
         {
           echo "Canceled all jobs for experiment ${case_name} in PR:${pr} on ${MACHINE_ID^}"
         } >> "${output_ci_single}"
       done
     fi
-    sed -i "1 i\`\`\`" "${output_ci_single}"
+    first_line=$(head -n 1 "${output_ci_single}")
+    if [[ "${first_line}" != '```' ]]; then
+      sed -i "1 i\`\`\`" "${output_ci_single}"
+    fi
     "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}"
+    "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+    "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}"
   fi
 done
 
 pr_list=""
 if [[ -f "${pr_list_dbfile}" ]]; then
-  pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Ready | awk '{print $1}') || true
+  pr_list=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Ready) || true
 fi
 if [[ -z "${pr_list+x}" ]]; then
   echo "no PRs open and ready for checkout/build .. exiting"
@@ -140,7 +154,7 @@ fi
 
 for pr in ${pr_list}; do
   # Skip pr's that are currently Building for when overlapping driver scripts are being called from within cron
-  pr_building=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | grep Building) || true
+  pr_building=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | grep Building) || true
   if [[ -z "${pr_building+x}" ]]; then
       continue
   fi
@@ -151,7 +165,7 @@ for pr in ${pr_list}; do
   driver_build_PID=$$
   driver_build_HOST=$(hostname -s)
   "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building"
-  "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Building "${driver_build_PID}:${driver_build_HOST}"
+  "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Building "${driver_build_PID}:${driver_build_HOST}"
   rm -Rf "${pr_dir}"
   mkdir -p "${pr_dir}"
   {
@@ -161,7 +175,10 @@ for pr in ${pr_list}; do
     echo "with PID: ${driver_build_PID} on host: ${driver_build_HOST}"
     echo ""
   } >> "${output_ci_single}"
-  sed -i "1 i\`\`\`" "${output_ci_single}"
+  first_line=$(head -n 1 "${output_ci_single}")
+  if [[ "${first_line}" != '```' ]]; then
+    sed -i "1 i\`\`\`" "${output_ci_single}"
+  fi
   "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
   set +e
   "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${output_ci}"
@@ -173,7 +190,7 @@ for pr in ${pr_list}; do
   # we need to exit this instance of the driver script
   #################################################################
   if [[ ${ci_status} -ne 0 ]]; then
-     build_PID_check=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | awk '{print $4}' | cut -d":" -f1) || true
+     build_PID_check=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | awk '{print $4}' | cut -d":" -f1) || true
      if [[ "${build_PID_check}" -ne "$$" ]]; then
         echo "Driver build PID: ${build_PID_check} no longer running this build ... exiting"
         exit 0
@@ -181,7 +198,7 @@ for pr in ${pr_list}; do
   fi
   set -e
   if [[ ${ci_status} -eq 0 ]]; then
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Built "0:0"
+    "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Built "0:0"
     #setup space to put an experiment
     # export RUNTESTS for yaml case files to pickup
     export RUNTESTS="${pr_dir}/RUNTESTS"
@@ -203,18 +220,20 @@ for pr in ${pr_list}; do
       set +e
       export LOGFILE_PATH="${HOMEgfs}/ci/scripts/create_experiment.log"
       rm -f "${LOGFILE_PATH}"
-      "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml"  > "${LOGFILE_PATH}" 2>&1
+      yaml_case_file="${HOMEgfs}/ci/cases/pr/${case}.yaml"
+      skip_hosts=$("${HOMEgfs}/ci/scripts/utils/parse_yaml.py" --yaml "${yaml_case_file}" --key skip_ci_on_hosts --string)
+      if [[ "${skip_hosts}" == *"${MACHINE_ID}"* ]]; then
+        {
+          echo "Case setup: Skipped for experiment ${pslot}" || true
+        } >> "${output_ci}"
+        continue
+      fi
+      "${HOMEgfs}/workflow/create_experiment.py" --yaml "${yaml_case_file}" --overwrite > "${LOGFILE_PATH}" 2>&1
       ci_status=$?
       set -e
       if [[ ${ci_status} -eq 0 ]]; then
-        last_line=$(tail -1 "${LOGFILE_PATH}")
-        if [[ "${last_line}" == *"Skipping creation"* ]]; then
-          action="Skipped"
-        else
-          action="Completed"
-        fi
         {
-          echo "Case setup: ${action} for experiment ${pslot}" || true
+          echo "Case setup: Completed for experiment ${pslot}" || true
         } >> "${output_ci}"
       else
         {
@@ -223,24 +242,35 @@ for pr in ${pr_list}; do
           cat "${LOGFILE_PATH}"
         } >> "${output_ci}"
         "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
-        "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+        "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
         "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
         exit 1
       fi
     done
 
     "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running"
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running "0:0"
+    "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running "0:0"
     "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
 
-  else
+  else # failed to clone and build
+
     {
       echo "Failed on cloning and building global-workflowi PR: ${pr}"
       echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true
     } >> "${output_ci}"
+
     "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
-    "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+    "${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}"
+
+    if [[ -f "${HOMEgfs}/sorc/logs/error.logs" ]]; then
+      gist_URL=$("${ROOT_DIR}/ci/scripts/utils/ci_utils_wrapper.sh" publish_logs "PR_${pr}" "${HOMEgfs}/sorc"  "${HOMEgfs}/sorc/logs/error.logs")
+      {
+        echo -e "\nError logs from build"
+        echo "Gist URL: ${gist_URL}"
+      } >> "${output_ci}"
+    fi
     "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}"
+
   fi
 
 done # looping over each open and labeled PR
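For reference, a minimal sketch of how the relocated PR database helper is queried and iterated over in this driver; it assumes the --list call prints the matching PR numbers as a single space-separated string (as pr_list_database.py does further below), and the echo is only a placeholder for the per-PR work shown above:

    pr_list=$("${ROOT_DIR}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Ready) || true
    for pr in ${pr_list}; do
      echo "Processing PR #${pr}"
    done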
diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh
index 9460e0b0a4..6cd2493769 100755
--- a/ci/scripts/driver_weekly.sh
+++ b/ci/scripts/driver_weekly.sh
@@ -38,7 +38,7 @@ export PS4='+ $(basename ${BASH_SOURCE[0]})[${LINENO}]'
 
 source "${ROOT_DIR}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules)
+  hera | orion | hercules | wcoss2)
     echo "Running Automated Testing on ${MACHINE_ID}"
     source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
     ;;
diff --git a/ci/scripts/pr_list_database.py b/ci/scripts/pr_list_database.py
deleted file mode 100755
index 224aabd361..0000000000
--- a/ci/scripts/pr_list_database.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-import os
-from pathlib import Path
-from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, REMAINDER, ZERO_OR_MORE
-import sqlite3
-
-
-def full_path(string):
-    """
-    Gets the absolute path of the given file and confirms the directory exists
-
-    Parameters
-    ----------
-    string : str
-        Path to a file
-
-    Returns
-    --------
-    str
-        Absolute path of input path
-
-    Raises
-    -------
-    NotADirectoryError
-        If the target directory for the file does not exist.
-
-    """
-
-    if os.path.isfile(string) or os.path.isdir(os.path.dirname(string)):
-        return os.path.abspath(string)
-    else:
-        raise NotADirectoryError(string)
-
-
-def sql_connection(filename: os.path) -> sqlite3.Connection:
-    """
-    Returns an Sqlite3 Cursor object from a given path to a sqlite3 database file
-
-    Parameters
-    ----------
-    filename : Path
-        Full path to a sqlite3 database file
-
-    Returns
-    -------
-    sqlite3.Connection
-        Sqlite3 Connection object for updating table
-
-    """
-    try:
-        return sqlite3.connect(filename)
-    except sqlite3.Error:
-        print(sqlite3.Error)
-        sys.exit(-1)
-
-
-def sql_table(obj: sqlite3.Cursor) -> None:
-    """
-    Creates the initial sqlite3 table for PR states and status
-
-    Parameters
-    ----------
-    obj : sqlite3.Cursor
-         Cursor object for Sqlite3
-
-    """
-
-    obj.execute("CREATE TABLE processing(pr integer PRIMARY KEY, state text, status text, reset_id integer, cases text)")
-
-
-def sql_insert(obj: sqlite3.Cursor, entities: list) -> None:
-    """
-    Inserts a new row in sqlite3 table with PR, state, and status
-
-    Parameters
-    ----------
-    obj : sqlite3.Cursor
-        Cursor object for Sqlite3
-    entities : list
-        A list of four string values that go into sqlite table (pr, state, status, reset_id, cases)
-            pr: pull request number
-            state: The new value for the state (Open, Closed)
-            status: The new value for the status (Ready, Running, Failed)
-            reset_id: The value for number of times reset_id to Ready
-            cases: String containing case selection information
-
-    """
-
-    obj.execute('INSERT INTO processing(pr, state, status, reset_id, cases) VALUES(?, ?, ?, ?, ?)', entities)
-
-
-def sql_update(obj: sqlite3.Cursor, pr: str, updates: dict) -> None:
-    """Updates table for a given pr with new values for state and status
-
-    Parameters
-    ----------
-    obj : sqlite.sql_connection
-        sqlite3 Cursor Object
-    pr : str
-        The given pr number to update in the table
-    updates : dict
-        Dictionary of values to update for a given PR to include by postion
-        state, The new value for the state (Open, Closed)
-        status, The new value for the status (Ready, Running, Failed)
-        reset_id, The value for number of times reset_id to Ready
-        cases, Information regarding which cases are used (i.e. self PR)
-
-    """
-
-    update_list = ['state', 'status', 'reset_id', 'cases']
-    rows = sql_fetch(obj)
-    for value in updates:
-        update = update_list.pop(0)
-        obj.execute(f'UPDATE processing SET "{update}" = "{value}" WHERE pr = {pr}')
-
-
-def sql_fetch(obj: sqlite3.Cursor) -> list:
-    """ Gets list of all rows in table
-
-    Parameters
-    ----------
-    obj : sqlite.sql_connection
-        sqlite3 Cursor Object
-
-    """
-
-    obj.execute('SELECT * FROM processing')
-    return obj.fetchall()
-
-
-def sql_remove(obj: sqlite3.Cursor, pr: str) -> None:
-    """ Removes the row from table with given pr number
-
-    Parameters
-    ----------
-    obj : sqlite.sql_connection
-        sqlite3 Connection Object
-    pr : str
-        pr number acting as key for removing the row with in it
-
-    """
-
-    obj.execute(f'DELETE FROM processing WHERE pr = {pr}').rowcount
-
-
-def input_args():
-
-    description = """Arguments for creating and updating db file for pr states
-    """
-
-    parser = ArgumentParser(description=description,
-                            formatter_class=ArgumentDefaultsHelpFormatter)
-
-    parser.add_argument('--dbfile', help='SQLite3 database file with PR list', type=full_path)
-    parser.add_argument('--create', help='create sqlite file for pr list status', action='store_true', required=False)
-    parser.add_argument('--add_pr', nargs=1, metavar='PR', help='add new pr to list (defults to: Open,Ready)', required=False)
-    parser.add_argument('--remove_pr', nargs=1, metavar='PR', help='removes pr from list', required=False)
-    parser.add_argument('--update_pr', nargs=REMAINDER, metavar=('pr', 'state', 'status', 'reset_id', 'cases'),
-                        help='updates state and status of a given pr', required=False)
-    parser.add_argument('--display', nargs='*', help='output pr table', required=False)
-
-    args = parser.parse_args()
-    return args
-
-
-if __name__ == '__main__':
-
-    args = input_args()
-
-    if not args.create:
-        if not os.path.isfile(args.dbfile):
-            print(f'Error: {args.dbfile} does not exsist')
-            sys.exit(-1)
-
-    con = sql_connection(args.dbfile)
-    obj = con.cursor()
-
-    if args.create:
-        sql_table(obj)
-
-    if args.add_pr:
-        rows = sql_fetch(obj)
-        for row in rows:
-            if str(row[0]) == str(args.add_pr[0]):
-                print(f"pr {row[0]} already is in list: nothing added")
-                sys.exit(0)
-
-        entities = (args.add_pr[0], 'Open', 'Ready', 0, 'ci_repo')
-        sql_insert(obj, entities)
-
-    if args.update_pr:
-        if len(args.update_pr) < 2:
-            print(f"update_pr must have at least one vaule to update")
-            sys.exit(0)
-        pr = args.update_pr[0]
-
-        sql_update(obj, pr, args.update_pr[1:])
-
-    if args.remove_pr:
-        sql_remove(obj, args.remove_pr[0])
-
-    if args.display is not None:
-        rows = sql_fetch(obj)
-        if len(args.display) == 1:
-            for row in rows:
-                if int(args.display[0]) == int(row[0]):
-                    print(' '.join(map(str, row)))
-        else:
-            for row in rows:
-                print(' '.join(map(str, row)))
-
-    con.commit()
-    con.close()
diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh
index 5a909c1c64..5c49a21c4b 100755
--- a/ci/scripts/run-check_ci.sh
+++ b/ci/scripts/run-check_ci.sh
@@ -9,6 +9,7 @@ set -eu
 
 TEST_DIR=${1:-${TEST_DIR:-?}}  # Location of the root of the testing directory
 pslot=${2:-${pslot:-?}}        # Name of the experiment being tested by this script
+SYSTEM_BUILD_DIR=${3:-"global-workflow"}  # Name of the system build directory, default is "global-workflow"
 
 # TEST_DIR contains 2 directories;
 # 1. HOMEgfs: clone of the global-workflow
@@ -21,8 +22,11 @@ pslot=${2:-${pslot:-?}}        # Name of the experiment being tested by this scr
 #     │   └── ${pslot}
 #     └── EXPDIR
 #         └── ${pslot}
-HOMEgfs="${TEST_DIR}/HOMEgfs"
+# Two system build directories are created at build time: gfs and gdas
+# TODO: Make this configurable (for now all scripts run from gfs for CI at runtime)
+HOMEgfs="${TEST_DIR}/${SYSTEM_BUILD_DIR}"
 RUNTESTS="${TEST_DIR}/RUNTESTS"
+run_check_logfile="${RUNTESTS}/ci-run_check.log"
 
 # Source modules and setup logging
 echo "Source modules."
@@ -45,7 +49,7 @@ fi
 # Launch experiment
 echo "Launch experiment with Rocoto."
 rocotorun -v "${ROCOTO_VERBOSE:-0}" -w "${xml}" -d "${db}"
-sleep 30
+sleep 10
 if [[ ! -f "${db}" ]]; then
   echo "FATAL ERROR: Rocoto database file ${db} not found, experiment ${pslot} failed, ABORT!"
   exit 2
@@ -53,48 +57,52 @@ fi
 
 # Experiment launched
 rc=99
+set +e
 while true; do
 
   echo "Run rocotorun."
   rocotorun -v "${ROCOTO_VERBOSE:-0}" -w "${xml}" -d "${db}"
 
   # Wait before running rocotostat
-  sleep 30
+  sleep 60
 
   # Get job statistics
   echo "Gather Rocoto statistics"
-  rocotostat_output=$(rocotostat -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true
-  num_cycles=$(echo "${rocotostat_output}" | wc -l) || true
-  num_done=$(echo "${rocotostat_output}" | grep -c Done) || true
-  num_succeeded=$(rocotostat -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true
-  num_failed=$(rocotostat -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true
+  # shellcheck disable=SC2312 # We want to use the exit code of the command
+  full_state=$("${HOMEgfs}/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}" -v)
+  error_stat=$?
 
-  echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}"
+  for state in CYCLES_TOTAL CYCLES_DONE SUCCEEDED FAIL DEAD; do
+    declare "${state}"="$(echo "${full_state}" | grep "${state}" | cut -d: -f2)" || true
+  done
+  ROCOTO_STATE=$(echo "${full_state}" | tail -1) || exit 1
 
-  if [[ ${num_failed} -ne 0 ]]; then
-    {
-      echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true
-      echo "Experiment ${pslot} Terminated: *FAILED*"
-    } >> "${RUNTESTS}/ci.log"
+  echo -e "(${pslot} on ${MACHINE_ID^})\n\tTotal Cycles: ${CYCLES_TOTAL}\n\tNumber Cycles done: ${CYCLES_DONE}\n\tState: ${ROCOTO_STATE}"
 
-    error_logs=$(rocotostat -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs rocotocheck -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
+  if [[ ${error_stat} -ne 0 ]]; then
     {
-     echo "Error logs:"
-     echo "${error_logs}"
-    } >> "${RUNTESTS}/ci.log"
-    sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
-    sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "${pr}\/RUNTESTS" |  awk '{print $1}' | xargs scancel || true
-    rc=1
-    break
+      echo "Experiment ${pslot} Terminated with ${FAIL} tasks failed and ${DEAD} dead at $(date)" || true
+      echo "Experiment ${pslot} Terminated: *${ROCOTO_STATE}*"
+    } | tee -a "${run_check_logfile}"
+    if [[ "${DEAD}" -ne 0 ]]; then
+      error_logs=$(rocotostat -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs rocotocheck -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
+      {
+        echo "Error logs:"
+        echo "${error_logs}"
+      } | tee -a  "${run_check_logfile}"
+      rm -f "${RUNTESTS}/${pslot}_error.logs"
+      for log in ${error_logs}; do
+        echo "RUNTESTS${log#*RUNTESTS}" >> "${RUNTESTS}/${pslot}_error.logs"
+      done
+    fi
+    rc=1
+    break
   fi
 
-  if [[ "${num_done}" -eq "${num_cycles}" ]]; then
+  if [[ "${ROCOTO_STATE}" == "DONE" ]]; then
     {
-      echo "Experiment ${pslot} Completed at $(date)" || true
-      echo "with ${num_succeeded} successfully completed jobs" || true
-      echo "Experiment ${pslot} Completed: *SUCCESS*"
-    } >> "${RUNTESTS}/ci.log"
-    sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
+      echo "Experiment ${pslot} Completed ${CYCLES_DONE} Cycles: *SUCCESS* at $(date)" || true
+    } | tee -a "${run_check_logfile}"
     rc=0
     break
   fi
@@ -105,3 +113,4 @@ while true; do
 done
 
 exit "${rc}"
+
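The declare loop above parses the verbose output of the new rocotostat.py helper. A hedged sketch of the assumed output shape and how the fields are extracted (the exact wording of rocotostat.py's output is an assumption here; only the KEY:value lines and the final state line are relied on, with the counts matching the test expectations in test_rocotostat.py below):

    # Assumed example output of: rocotostat.py -w workflow.xml -d database.db -v
    full_state=$(printf 'CYCLES_TOTAL:1\nCYCLES_DONE:1\nSUCCEEDED:20\nFAIL:0\nDEAD:0\nDONE\n')
    CYCLES_DONE=$(echo "${full_state}" | grep CYCLES_DONE | cut -d: -f2)   # -> 1
    ROCOTO_STATE=$(echo "${full_state}" | tail -1)                         # -> DONE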
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
index 4a390a23f2..f109aa83d4 100755
--- a/ci/scripts/run_ci.sh
+++ b/ci/scripts/run_ci.sh
@@ -9,7 +9,7 @@ set -eux
 # Abstract TODO
 #####################################################################################
 
-ROOT_DIR="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+HOMEgfs="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
 scriptname=$(basename "${BASH_SOURCE[0]}")
 echo "Begin ${scriptname} at $(date -u)" || true
 export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
@@ -18,11 +18,11 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
 #  Set up runtime environment varibles for accounts on supproted machines
 #########################################################################
 
-source "${ROOT_DIR}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/detect_machine.sh"
 case ${MACHINE_ID} in
-  hera | orion | hercules)
+  hera | orion | hercules | wcoss2)
    echo "Running Automated Testing on ${MACHINE_ID}"
-   source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
+   source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
    ;;
  *)
    echo "Unsupported platform. Exiting with error."
@@ -30,8 +30,9 @@ case ${MACHINE_ID} in
    ;;
 esac
 set +x
-source "${ROOT_DIR}/ush/module-setup.sh"
-module use "${ROOT_DIR}/modulefiles"
+export HOMEgfs
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
 module load "module_gwsetup.${MACHINE_ID}"
 module list
 set -eux
@@ -47,7 +48,8 @@ pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
 
 pr_list=""
 if [[ -f "${pr_list_dbfile}" ]]; then
-  pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true
+  pr_list=$("${HOMEgfs}/ci/scripts/utils/pr_list_database.py" --dbfile "${pr_list_dbfile}" --list Open Running) || true
+  pr_list=$(echo "${pr_list}" |  tr ' ' '\n' | head -n "${max_concurrent_pr}" | tr '\n' ' ') || true
 fi
 if [[ -z "${pr_list}" ]]; then
   echo "no open and built PRs that are ready for the cases to advance with rocotorun .. exiting"
diff --git a/ci/scripts/tests/test_create_experiment.py b/ci/scripts/tests/test_create_experiment.py
new file mode 100644
index 0000000000..03f3a30805
--- /dev/null
+++ b/ci/scripts/tests/test_create_experiment.py
@@ -0,0 +1,29 @@
+from wxflow import Executable
+from shutil import rmtree
+import os
+import copy
+
+_here = os.path.dirname(__file__)
+HOMEgfs = os.sep.join(_here.split(os.sep)[:-3])
+RUNDIR = os.path.join(_here, 'testdata/RUNDIR')
+
+
+def test_create_experiment():
+
+    create_experiment_script = Executable(f'{HOMEgfs}/workflow/create_experiment.py')
+    yaml_dir = os.path.join(HOMEgfs, 'ci/cases/pr')
+    env = os.environ.copy()
+    env['RUNTESTS'] = RUNDIR
+
+    for case in os.listdir(yaml_dir):
+        if case.endswith('.yaml'):
+            with open(os.path.join(yaml_dir, case), 'r') as file:
+                file_contents = file.read()
+                if 'ICSDIR_ROOT' not in file_contents:
+                    create_experiment = copy.deepcopy(create_experiment_script)
+                    create_experiment.add_default_arg(['-y', f'../../cases/pr/{case}', '--overwrite'])
+                    env['pslot'] = os.path.splitext(case)[0]
+                    create_experiment(env=env)
+                    assert (create_experiment.returncode == 0)
+
+    rmtree(RUNDIR)
diff --git a/ci/scripts/tests/test_rocotostat.py b/ci/scripts/tests/test_rocotostat.py
new file mode 100755
index 0000000000..a3c56df988
--- /dev/null
+++ b/ci/scripts/tests/test_rocotostat.py
@@ -0,0 +1,89 @@
+import sys
+import os
+from shutil import rmtree
+import wget
+
+script_dir = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(os.path.join(os.path.dirname(script_dir), 'utils'))
+
+from rocotostat import rocoto_statcount, rocotostat_summary, is_done, is_stalled, CommandNotFoundError
+from wxflow import which
+
+test_data_url = 'https://noaa-nws-global-pds.s3.amazonaws.com/data/CI/'
+
+testdata_path = 'testdata/rocotostat'
+testdata_full_path = os.path.join(script_dir, testdata_path)
+
+
+if not os.path.isfile(os.path.join(testdata_full_path, 'database.db')):
+    os.makedirs(testdata_full_path, exist_ok=True)
+    workflow_url = test_data_url + str(testdata_path) + '/workflow.xml'
+    workflow_destination = os.path.join(testdata_full_path, 'workflow.xml')
+    wget.download(workflow_url, workflow_destination)
+
+    database_url = test_data_url + str(testdata_path) + '/database.db'
+    database_destination = os.path.join(testdata_full_path, 'database.db')
+    wget.download(database_url, database_destination)
+
+rocotostat_cmd = which('rocotostat')
+if not rocotostat_cmd:
+    raise CommandNotFoundError("rocotostat not found in PATH")
+
+rocotostat_cmd.add_default_arg(['-w', os.path.join(testdata_path, 'workflow.xml'), '-d', os.path.join(testdata_path, 'database.db')])
+
+
+def test_rocoto_statcount():
+
+    result = rocoto_statcount(rocotostat_cmd)
+
+    assert result['SUCCEEDED'] == 20
+    assert result['FAIL'] == 0
+    assert result['DEAD'] == 0
+    assert result['RUNNING'] == 0
+    assert result['SUBMITTING'] == 0
+    assert result['QUEUED'] == 0
+
+
+def test_rocoto_summary():
+
+    result = rocotostat_summary(rocotostat_cmd)
+
+    assert result['CYCLES_TOTAL'] == 1
+    assert result['CYCLES_DONE'] == 1
+
+
+def test_rocoto_done():
+
+    result = rocotostat_summary(rocotostat_cmd)
+
+    assert is_done(result)
+
+    rmtree(testdata_full_path)
+
+
+def test_rocoto_stalled():
+    testdata_path = 'testdata/rocotostat_stalled'
+    testdata_full_path = os.path.join(script_dir, testdata_path)
+    xml = os.path.join(testdata_full_path, 'stalled.xml')
+    db = os.path.join(testdata_full_path, 'stalled.db')
+
+    if not os.path.isfile(os.path.join(testdata_full_path, 'stalled.db')):
+        os.makedirs(testdata_full_path, exist_ok=True)
+        workflow_url = test_data_url + str(testdata_path) + '/stalled.xml'
+        database_url = test_data_url + str(testdata_path) + '/stalled.db'
+
+        workflow_destination = os.path.join(testdata_full_path, 'stalled.xml')
+        wget.download(workflow_url, workflow_destination)
+
+        database_destination = os.path.join(testdata_full_path, 'stalled.db')
+        wget.download(database_url, database_destination)
+
+    rocotostat_cmd = which('rocotostat')
+    rocotostat_cmd.add_default_arg(['-w', xml, '-d', db])
+
+    result = rocoto_statcount(rocotostat_cmd)
+
+    assert result['SUCCEEDED'] == 11
+    assert is_stalled(result)
+
+    rmtree(testdata_full_path)
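One way to run the new unit tests locally, assuming rocotostat is on PATH and the pytest, wxflow, and wget Python packages are installed (the test data for test_rocotostat.py is downloaded on first run from the bucket referenced above):

    cd ci/scripts/tests
    pytest -v test_rocotostat.py test_setup.py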
diff --git a/ci/scripts/tests/test_setup.py b/ci/scripts/tests/test_setup.py
new file mode 100755
index 0000000000..77a36369f4
--- /dev/null
+++ b/ci/scripts/tests/test_setup.py
@@ -0,0 +1,89 @@
+from wxflow import Executable, Configuration, ProcessError
+from shutil import rmtree
+import pytest
+import os
+
+_here = os.path.dirname(__file__)
+HOMEgfs = os.sep.join(_here.split(os.sep)[:-3])
+RUNDIR = os.path.join(_here, 'testdata/RUNDIR')
+pslot = "C48_ATM"
+account = "fv3-cpu"
+foobar = "foobar"
+
+
+def test_setup_expt():
+
+    arguments = [
+        "gfs", "forecast-only",
+        "--pslot", pslot, "--app", "ATM", "--resdetatmos", "48",
+        "--comroot", f"{RUNDIR}", "--expdir", f"{RUNDIR}",
+        "--idate", "2021032312", "--edate", "2021032312", "--overwrite"
+    ]
+    setup_expt_script = Executable(os.path.join(HOMEgfs, "workflow", "setup_expt.py"))
+    setup_expt_script.add_default_arg(arguments)
+    setup_expt_script()
+    assert (setup_expt_script.returncode == 0)
+
+
+def test_setup_xml():
+
+    setup_xml_script = Executable(os.path.join(HOMEgfs, "workflow/setup_xml.py"))
+    setup_xml_script.add_default_arg(f"{RUNDIR}/{pslot}")
+    setup_xml_script()
+    assert (setup_xml_script.returncode == 0)
+
+    cfg = Configuration(f"{RUNDIR}/{pslot}")
+    base = cfg.parse_config('config.base')
+    assert base.ACCOUNT == account
+
+    assert "UNKNOWN" not in base.values()
+
+    with open(f"{RUNDIR}/{pslot}/{pslot}.xml", 'r') as file:
+        contents = file.read()
+    assert contents.count(account) > 5
+
+    rmtree(RUNDIR)
+
+
+def test_setup_xml_fail_config_env_cornercase():
+
+    script_content = ('''#!/usr/bin/env bash
+export HOMEgfs=foobar
+../../../workflow/setup_xml.py "${1}"\n
+''')
+
+    with open('run_setup_xml.sh', 'w') as file:
+        file.write(script_content)
+    os.chmod('run_setup_xml.sh', 0o755)
+
+    try:
+        setup_xml_script = Executable(os.path.join(HOMEgfs, "ci", "scripts", "tests", "run_setup_xml.sh"))
+        setup_xml_script.add_default_arg(f"{RUNDIR}/{pslot}")
+        setup_xml_script()
+        assert (setup_xml_script.returncode == 0)
+
+        cfg = Configuration(f"{RUNDIR}/{pslot}")
+        base = cfg.parse_config('config.base')
+        assert base.ACCOUNT == account
+
+        assert foobar not in base.values()
+        assert "UNKNOWN" not in base.values()
+
+        with open(f"{RUNDIR}/{pslot}/{pslot}.xml", 'r') as file:
+            contents = file.read()
+        assert contents.count(account) > 5
+
+    except ProcessError as e:
+        # We expect this to fail because ACCOUNT=fv3-cpu is already set in config.base and the environment
+        pass
+    except Exception as e:
+        # Any other exception fails the test
+        pytest.fail(f"Unexpected exception occurred: {e}")
+
+    finally:
+        # Cleanup code to ensure it runs regardless of test outcome
+        os.remove('run_setup_xml.sh')
+        try:
+            rmtree(RUNDIR)
+        except FileNotFoundError:
+            pass
diff --git a/ci/scripts/utils/ci_utils.sh b/ci/scripts/utils/ci_utils.sh
index 737a3e5a86..2a51467d38 100755
--- a/ci/scripts/utils/ci_utils.sh
+++ b/ci/scripts/utils/ci_utils.sh
@@ -1,24 +1,157 @@
 #!/bin/env bash
 
-function cancel_slurm_jobs() {
+function determine_scheduler() {
+  if command -v sbatch &> /dev/null; then
+    echo "slurm";
+  elif command -v qsub &> /dev/null; then
+    echo "torque";
+  else
+    echo "unknown"
+  fi
+}
 
-  # Usage: cancel_slurm_jobs <substring>
-  # Example: cancel_slurm_jobs "C48_ATM_3c4e7f74"
+function cancel_batch_jobs() {
+  # Usage: cancel_batch_jobs <substring>
+  # Example: cancel_batch_jobs "C48_ATM_3c4e7f74"
   #
-  # Cancel all Slurm jobs that have the given substring in their name
+  # Cancel all batch jobs that have the given substring in their name
   # So like in the example all jobs with "C48_ATM_3c4e7f74"
   # in their name will be canceled
 
   local substring=$1
   local job_ids
-  job_ids=$(squeue -u "${USER}" -h -o "%i")
-
-  for job_id in ${job_ids}; do
-    job_name=$(sacct -j "${job_id}" --format=JobName%100 | head -3 | tail -1 | sed -r 's/\s+//g') || true
-    if [[ "${job_name}" =~ ${substring} ]]; then
-      echo "Canceling Slurm Job ${job_name} with: scancel ${job_id}"
-      scancel "${job_id}"
-      continue
-    fi
+
+  scheduler=$(determine_scheduler)
+
+  if [[ "${scheduler}" == "torque" ]]; then
+    job_ids=$(qstat -u "${USER}" | awk '{print $1}') || true
+
+    for job_id in ${job_ids}; do
+      job_name=$(qstat -f "${job_id}" | grep Job_Name | awk '{print $3}') || true
+      if [[ "${job_name}" =~ ${substring} ]]; then
+        echo "Canceling PBS Job ${job_name} with: qdel ${job_id}"
+        qdel "${job_id}"
+        continue
+      fi
+    done
+
+  elif [[ "${scheduler}" == "slurm" ]]; then
+
+    job_ids=$(squeue -u "${USER}" -h -o "%i")
+
+    for job_id in ${job_ids}; do
+      job_name=$(sacct -j "${job_id}" --format=JobName%100 | head -3 | tail -1 | sed -r 's/\s+//g') || true
+      if [[ "${job_name}" =~ ${substring} ]]; then
+        echo "Canceling Slurm Job ${job_name} with: scancel ${job_id}"
+        scancel "${job_id}"
+        continue
+      fi
+    done
+
+  else
+      echo "FATAL: Unknown/unsupported job scheduler"
+      exit 1
+  fi
+}
+
+
+function get_pr_case_list () {
+
+    #############################################################
+    # loop over every yaml file in the PR's ci/cases/pr directory
+    # and echo the case name for each one
+    #############################################################
+    for yaml_config in "${HOMEgfs}/ci/cases/pr/"*.yaml; do
+      case=$(basename "${yaml_config}" .yaml) || true
+      echo "${case}"
+    done
+}
+
+function get_pslot_list () {
+
+    local RUNTESTS="${1}"
+  
+    #############################################################
+    # loop over expdir directories in RUNTESTS
+    # and create a list of the directory names (pslot), each including its hash tag
+    #############################################################
+    for pslot_dir in "${RUNTESTS}/EXPDIR/"*; do
+      pslot=$(basename "${pslot_dir}") || true
+      echo "${pslot}"
+    done
+
+}
+
+function get_pslot () {
+
+    local RUNTESTS="${1}"
+    local case="${2}"
+  
+    #############################################################
+    # loop over expdir directories in RUNTESTS
+    # and return the name of the pslot with its tag that matches the case
+    #############################################################
+    # shellcheck disable=SC2045
+    for pslot_dir in $(ls -td "${RUNTESTS}/EXPDIR/"*); do
+      pslot=$(basename "${pslot_dir}")
+      check_case=$(echo "${pslot}" | rev | cut -d"_" -f2- | rev) || true
+      if [[ "${check_case}" == "${case}" ]]; then
+        echo "${pslot}"
+        break
+      fi
+    done
+
+}
+
+function cancel_all_batch_jobs () {
+  local RUNTESTS="${1}"
+  pslot_list=$(get_pslot_list "${RUNTESTS}")
+  for pslot in ${pslot_list}; do
+    cancel_batch_jobs "${pslot}"
   done
 }
+
+function create_experiment () {
+
+  local yaml_config="${1}"
+  cd "${HOMEgfs}" || exit 1
+  pr_sha=$(git rev-parse --short HEAD)
+  case=$(basename "${yaml_config}" .yaml) || true
+  export pslot=${case}_${pr_sha}
+  
+  source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
+  source "${HOMEgfs}/workflow/gw_setup.sh"
+
+  # Remove the RUNDIRS dir in case this is a retry
+  rm -Rf "${STMP}/RUNDIRS/${pslot}"
+
+  "${HOMEgfs}/${system}/workflow/create_experiment.py" --overwrite --yaml "${yaml_config}"
+
+}
+
+function publish_logs() {
+    # publish_logs function
+    # This function takes an identifier, a directory path, and a file listing the log files to publish.
+    # It calls the publish_logs.py script to publish those logs and echoes the resulting gist URL.
+    # Usage: publish_logs <ID> <dir_path> <file_with_log_list>
+    local PR_header="$1"
+    local dir_path="$2"
+    local file="$3"
+
+    local full_paths=""
+    while IFS= read -r line; do
+        full_path="${dir_path}/${line}"
+        if [[ -f "${full_path}" ]]; then
+            full_paths+="${full_path} "
+        else
+            echo "File ${full_path} does not exist"
+        fi
+    done < "${file}"
+
+    if [[ -n "${full_paths}" ]]; then
+        # shellcheck disable=SC2027,SC2086
+        ${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${full_paths} --repo ${PR_header} > /dev/null
+        URL="$("${HOMEgfs}/ci/scripts/utils/publish_logs.py" --file "${full_paths}" --gist "${PR_header}")"
+    fi
+    echo "${URL}"
+}
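As a usage sketch (HOMEgfs assumed to point at the workflow clone), the helpers above are sourced rather than executed, for example to cancel every batch job for one experiment using the substring matching described in cancel_batch_jobs:

    source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh"
    cancel_batch_jobs "C48_ATM_3c4e7f74"    # cancels any Slurm or PBS job whose name contains this substring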
diff --git a/ci/scripts/utils/ci_utils_wrapper.sh b/ci/scripts/utils/ci_utils_wrapper.sh
new file mode 100755
index 0000000000..51c392fb99
--- /dev/null
+++ b/ci/scripts/utils/ci_utils_wrapper.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+HOMEgfs="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
+source "${HOMEgfs}/ush/detect_machine.sh"
+
+utility_function="${1}"
+
+source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh"
+${utility_function} "${@:2}"
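The wrapper exposes individual ci_utils.sh functions by name to callers that cannot source the file directly; for example, driver.sh publishes build error logs with a call of this form (the PR number here is illustrative):

    ./ci_utils_wrapper.sh publish_logs "PR_1234" "${HOMEgfs}/sorc" "${HOMEgfs}/sorc/logs/error.logs"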
diff --git a/ci/scripts/utils/get_host_case_list.py b/ci/scripts/utils/get_host_case_list.py
new file mode 100755
index 0000000000..eb10f29f05
--- /dev/null
+++ b/ci/scripts/utils/get_host_case_list.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+import os
+from os.path import basename, splitext
+import sys
+import glob
+from wxflow import parse_j2yaml
+from wxflow import AttrDict
+
+_here = os.path.dirname(__file__)
+_top = os.path.abspath(os.path.join(os.path.abspath(_here), '../../..'))
+
+if __name__ == '__main__':
+
+    if len(sys.argv) < 2:
+        print('Usage: get_host_case_list.py <host_name>')
+        sys.exit(1)
+
+    host = sys.argv[1]
+
+    case_list = []
+    HOMEgfs = _top
+    data = AttrDict(HOMEgfs=_top)
+    data.update(os.environ)
+
+    case_files = glob.glob(f'{HOMEgfs}/ci/cases/pr/*.yaml')
+    for case_yaml in case_files:
+        case_conf = parse_j2yaml(path=case_yaml, data=data)
+        if 'skip_ci_on_hosts' in case_conf:
+            if host.lower() in [machine.lower() for machine in case_conf.skip_ci_on_hosts]:
+                continue
+        case_list.append(splitext(basename(case_yaml))[0])
+    print(' '.join(case_list))
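Example invocation (host matching against skip_ci_on_hosts is case-insensitive):

    ./get_host_case_list.py wcoss2    # prints the space-separated list of ci/cases/pr cases not skipped on wcoss2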
diff --git a/ci/scripts/utils/githubpr.py b/ci/scripts/utils/githubpr.py
new file mode 100755
index 0000000000..5fe0b643ea
--- /dev/null
+++ b/ci/scripts/utils/githubpr.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+
+import os
+import re
+
+from github import Github, GithubException, InputFileContent, UnknownObjectException
+from wxflow import which
+
+
+class GitHubDBError(Exception):
+    """
+    Base class for GitHubDB exceptions.
+    """
+    UnknownObjectException = UnknownObjectException
+    GithubException = GithubException
+
+
+class GitHubPR(Github):
+    """
+    GitHubPR is an inherited class from GitHub in pyGitHub for interacting with GitHub pull requests.
+
+    Attributes
+    ----------
+    repo : github.Repository.Repository
+        The GitHub repository to interact with.
+    pulls : github.PaginatedList.PaginatedList of github.PullRequest.PullRequest
+        The list of open pull requests in the repository, sorted by last updated.
+    user : github.AuthenticatedUser.AuthenticatedUser
+        The authenticated user.
+    InputFileContent : github.InputFileContent.InputFileContent
+        The class used to create file content for gists.
+
+    Methods
+    -------
+    __init__(self, repo_url=None, TOKEN=None)
+        Initialize a new GitHubPR instance.
+    get_repo_url(self, repo_url=None)
+        Set the repository for the GitHubPR instance
+        using a URL directly or from the 'REPO_URL' environment variable.
+    get_pr_list(self)
+        Get the numerical list of all pull requests.
+    get_ci_pr_list(self, state='Ready', host=None)
+        Get the numerical list of all pull requests with a specific state from labels.
+        For example, if a PR has a label 'CI-Hera-Ready' (of the form CI-[host]-[state]),
+        its corresponding PR number will be included in the list.
+    """
+
+    def __init__(self, repo_url=None, TOKEN=None):
+        """
+        __init__ Initialize a new GitHubPR instance.
+
+        This method authenticates with the GitHub API using the 'gh' CLI tool
+        when the TOKEN is not provided. The repository comes from the 'REPO_URL'
+        environment variable when repo_url is not provided.
+        """
+        if TOKEN is None:
+            gh_cli = which('gh')
+            gh_cli.add_default_arg(['auth', 'status', '--show-token'])
+            TOKEN = gh_cli(output=str, error=str).split('\n')[3].split(': ')[1]
+        super().__init__(TOKEN)
+
+        self.repo = self.get_repo_url(repo_url)
+        self.pulls = self.repo.get_pulls(state='open', sort='updated', direction='desc')
+        self.user = self.get_user()
+
+        self.InputFileContent = InputFileContent
+
+    def get_repo_url(self, repo_url=None):
+        """
+        get_repo_url Set the repository for the GitHubPR instance.
+
+        Parameters
+        ----------
+        repo_url : Repository URL
+            The GitHub repository.
+        """
+        if repo_url is None:
+            repo_url = os.environ.get("REPO_URL")
+        match = re.search(r"github\.com/(.+)", repo_url)
+        repo_identifier = match.group(1)[:-4]
+        return self.get_repo(repo_identifier)
+
+    def get_pr_list(self):
+        """
+        get_pr_list Get the numerical list of all pull requests.
+
+        Returns
+        -------
+        list
+            A list of all pull request numbers.
+        """
+        return [pull.number for pull in self.pulls]
+
+    def get_ci_pr_list(self, state='Ready', host=None):
+        """
+        get_ci_pr_list Get a list of pull requests that match a specified state and host.
+
+        Parameters
+        ----------
+        state : str, optional
+            The state of the pull requests to get (default is 'Ready').
+        host : str, optional
+            The host of the pull requests to get. If None, all hosts are included (default is None).
+
+        Returns
+        -------
+        list
+            A list of pull request numbers that match the specified state and host.
+        """
+        pr_list = []
+        for pull in self.pulls:
+            labels = pull.get_labels()
+            ci_labels = [s for s in labels if 'CI' in s.name]
+            for label in ci_labels:
+                if state in label.name:
+                    if host is not None:
+                        if host.lower() in label.name.lower():
+                            pr_list.append(pull.number)
+                            break
+                    else:
+                        pr_list.append(pull.number)
+                        break
+
+        return pr_list
diff --git a/ci/scripts/utils/launch_java_agent.sh b/ci/scripts/utils/launch_java_agent.sh
new file mode 100755
index 0000000000..81dbe002b6
--- /dev/null
+++ b/ci/scripts/utils/launch_java_agent.sh
@@ -0,0 +1,184 @@
+#!/bin/env bash
+
+set -e
+
+# ==============================================================================
+# Script Name: launch_java_agent.sh
+#
+# Description:
+#        This script automates the process of launching a Jenkins agent
+#        on a specified machine. It ensures that the necessary
+#        prerequisites are met, such as the availability of JAVA_HOME,
+#        the Jenkins agent launch directory, and proper authentication
+#        with GitHub.
+#
+#       It then proceeds to check if the Jenkins node is online and
+#       decides whether to launch the Jenkins agent based on the node's
+#       status. The agent is launched in the background,
+#       and its PID is logged for reference.
+#
+# Prerequisites:
+#         JAVA_HOME must be set to a valid JDK installation.
+#         Jenkins agent launch directory must exist and be specified.
+#         GitHub CLI (gh) must be installed and authenticated for messaging
+#           from the Jenkins controller to GitHub PRs via shell commands.
+#           TODO: Must use GitHub CLI v2.25.1 (newer versions have issues)
+#           https://github.com/cli/cli/releases/download/v2.25.1/gh_2.25.1_linux_amd64.tar.gz
+#         Jenkins controller URL and authentication token must be provided.
+#         jenkins-secret-file:
+#           Must be present in the Jenkins agent launch directory.
+#           This file contains the secret key for the Jenkins agent
+#           established by the Jenkins administrator for each Node.
+#         jenkins_token:
+#           Must be present in the Jenkins agent launch directory.
+#           This file contains the user authentication token for the Jenkins controller
+#           to use the Remote API. This token can be generated by the user
+#           on the Jenkins controller.
+#         controller_user:
+#           Must be set to the Jenkins controller username corresponding to the jenkins_token.
+#
+# Usage: ./launch_java_agent.sh [-n] [-f]
+#        The optional '-n' (now) flag makes the script launch the Jenkins
+#        agent immediately, without waiting before trying again.
+#        The optional '-f' (force) flag launches the Jenkins agent regardless of the node status.
+#
+# ==============================================================================
+
+force_launch="False"
+skip_wait="False"
+while getopts ":fnh" flag; do
+  case "${flag}" in
+    f) force_launch="True";;
+    n) skip_wait="True";;
+    h) echo "Usage: ./launch_java_agent.sh [now] [force]
+Two mutually exclusive optional arguments:
+    -n (now) causes the script to launch the Jenkins agent without waiting before trying again.
+    -f (force) forces the script to launch the Jenkins regarless of its connection status."
+       exit 0 ;;
+    *) echo "Unknown flag: ${flag}"
+       exit 1;;
+  esac
+done
+
+controller_url="https://jenkins.epic.oarcloud.noaa.gov"
+controller_user=${controller_user:-"terry.mcguinness"}
+controller_user_auth_token="jenkins_token"
+
+HOMEgfs="$(cd "$(dirname  "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
+host=$(hostname)
+
+#########################################################################
+#  Set up runtime environment variables for accounts on supported machines
+#########################################################################
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
+  hera | orion | hercules | wcoss2)
+    echo "Launch Jenkins Java Controler on ${MACHINE_ID}";;
+  *)
+    echo "Unsupported platform. Exiting with error."
+    exit 1;;
+esac
+
+LOG=launched_agent-$(date +%Y%m%d%M).log
+rm -f "${LOG}"
+
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
+
+JAVA_HOME="${JENKINS_AGENT_LANUCH_DIR}/JAVA/jdk-17.0.10"
+if [[ ! -d "${JAVA_HOME}" ]]; then
+  JAVA_HOME=/usr/lib/jvm/jre-17
+  if [[ ! -d "${JAVA_HOME}" ]]; then
+    echo "ERROR: JAVA_HOME not found. Exiting with error."
+    exit 1
+  fi
+fi
+
+JAVA="${JAVA_HOME}/bin/java"
+echo "JAVA VERSION: "
+${JAVA} -version
+
+export GH="${HOME}/bin/gh"
+[[ -f "${GH}" ]] || echo "gh is not installed in ${HOME}/bin"
+${GH} --version
+
+check_mark=$(gh auth status -t 2>&1 | grep "Token:" | awk '{print $1}') || true
+if [[ "${check_mark}" != "✓" ]]; then
+  echo "gh not authenticating with emcbot token"
+  exit 1
+fi
+echo "gh authenticating with emcbot TOKEN ok"
+
+if [[ -d "${JENKINS_AGENT_LANUCH_DIR}" ]]; then
+  echo "Jenkins Agent Lanuch Directory: ${JENKINS_AGENT_LANUCH_DIR}"
+else
+  echo "ERROR: Jenkins Agent Lanuch Directory not found. Exiting with error."
+  exit 1
+fi
+cd "${JENKINS_AGENT_LANUCH_DIR}"
+
+if ! [[ -f agent.jar ]]; then
+  curl -sO "${controller_url}/jnlpJars/agent.jar"
+  echo "Updated agent.jar downloaded"
+fi
+
+if [[ ! -f "${controller_user_auth_token}" ]]; then
+   echo "User Jenkins authetication TOKEN to the controller for using the Remote API does not exist"
+   exit 1
+fi
+JENKINS_TOKEN=$(cat "${controller_user_auth_token}")
+
+cat << EOF > parse.py
+#!/usr/bin/env python3
+import json,sys
+with open(sys.argv[1], 'r') as file:
+    data = json.load(file)
+print(data.get('offline','True'))
+EOF
+chmod u+x parse.py
+
+check_node_online() {
+    rm -f curl_response
+    curl_response=$(curl --silent -u "${controller_user}:${JENKINS_TOKEN}" "${controller_url}/computer/${MACHINE_ID^}-EMC/api/json?pretty=true") || true
+    if [[ "${curl_response}" == "" ]]; then
+       echo "ERROR: Jenkins controller not reachable. Exiting with error."
+       exit 1
+    fi
+    echo -n "${curl_response}" > curl_response
+    ./parse.py curl_response
+}
+
+launch_agent () {
+    echo "Launching Jenkins Agent on ${host}"
+    command="nohup ${JAVA} -jar agent.jar -jnlpUrl ${controller_url}/computer/${MACHINE_ID^}-EMC/jenkins-agent.jnlp  -secret @jenkins-secret-file -workDir ${JENKINS_WORK_DIR}"
+    echo -e "Launching Jenkins Agent on ${host} with the command:\n${command}" >& "${LOG}"
+    ${command} >> "${LOG}" 2>&1 &
+    nohup_PID=$!
+    echo "Java agent running on PID: ${nohup_PID}" >> "${LOG}" 2>&1
+}
+
+if [[ "${force_launch}" == "True" ]]; then
+  launch_agent
+  exit
+fi
+
+offline=$(set -e; check_node_online)
+
+if [[ "${offline}" != "False" ]]; then
+   if [[ "${skip_wait}" != "True" ]]; then
+      echo "Jenkins Agent is offline. Waiting 5 more minutes to check again in the event it is a temp network issue"
+      sleep 300
+      offline=$(set -e; check_node_online)
+   fi
+   if [[ "${offline}" != "False" ]]; then
+     lauch_agent
+    else
+      echo "Jenkins Agent is online (nothing done)"
+    fi
+else
+  echo "Jenkins Agent is online (nothing done)"
+fi
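Typical invocations, following the usage text above (run on a supported machine with the prerequisites in place):

    ./launch_java_agent.sh        # check the node, wait 5 minutes if offline, then launch if still offline
    ./launch_java_agent.sh -n     # skip the wait and launch immediately when the node is offline
    ./launch_java_agent.sh -f     # force a launch regardless of the node's reported status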
diff --git a/ci/scripts/utils/parse_yaml.py b/ci/scripts/utils/parse_yaml.py
new file mode 100755
index 0000000000..6950fead25
--- /dev/null
+++ b/ci/scripts/utils/parse_yaml.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+
+"""
+This script parses a yaml file and returns the value of a specified key.
+"""
+
+import os
+import sys
+from wxflow import AttrDict, parse_j2yaml
+from argparse import ArgumentParser
+from pathlib import Path
+
+_here = os.path.dirname(__file__)
+_top = os.path.abspath(os.path.join(os.path.abspath(_here), '../../..'))
+
+description = """parse yaml file and return value of key"""
+
+
+def parse_args():
+    """
+    Parse command-line arguments.
+
+    Returns:
+        argparse.Namespace: The parsed command-line arguments.
+    """
+
+    parser = ArgumentParser(description=description)
+    parser.add_argument('-y', '--yaml', help='full path to yaml file to parse', type=Path, required=True)
+    parser.add_argument('-k', '--key', help='key to return value of', type=str, required=True)
+    parser.add_argument('-s', '--string', help='output results as strings', action="store_true", required=False)
+    return parser.parse_args()
+
+
+def yq(yamlfile, key):
+    """
+    Parse a yaml file and return the value of a specified key.
+
+    Args:
+        yamlfile (Path): The path to the yaml file.
+        key (str): The key to return the value of.
+
+    Returns:
+        The value of the specified key in the yaml file.
+    """
+
+    data = AttrDict(HOMEgfs=_top)
+    data.update({'HOMEgfs': _top})
+    ydict = parse_j2yaml(path=yamlfile, data=data)
+    if key == 'all':
+        return ydict
+    list_keys = key.split('.')
+    for k in list_keys:
+        ydict = ydict.get(k, None)
+        if ydict is None:
+            break
+    return ydict
+
+
+if __name__ == '__main__':
+    """
+    Main function. Parses command-line arguments and prints the value of the specified key in the specified yaml file.
+    """
+
+    args = parse_args()
+    values = yq(args.yaml, args.key)
+    if args.string and isinstance(values, list):
+        for value in values:
+            print(value)
+    else:
+        print(values)
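Example of the call driver.sh makes above to read the skip_ci_on_hosts list from a CI case file (the case file name is illustrative; nested keys may be given dot-separated, and --key all dumps the whole parsed dictionary):

    ./parse_yaml.py --yaml "${HOMEgfs}/ci/cases/pr/C48_ATM.yaml" --key skip_ci_on_hosts --string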
diff --git a/ci/scripts/utils/pr_list_database.py b/ci/scripts/utils/pr_list_database.py
new file mode 100755
index 0000000000..3b53d21134
--- /dev/null
+++ b/ci/scripts/utils/pr_list_database.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+from typing import List
+from wxflow import SQLiteDB, SQLiteDBError
+from githubpr import GitHubPR
+from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, REMAINDER
+import argparse
+
+
+def full_path(string):
+    """
+    full_path Get the absolute path of a file or directory.
+    Parameters
+    ----------
+    string : str
+        The relative path of the file or directory.
+
+    Returns
+    -------
+    str
+        The absolute path of the file or directory.
+
+    Raises
+    ------
+    NotADirectoryError
+        If the provided string does not represent a valid file or directory.
+    """
+
+    if os.path.isfile(string) or os.path.isdir(os.path.dirname(string)):
+        return os.path.abspath(string)
+    else:
+        raise NotADirectoryError(string)
+
+
+def create_table(db: SQLiteDB):
+    """
+    Create a new table in a database.
+
+    Parameters
+    ----------
+    db : SQLiteDB
+        The database to create.
+    """
+
+    db.create_table('pr_list', ['pr INTEGER PRIMARY KEY UNIQUE', 'state TEXT', 'status TEXT', 'reset_id INTEGER', 'cases TEXT'])
+
+
+def add_pr(ci_database: SQLiteDB, pr: str) -> bool:
+    """
+    Add a pull request to the database.
+
+    Parameters
+    ----------
+    ci_database : SQLiteDB
+        The database to add the pull request to.
+
+    pr : str
+        The pull request to add.
+    """
+
+    entities = (pr, 'Open', 'Ready', 0, 'ci_repo')
+    try:
+        ci_database.insert_data('pr_list', entities)
+        return True
+    except (SQLiteDBError.IntegrityError) as e:
+        if 'unique' in str(e).lower():
+            print(f"pr {pr} already is in list: nothing added")
+            return False
+
+
+def update_pr(ci_database: SQLiteDB, args: argparse.Namespace):
+    """
+    Update a pull request in the database.
+
+    Parameters
+    ----------
+    ci_database : SQLiteDB
+        The database to update the pull request in.
+
+    args : argparse.Namespace
+        The command line arguments.
+    """
+
+    if len(args.update_pr) < 2:
+        print(f"update_pr must have at least one vaule to update")
+        sys.exit(0)
+
+    update_list = ['state', 'status', 'reset_id', 'cases']
+    for value in args.update_pr[1:]:
+        update = update_list.pop(0)
+        ci_database.update_data('pr_list', update, value, 'pr', args.update_pr[0])
+
+
+def display_db(ci_database: SQLiteDB, display: List[str]) -> list:
+    """
+    Display the database.
+
+    Parameters
+    ----------
+    ci_database : SQLiteDB
+        The database to display.
+
+    display : argparse.Namespace
+        The command line arguments.
+
+    Returns
+    -------
+    list
+        The rows of the database.
+    """
+
+    values = []
+    if len(display) == 1:
+        rows = ci_database.fetch_data('pr_list', ['pr', 'state', 'status', 'reset_id', 'cases'], f"pr = '{display[0]}'")
+    if len(display) == 2:
+        rows = ci_database.fetch_data('pr_list', ['pr'], f"state = '{display[0]}' AND status = '{display[1]}'")
+    if len(display) == 0:
+        rows = ci_database.fetch_data('pr_list', ['pr', 'state', 'status', 'reset_id', 'cases'])
+    for row in rows:
+        values.append(' '.join(map(str, row)))
+
+    return values
+
+
+def update_database(ci_database: SQLiteDB) -> list:
+    """
+    Update the database from the GitHub PRs
+    - only PRs from host machine are added to the database
+    - if the PR is already in the database, it is added to the kill list
+
+    Parameters
+    ----------
+    ci_database : SQLiteDB
+        The database to update.
+
+    Returns
+    -------
+    list
+        The kill list of pull requests.
+    """
+
+    gh = GitHubPR()
+    pr_ready_list, pr_kill_list = gh.get_open_pr_list()
+    for pr in pr_ready_list:
+        if not add_pr(ci_database, str(pr)):
+            if pr not in pr_kill_list:
+                pr_kill_list.append(pr)
+    pr_kill_list = list(set(pr_kill_list))
+    return pr_kill_list
+
+
+def input_args():
+    """
+    Parse command line arguments.
+
+    Returns
+    -------
+    argparse.Namespace
+        The parsed command line arguments.
+    """
+    description = """Arguments for creating and updating db file for pr states
+    """
+
+    parser = ArgumentParser(description=description,
+                            formatter_class=ArgumentDefaultsHelpFormatter)
+
+    parser.add_argument('--dbfile', help='SQLite3 database file with PR list', type=full_path)
+    parser.add_argument('--create', help='create sqlite file for pr list status', action='store_true', required=False)
+    parser.add_argument('--add_pr', nargs=1, metavar='PR', help='add new pr to list (defults to: Open,Ready)', required=False)
+    parser.add_argument('--remove_pr', nargs=1, metavar='PR', help='removes pr from list', required=False)
+    parser.add_argument('--update_pr', nargs=REMAINDER, metavar=('pr', 'state', 'status', 'reset_id', 'cases'),
+                        help='updates state and status of a given pr', required=False)
+    parser.add_argument('--display', nargs='*', help='output pr table', required=False)
+    parser.add_argument('--list', nargs=2, metavar=('state', 'status'), required=False)
+    parser.add_argument('--update_database', help='use labels from Open GitHub PRs to update database state and produces a kill list',
+                        action='store_true', required=False)
+    args = parser.parse_args()
+    return args
+
+
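+# Example invocations (illustrative only; substitute the actual script name and database file):
+#   ./pr_list_database.py --dbfile ci.db --create
+#   ./pr_list_database.py --dbfile ci.db --add_pr 1234
+#   ./pr_list_database.py --dbfile ci.db --update_pr 1234 Open Running
+#   ./pr_list_database.py --dbfile ci.db --display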
+if __name__ == '__main__':
+
+    args = input_args()
+
+    if not args.create:
+        if not os.path.isfile(args.dbfile):
+            print(f'Error: {args.dbfile} does not exist')
+            sys.exit(-1)
+
+    ci_database = SQLiteDB(args.dbfile)
+    ci_database.connect()
+
+    if args.create:
+        create_table(ci_database)
+    if args.add_pr:
+        add_pr(ci_database, args.add_pr[0])
+    if args.update_pr:
+        update_pr(ci_database, args)
+    if args.remove_pr:
+        ci_database.remove_data('pr_list', 'PR', args.remove_pr[0])
+    if args.display is not None:
+        for rows in display_db(ci_database, args.display):
+            print(rows)
+    if args.list:
+        for rows in display_db(ci_database, [args.list[0], args.list[1]]):
+            print(rows, end=' ')
+        print()
+    if args.update_database:
+        pr_kill_list = update_database(ci_database)
+        for pr in pr_kill_list:
+            print(pr, end=' ')
+        print()
+
+    ci_database.disconnect()
diff --git a/ci/scripts/utils/publish_logs.py b/ci/scripts/utils/publish_logs.py
new file mode 100755
index 0000000000..283c84a8d1
--- /dev/null
+++ b/ci/scripts/utils/publish_logs.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+
+import os
+from githubpr import GitHubPR, GitHubDBError
+from argparse import ArgumentParser, FileType
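+
+# Illustrative usage (script path and identifier strings are examples only; exactly one
+# of --gist or --repo must be given):
+#   ./publish_logs.py --file error.log --gist PR_1234
+#   ./publish_logs.py --file error.log other.log --repo PR_1234_RUNTESTS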
+
+
+def parse_args():
+    """
+    Parse command line arguments.
+
+    Returns
+    -------
+    argparse.Namespace
+        The parsed command line arguments.
+    """
+
+    description = """Arguments for creating and updating error log files
+    """
+    parser = ArgumentParser(description=description)
+
+    parser.add_argument('--file', help='path to file for uploading to GitHub', required=False, type=FileType('r'), nargs='+')
+    parser.add_argument('--gist', help='create a gist of the file', nargs=1, metavar='identifier_string', required=False)
+    parser.add_argument('--repo', help='create a file in a repo', nargs=1, metavar='path_header', required=False)
+    args = parser.parse_args()
+    if bool(args.gist) == bool(args.repo):  # Exactly one of the two is required
+        parser.error("Exactly one of --gist and --repo is required")
+    return args
+
+
+def add_logs_to_gist(args, emcbot_gh):
+    """
+    Adds log files to a GitHub gist.
+
+    Parameters
+    ----------
+    args : Namespace
+        The arguments parsed from the command line.
+    emcbot_gh : GitHubPR
+        The GitHubPR object to interact with GitHub.
+
+    Prints
+    ------
+    The URL of the created gist.
+    """
+
+    gist_files = {}
+    for file in args.file:
+        with open(file.name, 'r', encoding='latin-1') as file_handle:
+            file_content = file_handle.read()
+        gist_files[os.path.basename(file.name)] = emcbot_gh.InputFileContent(file_content)
+
+    gist = emcbot_gh.user.create_gist(public=True, files=gist_files, description=f"error log file from CI run {args.gist[0]}")
+    print(gist.html_url)
+
+
+def upload_logs_to_repo(args, emcbot_gh, emcbot_ci_url):
+    """
+    Upload log files to a repository.
+
+    Parameters
+    ----------
+    args : Namespace
+        The arguments parsed from the command line.
+    emcbot_gh : GitHubPR
+        The GitHubPR object to interact with GitHub.
+    emcbot_ci_url : str
+        The URL of the repository to upload the logs to.
+
+    Prints
+    ------
+    The URL of the uploaded file in the repository.
+    """
+
+    path_header = args.repo[0]
+    repo_branch = "error_logs"
+    repo_path = "ci/error_logs"
+    extra = 0
+    # Find a path_header that is not already used in the error_logs branch by appending
+    # an incrementing suffix; a GithubException means the candidate path is free.
+    while True:
+        try:
+            extra += 1
+            file_path_in_repo = f"{repo_path}/{path_header}/" + str(os.path.basename(args.file[0].name))
+            emcbot_gh.repo.get_contents(file_path_in_repo, ref='error_logs')
+            path_header = f'{args.repo[0]}_{str(extra)}'
+        except GitHubDBError.GithubException:
+            break
+
+    for file in args.file:
+        with open(file.name, 'r', encoding='latin-1') as file_handle:
+            file_content = file_handle.read()
+        file_path_in_repo = f"{repo_path}/{path_header}/" + str(os.path.basename(file.name))
+        emcbot_gh.repo.create_file(file_path_in_repo, "Adding error log file", file_content, branch="error_logs")
+
+    file_url = f"{emcbot_ci_url.rsplit('.',1)[0]}/tree/{repo_branch}/{repo_path}/{path_header}"
+    print(file_url)
+
+
+if __name__ == '__main__':
+
+    args = parse_args()
+    emcbot_ci_url = "https://github.com/emcbot/ci-global-workflows.git"
+    emcbot_gh = GitHubPR(repo_url=emcbot_ci_url)
+
+    if args.gist:  # Add error logs to a gist in GitHub emcbot's account
+        add_logs_to_gist(args, emcbot_gh)
+
+    if args.repo:  # Upload error logs to emcbot's ci-global-workflows error_logs branch
+        upload_logs_to_repo(args, emcbot_gh, emcbot_ci_url)
diff --git a/ci/scripts/utils/rocotostat.py b/ci/scripts/utils/rocotostat.py
new file mode 100755
index 0000000000..70c672f0e8
--- /dev/null
+++ b/ci/scripts/utils/rocotostat.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import copy
+from time import sleep
+
+from wxflow import which, Logger, CommandNotFoundError, ProcessError
+from argparse import ArgumentParser, FileType
+
+from collections import Counter
+
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False)
+
+
+def attempt_multiple_times(expression, max_attempts, sleep_duration=0, exception_class=Exception):
+    """
+    Retries a function multiple times.
+
+    Try to execute the function expression up to max_attempts times, ignoring any exceptions
+    of the type exception_class. It waits for sleep_duration seconds between attempts.
+
+    Parameters
+    ----------
+    expression : callable
+        The function to be executed.
+    max_attempts : int
+        The maximum number of attempts to execute the function.
+    sleep_duration : int, optional
+        The number of seconds to wait between attempts. Default is 0.
+    exception_class : Exception, optional
+        The type of exception to catch. Default is the base Exception class, catching all exceptions.
+
+    Returns
+    -------
+    The return value of the function expression.
+
+    Raises
+    ------
+    exception_class
+        If the function expression raises an exception of type exception_class
+        in all max_attempts attempts.
+
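+    Examples
+    --------
+    A hypothetical retry of a flaky command, sleeping 60 seconds between up to
+    three attempts (``fetch_status`` is illustrative only):
+
+    >>> status = attempt_multiple_times(lambda: fetch_status(), 3, 60, ProcessError)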
+    """
+
+    attempt = 0
+    last_exception = None
+    while attempt < max_attempts:
+        try:
+            return expression()
+        except exception_class as e:
+            last_exception = e
+            attempt += 1
+            sleep(sleep_duration)
+    raise last_exception
+
+
+def input_args():
+    """
+    Parse command-line arguments.
+
+    Returns
+    -------
+    args : Namespace
+        The parsed command-line arguments.
+    """
+
+    description = """
+        Using rocotostat to get the status of all jobs, this script
+        determines rocoto_state: if all cycles are done, then rocoto_state is Done.
+        Assuming rocotorun has just been run and rocoto_state is not Done,
+        rocoto_state is Stalled if there are no jobs that are RUNNING, SUBMITTING, or QUEUED.
+        """
+
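+    # Example invocation (paths are illustrative):
+    #   rocotostat.py -w $EXPDIR/$PSLOT.xml -d $EXPDIR/$PSLOT.db --verbose
+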
+    parser = ArgumentParser(description=description)
+
+    parser.add_argument('-w', help='workflow_document', type=FileType('r'), required=True)
+    parser.add_argument('-d', help='database_file', metavar='Database File', type=FileType('r'), required=True)
+    parser.add_argument('--verbose', action='store_true', help='List the states and the number of jobs that are in each', required=False)
+    parser.add_argument('-v', action='store_true', help='List the states and the number of jobs that are in each', required=False)
+    parser.add_argument('--export', action='store_true', help='create and export list of the status values for bash', required=False)
+
+    args = parser.parse_args()
+
+    return args
+
+
+def rocotostat_summary(rocotostat):
+    """
+    rocotostat_summary Run rocotostat and process its output.
+
+    rocotostat_summary(rocotostat) adds a default argument '--summary' to the rocotostat
+    command, runs it, and processes its output to return a dictionary with the total
+    number of cycles and the number of cycles marked as 'Done'.
+
+    Input:
+    rocotostat - The rocotostat command.
+
+    Output:
+    rocoto_status - A dictionary with the total number of cycles and the number of cycles marked as 'Done'.
+    """
+    rocotostat = copy.deepcopy(rocotostat)
+    rocotostat.add_default_arg('--summary')
+    rocotostat_output = attempt_multiple_times(lambda: rocotostat(output=str), 3, 90, ProcessError)
+    rocotostat_output = rocotostat_output.splitlines()[1:]
+    rocotostat_output = [line.split()[0:2] for line in rocotostat_output]
+
+    rocoto_status = {
+        'CYCLES_TOTAL': len(rocotostat_output),
+        'CYCLES_DONE': sum([sublist.count('Done') for sublist in rocotostat_output])
+    }
+    return rocoto_status
+
+
+def rocoto_statcount(rocotostat):
+    """
+    rocoto_statcount Run rocotostat and process its output.
+
+    rocoto_statcount(rocotostat) adds a default argument '--all' to the rocotostat
+    command, runs it, and processes its output to return a dictionary with the count
+    of each status case.
+
+    Input:
+    rocotostat - The rocotostat command.
+
+    Output:
+    rocoto_status - A dictionary with the count of each status case.
+    """
+
+    rocotostat = copy.deepcopy(rocotostat)
+    rocotostat.add_default_arg('--all')
+
+    rocotostat_output = attempt_multiple_times(lambda: rocotostat(output=str), 4, 120, ProcessError)
+    rocotostat_output = rocotostat_output.splitlines()[1:]
+    rocotostat_output = [line.split()[0:4] for line in rocotostat_output]
+    rocotostat_output = [line for line in rocotostat_output if len(line) != 1]
+
+    status_cases = ['SUCCEEDED', 'FAIL', 'DEAD', 'RUNNING', 'SUBMITTING', 'QUEUED']
+
+    rocoto_status = {}
+    status_counts = Counter(case for sublist in rocotostat_output for case in sublist)
+    for case in status_cases:
+        rocoto_status[case] = status_counts[case]
+
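+    # Illustrative shape of the returned dictionary (counts depend on the workflow):
+    #   {'SUCCEEDED': 20, 'FAIL': 0, 'DEAD': 0, 'RUNNING': 2, 'SUBMITTING': 0, 'QUEUED': 1}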
+    return rocoto_status
+
+
+def is_done(rocoto_status):
+    """
+    is_done Check if all cycles are done.
+
+    is_done(rocoto_status) checks if the total number of cycles equals the number of
+    done cycles in the rocoto_status dictionary.
+
+    Input:
+    rocoto_status - A dictionary containing CYCLES_TOTAL and CYCLES_DONE.
+
+    Output:
+    boolean - True if all cycles are done, False otherwise.
+    """
+
+    return rocoto_status['CYCLES_TOTAL'] == rocoto_status['CYCLES_DONE']
+
+
+def is_stalled(rocoto_status):
+    """
+    is_stalled Check if all cycles are stalled.
+
+    is_stalled(rocoto_status) checks if all cycles are stalled by verifying if
+    there are no jobs that are RUNNING, SUBMITTING, or QUEUED.
+
+    Input:
+    rocoto_status - A dictionary with the count of each status case.
+
+    Output:
+    boolean - True if all cycles are stalled, False otherwise.
+    """
+
+    return rocoto_status['RUNNING'] + rocoto_status['SUBMITTING'] + rocoto_status['QUEUED'] == 0
+
+
+if __name__ == '__main__':
+    """
+    main Execute the script.
+
+    main() parses the input arguments, checks that the rocotostat command is available,
+    adds default arguments to the rocotostat command, runs it, and reports specific
+    information about the rocoto workflow to stdout.
+    """
+
+    args = input_args()
+
+    try:
+        rocotostat = which("rocotostat")
+    except CommandNotFoundError:
+        logger.exception("rocotostat not found in PATH")
+        raise CommandNotFoundError("rocotostat not found in PATH")
+
+    rocotostat.add_default_arg(['-w', os.path.abspath(args.w.name), '-d', os.path.abspath(args.d.name)])
+
+    rocoto_status = rocoto_statcount(rocotostat)
+    rocoto_status.update(rocotostat_summary(rocotostat))
+
+    error_return = 0
+    if is_done(rocoto_status):
+        rocoto_state = 'DONE'
+    elif rocoto_status['DEAD'] > 0:
+        error_return = rocoto_status['FAIL'] + rocoto_status['DEAD']
+        rocoto_state = 'FAIL'
+    elif 'UNKNOWN' in rocoto_status:
+        error_return = rocoto_status['UNKNOWN']
+        rocoto_state = 'UNKNOWN'
+    elif is_stalled(rocoto_status):
+        # Re-check after a delay to avoid flagging a transient stall
+        rocoto_status = attempt_multiple_times(lambda: rocoto_statcount(rocotostat), 2, 120, ProcessError)
+        if is_stalled(rocoto_status):
+            error_return = 3
+            rocoto_state = 'STALLED'
+        else:
+            rocoto_state = 'RUNNING'
+    else:
+        rocoto_state = 'RUNNING'
+
+    rocoto_status['ROCOTO_STATE'] = rocoto_state
+
+    if args.verbose or args.v:
+        for status in rocoto_status:
+            if args.v:
+                print(f'{status}:{rocoto_status[status]}')
+            else:
+                print(f'Number of {status} : {rocoto_status[status]}')
+
+    if args.export:
+        for status in rocoto_status:
+            print(f'export {status}={rocoto_status[status]}')
+    else:
+        print(rocoto_state)
+
+    sys.exit(error_return)
diff --git a/ci/scripts/utils/wxflow b/ci/scripts/utils/wxflow
new file mode 120000
index 0000000000..54d0558aba
--- /dev/null
+++ b/ci/scripts/utils/wxflow
@@ -0,0 +1 @@
+../../../sorc/wxflow/src/wxflow
\ No newline at end of file
diff --git a/docs/doxygen/mainpage.h b/docs/doxygen/mainpage.h
index 19a51be272..92d602aa82 100644
--- a/docs/doxygen/mainpage.h
+++ b/docs/doxygen/mainpage.h
@@ -24,7 +24,7 @@ To setup an experiment, a python script <b>\c setup_expt.py</b> (located in <b>\
     usage: setup_expt.py [-h] --pslot PSLOT
                      [--configdir CONFIGDIR] [--idate IDATE] [--icsdir ICSDIR]
                      [--resdetatmos RESDET] [--resensatmos RESENS] [--comroot COMROOT]
-                     [--expdir EXPDIR] [--nens NENS] [--cdump CDUMP]
+                     [--expdir EXPDIR] [--nens NENS] [--run RUN]
 
     Setup files and directories to start a GFS parallel. Create EXPDIR, copy
     config files Create ROTDIR experiment directory structure, link initial
@@ -52,7 +52,7 @@ To setup an experiment, a python script <b>\c setup_expt.py</b> (located in <b>\
                         (default: None)
         --nens       number of ensemble members
                         (default: 80)
-        --cdump      CDUMP to start the experiment
+        --run        RUN to start the experiment
                         (default: gdas)
 
 The above script creates directories <b>\c EXPDIR</b> and <b>\c ROTDIR</b>. It will make links for initial conditions from a location provided via the <b>\c --icsdir</b> argument for a chosen resolution for the control <b>\c --resdetatmos</b> and the ensemble <b>\c --resensatmos</b>. Experiment name is controlled by the input argument <b>\c --pslot</b>. The script will ask user input in case any of the directories already exist. It will copy experiment configuration files into the <b>\c EXPDIR</b> from <b>\c CONFIGDIR</b>.
diff --git a/docs/source/clone.rst b/docs/source/clone.rst
index bad3f0e9f6..c365f0ed0a 100644
--- a/docs/source/clone.rst
+++ b/docs/source/clone.rst
@@ -39,6 +39,13 @@ For coupled cycling (include new UFSDA) use the `-gu` options during build:
    ./build_all.sh -gu
 
 
+For building without PDLIB (unstructured grid) for the wave model, use the `-w` option during build:
+
+::
+
+   ./build_all.sh -w
+
+
 Build workflow components and link workflow artifacts such as executables, etc.
 
 ::
diff --git a/docs/source/components.rst b/docs/source/components.rst
index 98e76b467b..869ef89bab 100644
--- a/docs/source/components.rst
+++ b/docs/source/components.rst
@@ -28,7 +28,7 @@ Components included as submodules:
 * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values
 * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well
 * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model
-* **wxflow** Collection of python utilities for weather workflows (https://github.com/NOAA-EMC/wxflow) 
+* **wxflow** (https://github.com/NOAA-EMC/wxflow): Collection of python utilities for weather workflows
 * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only
 
 .. note::
@@ -57,19 +57,20 @@ Data
 
 Observation data, also known as dump data, is prepared in production and then archived in a global dump archive (GDA) for use by users when running cycled experiments. The GDA (identified as ``$DMPDIR`` in the workflow) is available on supported platforms and the workflow system knows where to find the data.
 
-* Hera: /scratch1/NCEPDEV/global/glopara/dump
-* Orion/Hercules: /work/noaa/rstprod/dump
-* Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump
-* WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump
-* S4: /data/prod/glopara/dump
+* Hera: ``/scratch1/NCEPDEV/global/glopara/dump``
+* Orion/Hercules: ``/work/noaa/rstprod/dump``
+* Jet: ``/mnt/lfs4/HFIP/hfv3gfs/glopara/dump``
+* WCOSS2: ``/lfs/h2/emc/global/noscrub/emc.global/dump``
+* S4: ``/data/prod/glopara/dump``
 
 -----------------------------
 Global Dump Archive Structure
 -----------------------------
 
-The global dump archive (GDA) mimics the structure of its production source: ``DMPDIR/CDUMP.PDY/[CC/atmos/]FILES``
+The global dump archive (GDA) mimics the structure of its production source:
 
-The ``CDUMP`` is either gdas, gfs, or rtofs. All three contain production output for each day (``PDY``). The gdas and gfs folders are further broken into cycle (``CC``) and component (``atmos``).
+* GDAS/GFS: ``DMPDIR/gdas[gfs].PDY/CC/atmos/FILES``
+* RTOFS: ``DMPDIR/rtofs.PDY/FILES``
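+
+For example, a 00z GDAS dump file would sit at a path of the form (date and file name are illustrative)::
+
+   $DMPDIR/gdas.20240115/00/atmos/gdas.t00z.prepbufr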
 
 The GDA also contains special versions of some datasets and experimental data that is being evaluated ahead of implementation into production. The following subfolder suffixes exist:
 
@@ -81,6 +82,7 @@ The GDA also contains special versions of some datasets and experimental data th
 +--------+------------------------------------------------------------------------------------------------------+
 | ur     | Un-restricted versions of restricted files in production. Produced and archived on a 48hrs delay.    |
 |        | Some restricted datasets are unrestricted. Data amounts: restricted > un-restricted > non-restricted |
+|        | Limited availability. Discontinued producing mid-2023.                                               |
 +--------+------------------------------------------------------------------------------------------------------+
 | x      | Experimental global datasets being evaluated for production. Dates and types vary depending on       |
 |        | upcoming global upgrades.                                                                            |
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 89526d9f69..81f231f6b0 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -13,13 +13,14 @@
 import os
 import sys
 sys.path.insert(0, os.path.abspath('.'))
-
+from datetime import datetime
 
 # -- Project information -----------------------------------------------------
 
 project = 'Global-workflow'
-copyright = '2023, Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla'
-author = 'Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla'
+year = datetime.now().year
+copyright = f"2015-{year} NOAA/NWS/NCEP/EMC"
+author = 'Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, and numerous collaborators and contributors'
 
 # The full version, including alpha/beta/rc tags
 release = '0.1'
diff --git a/docs/source/configure.rst b/docs/source/configure.rst
index 12c2f75a48..439c5df110 100644
--- a/docs/source/configure.rst
+++ b/docs/source/configure.rst
@@ -4,58 +4,60 @@ Configure Run
 
 The global-workflow configs contain switches that change how the system runs. Many defaults are set initially. Users wishing to run with different settings should adjust their $EXPDIR configs and then rerun the ``setup_xml.py`` script since some configuration settings/switches change the workflow/xml ("Adjusts XML" column value is "YES").
 
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| Switch         | What                             | Default       | Adjusts XML | More Details                                      |
-+================+==================================+===============+=============+===================================================+
-| APP            | Model application                | ATM           | YES         | See case block in config.base for options         |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DOIAU          | Enable 4DIAU for control         | YES           | NO          | Turned off for cold-start first half cycle        |
-|                | with 3 increments                |               |             |                                                   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DOHYBVAR       | Run EnKF                         | YES           | YES         | Don't recommend turning off                       |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DONST          | Run NSST                         | YES           | NO          | If YES, turns on NSST in anal/fcst steps, and     |
-|                |                                  |               |             | turn off rtgsst                                   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_AWIPS       | Run jobs to produce AWIPS        | NO            | YES         | downstream processing, ops only                   |
-|                | products                         |               |             |                                                   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_BUFRSND     | Run job to produce BUFR          | NO            | YES         | downstream processing                             |
-|                | sounding products                |               |             |                                                   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_GEMPAK      | Run job to produce GEMPAK        | NO            | YES         | downstream processing, ops only                   |
-|                | products                         |               |             |                                                   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_FIT2OBS     | Run FIT2OBS job                  | YES           | YES         | Whether to run the FIT2OBS job                    |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_TRACKER     | Run tracker job                  | YES           | YES         | Whether to run the tracker job                    |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_GENESIS     | Run genesis job                  | YES           | YES         | Whether to run the genesis job                    |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_GENESIS_FSU | Run FSU genesis job              | YES           | YES         | Whether to run the FSU genesis job                |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_VERFOZN     | Run GSI monitor ozone job        | YES           | YES         | Whether to run the GSI monitor ozone job          |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_VERFRAD     | Run GSI monitor radiance job     | YES           | YES         | Whether to run the GSI monitor radiance job       |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_VMINMON     | Run GSI monitor minimization job | YES           | YES         | Whether to run the GSI monitor minimization job   |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| DO_METP        | Run METplus jobs                 | YES           | YES         | One cycle spinup                                  |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| EXP_WARM_START | Is experiment starting warm      | .false.       | NO          | Impacts IAU settings for initial cycle. Can also  |
-|                | (.true.) or cold (.false)?       |               |             | be set when running ``setup_expt.py`` script with |
-|                |                                  |               |             | the ``--start`` flag (e.g. ``--start warm``)      |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| HPSSARCH       | Archive to HPPS                  | NO            | Possibly    | Whether to save output to tarballs on HPPS        |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| LOCALARCH      | Archive to a local directory     | NO            | Possibly    | Instead of archiving data to HPSS, archive to a   |
-|                |                                  |               |             | local directory, specified by ATARDIR. If         |
-|                |                                  |               |             | LOCALARCH=YES, then HPSSARCH must =NO. Changing   |
-|                |                                  |               |             | HPSSARCH from YES to NO will adjust the XML.      |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| QUILTING       | Use I/O quilting                 | .true.        | NO          | If .true. choose OUTPUT_GRID as cubed_sphere_grid |
-|                |                                  |               |             | in netcdf or gaussian_grid                        |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
-| WRITE_DOPOST   | Run inline post                  | .true.        | NO          | If .true. produces master post output in forecast |
-|                |                                  |               |             | job                                               |
-+----------------+----------------------------------+---------------+-------------+---------------------------------------------------+
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| Switch           | What                             | Default       | Adjusts XML | More Details                                      |
++==================+==================================+===============+=============+===================================================+
+| APP              | Model application                | ATM           | YES         | See case block in config.base for options         |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DEBUG_POSTSCRIPT | Debug option for PBS scheduler   | NO            | YES         | Sets debug=true for additional logging            |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DOIAU            | Enable 4DIAU for control         | YES           | NO          | Turned off for cold-start first half cycle        |
+|                  | with 3 increments                |               |             |                                                   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DOHYBVAR         | Run EnKF                         | YES           | YES         | Don't recommend turning off                       |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DONST            | Run NSST                         | YES           | NO          | If YES, turns on NSST in anal/fcst steps, and     |
+|                  |                                  |               |             | turn off rtgsst                                   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_AWIPS         | Run jobs to produce AWIPS        | NO            | YES         | downstream processing, ops only                   |
+|                  | products                         |               |             |                                                   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_BUFRSND       | Run job to produce BUFR          | NO            | YES         | downstream processing                             |
+|                  | sounding products                |               |             |                                                   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_GEMPAK        | Run job to produce GEMPAK        | NO            | YES         | downstream processing, ops only                   |
+|                  | products                         |               |             |                                                   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_FIT2OBS       | Run FIT2OBS job                  | YES           | YES         | Whether to run the FIT2OBS job                    |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_TRACKER       | Run tracker job                  | YES           | YES         | Whether to run the tracker job                    |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_GENESIS       | Run genesis job                  | YES           | YES         | Whether to run the genesis job                    |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_GENESIS_FSU   | Run FSU genesis job              | YES           | YES         | Whether to run the FSU genesis job                |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_VERFOZN       | Run GSI monitor ozone job        | YES           | YES         | Whether to run the GSI monitor ozone job          |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_VERFRAD       | Run GSI monitor radiance job     | YES           | YES         | Whether to run the GSI monitor radiance job       |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_VMINMON       | Run GSI monitor minimization job | YES           | YES         | Whether to run the GSI monitor minimization job   |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| DO_METP          | Run METplus jobs                 | YES           | YES         | One cycle spinup                                  |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| EXP_WARM_START   | Is experiment starting warm      | .false.       | NO          | Impacts IAU settings for initial cycle. Can also  |
+|                  | (.true.) or cold (.false)?       |               |             | be set when running ``setup_expt.py`` script with |
+|                  |                                  |               |             | the ``--start`` flag (e.g. ``--start warm``)      |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| HPSSARCH         | Archive to HPPS                  | NO            | Possibly    | Whether to save output to tarballs on HPPS        |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| LOCALARCH        | Archive to a local directory     | NO            | Possibly    | Instead of archiving data to HPSS, archive to a   |
+|                  |                                  |               |             | local directory, specified by ATARDIR. If         |
+|                  |                                  |               |             | LOCALARCH=YES, then HPSSARCH must =NO. Changing   |
+|                  |                                  |               |             | HPSSARCH from YES to NO will adjust the XML.      |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| QUILTING         | Use I/O quilting                 | .true.        | NO          | If .true. choose OUTPUT_GRID as cubed_sphere_grid |
+|                  |                                  |               |             | in netcdf or gaussian_grid                        |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
+| WRITE_DOPOST     | Run inline post                  | .true.        | NO          | If .true. produces master post output in forecast |
+|                  |                                  |               |             | job                                               |
++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+
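+
+For example, after changing a switch such as ``DO_BUFRSND`` in ``$EXPDIR/config.base``, regenerate the XML from the workflow directory of your clone (an illustrative sketch; adjust paths for your setup)::
+
+   cd workflow
+   ./setup_xml.py $EXPDIR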
diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst
index 3ce6a889d9..643cffdef0 100644
--- a/docs/source/hpc.rst
+++ b/docs/source/hpc.rst
@@ -4,6 +4,46 @@ HPC Settings and Help
 
 Running the GFS configurations (or almost any global workflow configuration except the coarsest) is a resource intensive exercise. This page discusses recommended HPC environmental settings and contact information in case you need assistance from a particular HPC helpdesk. While most of the documentation is based on supported NOAA platforms, the learnings here can hopefully apply to other platforms. 
 
+====================================
+Minimum system software requirements
+====================================
+
+The following system software requirements are the minimum for any new or existing system and reflect the development and testing environment on which the global workflow is maintained.  Any system that does not meet these requirements will not be supported.
+
++--------------+-------------+---------------------------------------+
+| Software     | Minimum     | Notes                                 |
+|              | supported   |                                       |
+|              | version(s)  |                                       |
++==============+=============+=======================================+
+| Bash         | 4.4.20      |                                       |
++--------------+-------------+---------------------------------------+
+| Python       | * 3.8.6     | * 3.10.x is not supported by METplus  |
+|              | * 3.10.13+  |   verification software               |
+|              | * 3.11.6+   | * 3.11.6 is packaged with spack-stack |
+|              |             | * 3.9.x is untested                   |
++--------------+-------------+---------------------------------------+
+| Spack-Stack  | 1.6.0       | * Available everywhere but WCOSS2     |
++--------------+-------------+---------------------------------------+
+| lmod         | 8.3.1       |                                       |
++--------------+-------------+---------------------------------------+
+| Slurm        | 23.02.7     | * Other schedulers may be supportable |
++--------------+-------------+---------------------------------------+
+| PBSpro       | 2022.1.1    | * Other schedulers may be supportable |
++--------------+-------------+---------------------------------------+
+| Git          | 2.29.0      | * Some components e.g. GDASApp may    |
+|              |             |   need Git-LFS for downloading test   |
+|              |             |   data                                |
++--------------+-------------+---------------------------------------+
+| Rocoto       | 1.3.5       | * 1.3.7 is required for newer         |
+|              |             |   versions of Ruby (3.2+)             |
++--------------+-------------+---------------------------------------+
+| Intel        | 2021.5.1    | * GNU compilers are not supported     |
+| Compilers    |             | * Intel LLVM compilers are not yet    |
+|              |             |   supported                           |
+|              |             | * Intel 19.x is only supported on     |
+|              |             |   WCOSS2                              |
++--------------+-------------+---------------------------------------+
+
 ================================
 Experiment troubleshooting help
 ================================
@@ -22,7 +62,7 @@ HPC helpdesks
 * Hercules:  rdhpcs.hercules.help@noaa.gov
 * HPSS: rdhpcs.hpss.help@noaa.gov
 * Gaea: oar.gfdl.help@noaa.gov
-* S4: david.huber@noaa.gov
+* S4: innocent.souopgui@noaa.gov
 * Jet: rdhpcs.jet.help@noaa.gov
 * Cloud: rdhpcs.cloud.help@noaa.gov
 
@@ -48,62 +88,7 @@ https://aim.rdhpcs.noaa.gov/
 Optimizing the global workflow on S4
 ====================================
 
-The S4 cluster is relatively small and so optimizations are recommended to improve cycled runtimes. Please contact David Huber (david.huber@noaa.gov) if you are planning on running a cycled experiment on this system to obtain optimized configuration files.
-
-============
-Git settings
-============
-
-^^^^^^
-Merges
-^^^^^^
-
-Use the following command to have merge commits include the one-line description of all the commits being merged (up to 200). You only need to do this once on each machine; it will be saved to your git settings::
-
-   git config --global merge.log 200
-
-Use the ``--no-ff`` option to make sure there is always a merge commit when a fast-forward only is available. Exception: If the merge contains only a single commit, it can be applied as a fast-forward.
-
-For any merge with multiple commits, a short synopsis of the merge should appear between the title and the list of commit titles added by merge.log.
-
-^^^^^^^
-Version
-^^^^^^^
-
-It is advised to use Git v2+ when available. At the time of writing this documentation the default Git clients on the different machines were as noted in the table below. It is recommended that you check the default modules before loading recommended ones:
-
-+---------+----------+---------------------------------------+
-| Machine  | Default  | Recommended                           |
-+---------+----------+---------------------------------------+
-| Hera     | v2.18.0  | default                               |
-+---------+----------+---------------------------------------+
-| Hercules | v2.31.1  | default                               |
-+---------+----------+---------------------------------------+
-| Orion    | v1.8.3.1 | **module load git/2.28.0**            |
-+---------+----------+---------------------------------------+
-| Jet      | v2.18.0  | default                               |
-+---------+----------+---------------------------------------+
-| WCOSS2   | v2.26.2  | default or **module load git/2.29.0** |
-+---------+----------+---------------------------------------+
-| S4       | v1.8.3.1 | **module load git/2.30.0**            |
-+---------+----------+---------------------------------------+
-| AWS PW   | v1.8.3.1 | default
-+---------+----------+---------------------------------------+
-
-^^^^^^^^^^^^^
-Output format
-^^^^^^^^^^^^^
-
-For proper display of Git command output (e.g. git branch and git diff) type the following once per machine:
-
-::
-
-   git config --global core.pager 'less -FRX'
-
-For the manage_externals utility functioning::
-
-   Error: fatal: ssh variant 'simple' does not support setting port
-   Fix: git config --global ssh.variant ssh
+The S4 cluster is relatively small, so optimizations are recommended to improve cycled runtimes. If you are planning to run a cycled experiment on this system, please contact Innocent Souopgui (innocent.souopgui@noaa.gov) to obtain optimized configuration files.
 
 ========================================
 Stacksize on R&Ds (Hera, Orion, Hercules, Jet, S4)
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 2eb786199a..a5161789b3 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -10,7 +10,7 @@ Status
 ======
 
 * State of develop (HEAD) branch: GFSv17+ development
-* State of operations (dev/gfs.v16 branch): GFS v16.3.12 `tag: [gfs.v16.3.12] <https://github.com/NOAA-EMC/global-workflow/releases/tag/gfs.v16.3.12>`_
+* State of operations (dev/gfs.v16 branch): GFS v16.3.13 `tag: [gfs.v16.3.13] <https://github.com/NOAA-EMC/global-workflow/releases/tag/gfs.v16.3.13>`_
 
 =============
 Code managers
@@ -27,6 +27,10 @@ General updates: NOAA employees and affiliates can join the gfs-announce distrib
 
 GitHub updates: Users should adjust their "Watch" settings for this repo so they receive notifications as they'd like to. Find the "Watch" or "Unwatch" button towards the top right of the `authoritative global-workflow repository page <https://github.com/NOAA-EMC/global-workflow>`_ and click it to adjust how you watch the repo.
 
+=================
+Table of Contents
+=================
+
 .. toctree:: 
    :numbered:
    :maxdepth: 3
diff --git a/docs/source/init.rst b/docs/source/init.rst
index 14a0ea0d56..69e43f9bb0 100644
--- a/docs/source/init.rst
+++ b/docs/source/init.rst
@@ -51,6 +51,7 @@ Cold-start atmosphere-only cycled C96 deterministic C48 enkf (80 members) ICs ar
    Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C96C48
    Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C96C48
    WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C96C48
+   AWS: https://noaa-nws-global-pds.s3.amazonaws.com/index.html#data/ICSDIR/C96C48
 
 Start date = 2021122018
 
@@ -111,6 +112,7 @@ Warm-start cycled w/ coupled (S2S) model C48 atmosphere C48 enkf (80 members) 5
    Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500
    WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48C48mx500
    Jet: /lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/C48C48mx500
+   AWS: https://noaa-nws-global-pds.s3.amazonaws.com/index.html#data/ICSDIR/C48C48mx500
 
 Start date = 2021032312
 
@@ -246,14 +248,15 @@ Automated Generation
 Cycled mode
 -----------
 
-Not yet supported. See :ref:`Manual Generation<manual-generation>` section below for how to create your ICs yourself (outside of workflow).
+Not yet supported. See the UFS_UTILS documentation on the gdas_init utility to generate your own ICs for cycled or forecast-only mode: https://noaa-emcufs-utils.readthedocs.io/en/latest/ufs_utils.html#gdas-init
 
 .. _forecastonly-coupled:
 
 ---------------------
 Forecast-only coupled
 ---------------------
-Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion/Hercules, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov).
+Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Sample copies of initial conditions from the prototype runs are currently maintained on Hera, Orion/Hercules, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.stage_ic``.
+Note, however, that due to the rapid changes in the model configuration, some staged initial conditions may not work.
 
 .. _forecastonly-atmonly:
 
@@ -261,7 +264,7 @@ Coupled initial conditions are currently only generated offline and copied prior
 Forecast-only mode (atm-only)
 -----------------------------
 
-The table below lists the needed initial condition files from past GFS versions to be used by the UFS_UTILS gdas_init utility. The utility will pull these files for you. See the next section (Manual Generation) for how to run the UFS_UTILS gdas_init utility and create initial conditions for your experiment.
+The table below lists, for reference, the needed initial condition files from past GFS versions to be used by the UFS_UTILS gdas_init utility. The utility will pull these files for you. See the next section (Manual Generation) for how to run the UFS_UTILS gdas_init utility and create initial conditions for your experiment.
 
 Note for table: yyyy=year; mm=month; dd=day; hh=cycle
 
@@ -284,11 +287,11 @@ Operations/production output location on HPSS: /NCEPPROD/hpssprod/runhistory/rh
 +----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+
 | v15 ops        |   gfs.t. ``hh`` z.atmanl.nemsio | gpfs_dell1_nco_ops_com_gfs_prod_gfs. ``yyyymmdd`` _ ``hh`` .gfs_nemsioa.tar | gfs. ``yyyymmdd`` /``hh``      |
 |                |                                 |                                                                             |                                |
-| pre-2020022600 |   gfs.t. ``hh`` z.sfcanl.nemsio |                                                                             |                                | 
+| pre-2020022600 |   gfs.t. ``hh`` z.sfcanl.nemsio |                                                                             |                                |
 +----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+
 | v15 ops        |   gfs.t. ``hh`` z.atmanl.nemsio | com_gfs_prod_gfs. ``yyyymmdd`` _ ``hh`` .gfs_nemsioa.tar                    | gfs. ``yyyymmdd`` /``hh``      |
 |                |                                 |                                                                             |                                |
-|                |   gfs.t. ``hh`` z.sfcanl.nemsio |                                                                             |                                |  
+|                |   gfs.t. ``hh`` z.sfcanl.nemsio |                                                                             |                                |
 +----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+
 | v16 retro      |   gfs.t. ``hh`` z.atmanl.nc     | gfs_netcdfa.tar*                                                            | gfs. ``yyyymmdd`` /``hh``/atmos|
 |                |                                 |                                                                             |                                |
@@ -312,88 +315,20 @@ Manual Generation
 *****************
 
 .. note::
-   Initial conditions cannot be generated on S4. These must be generated on another supported platform then pushed to S4. If you do not have access to a supported system or need assistance, please contact David Huber (david.huber@noaa.gov).
+   Initial conditions cannot be generated on S4. These must be generated on another supported platform then pushed to S4. If you do not have access to a supported system or need assistance, please contact Innocent Souopgui (innocent.souopgui@noaa.gov).
 
 .. _coldstarts:
 
 The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or layer amount than the operational GFS (C768C384L127).
 
-The ``chgres_cube`` code is available from the `UFS_UTILS repository <https://github.com/ufs-community/UFS_UTILS>`_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop. The ``chgres_cube`` code/scripts currently support the following GFS inputs:
+The ``chgres_cube`` code is available from the `UFS_UTILS repository <https://github.com/ufs-community/UFS_UTILS>`_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users should see the `documentation for generating initial conditions in the UFS_UTILS repository <https://noaa-emcufs-utils.readthedocs.io/en/latest/ufs_utils.html#gdas-init>`_. The ``chgres_cube`` code/scripts currently support the following GFS inputs:
 
 * pre-GFSv14
 * GFSv14
 * GFSv15
 * GFSv16
 
-Users can use the copy of UFS_UTILS that is already cloned and built within their global-workflow clone or clone/build it separately:
-
-Within a built/linked global-workflow clone:
-
-::
-
-   cd sorc/ufs_utils.fd/util/gdas_init
-
-Clone and build separately:
-
-1. Clone UFS_UTILS:
-
-::
-
-   git clone --recursive https://github.com/NOAA-EMC/UFS_UTILS.git
-
-Then switch to a different tag or use the default branch (develop).
-
-2. Build UFS_UTILS:
-
-::
-
-   sh build_all.sh
-   cd fix
-   sh link_fixdirs.sh emc $MACHINE
-
-where ``$MACHINE`` is ``wcoss2``, ``hera``, or ``jet``.
-
-.. note::
-   UFS-UTILS builds on Orion/Hercules but due to the lack of HPSS access on Orion/Hercules the ``gdas_init`` utility is not supported there.
-
-3. Configure your conversion:
-
-::
-
-   cd util/gdas_init
-   vi config
-
-Read the doc block at the top of the config and adjust the variables to meet you needs (e.g. ``yy, mm, dd, hh`` for ``SDATE``).
-
-Most users will want to adjust the following ``config`` settings for the current system design:
-
-#. EXTRACT_DATA=YES (to pull original ICs to convert off HPSS)
-#. RUN_CHGRES=YES (to run chgres_cube on the original ICs pulled off HPSS)
-#. LEVS=128 (for the L127 GFS)
-
-4. Submit conversion script:
-
-::
-
-   ./driver.$MACHINE.sh
-
-where ``$MACHINE`` is currently ``wcoss2``,  ``hera`` or ``jet``. Additional options will be available as support for other machines expands.
-
-.. note::
-   UFS-UTILS builds on Orion/Hercules but due to lack of HPSS access there is no ``gdas_init`` driver for Orion/Hercules nor support to pull initial conditions from HPSS for the ``gdas_init`` utility.
-
-Several small jobs will be submitted:
-
-  - 1 jobs to pull inputs off HPSS
-  - 1 or 2 jobs to run ``chgres_cube`` (1 for deterministic/hires and 1 for each EnKF ensemble member)
-
-The chgres jobs will have a dependency on the data-pull jobs and will wait to run until all data-pull jobs have completed.
-
-5. Check output:
-
-In the config you will have defined an output folder called ``$OUTDIR``. The converted output will be found there, including the needed abias and radstat initial condition files (if CDUMP=gdas). The files will be in the needed directory structure for the global-workflow system, therefore a user can move the contents of their ``$OUTDIR`` directly into their ``$ROTDIR``.
-
-Please report bugs to George Gayno (george.gayno@noaa.gov) and Kate Friedman (kate.friedman@noaa.gov).
+See instructions in UFS_UTILS to clone, build and generate initial conditions: https://noaa-emcufs-utils.readthedocs.io/en/latest/ufs_utils.html#gdas-init
 
 .. _warmstarts-prod:
 
@@ -449,7 +384,7 @@ The warm starts and other output from production are at C768 deterministic and C
 What files should you pull for starting a new experiment with warm starts from production?
 ------------------------------------------------------------------------------------------
 
-That depends on what mode you want to run -- forecast-only or cycled. Whichever mode, navigate to the top of your ``ROTDIR`` and pull the entirety of the tarball(s) listed below for your mode. The files within the tarball are already in the ``$CDUMP.$PDY/$CYC/$ATMOS`` folder format expected by the system.
+That depends on what mode you want to run -- forecast-only or cycled. Whichever mode, navigate to the top of your ``ROTDIR`` and pull the entirety of the tarball(s) listed below for your mode. The files within the tarball are already in the ``$RUN.$PDY/$CYC/$ATMOS`` folder format expected by the system.
 
 For forecast-only there are two tarballs to pull
 
@@ -489,7 +424,7 @@ Tarballs per cycle:
    com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp7.tar
    com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp8.tar
 
-Go to the top of your ``ROTDIR`` and pull the contents of all tarballs there. The tarballs already contain the needed directory structure.
+Go to the top of your ``ROTDIR`` and pull the contents of all tarballs there. The tarballs already contain the needed directory structure. Note, however, that the expected directory structure has since changed, so the extracted layout may require adjustment.
 
 .. _warmstarts-preprod-parallels:
 
@@ -517,6 +452,7 @@ Recent pre-implementation parallel series was for GFS v16 (implemented March 202
 * **Where do I put the warm-start initial conditions?** Extraction should occur right inside your ROTDIR. You may need to rename the enkf folder (enkf.gdas.$PDY -> enkfgdas.$PDY).
 
 Due to a recent change in the dycore, you may also need an additional offline step to fix the checksum of the NetCDF files for warm start. See the :ref:`Fix netcdf checksum section <gfsv17-checksum>`.
+The model has since undergone several updates, so the files generated this way may not be fully compatible with the current model.
 
 .. _retrospective:
 
diff --git a/docs/source/setup.rst b/docs/source/setup.rst
index 0e87ade9a5..1715899927 100644
--- a/docs/source/setup.rst
+++ b/docs/source/setup.rst
@@ -6,9 +6,13 @@ Experiment Setup
 
  ::
 
-   # Note: this will wipe your existing lmod environment
    source workflow/gw_setup.sh
 
+.. warning::
+   Sourcing gw_setup.sh will wipe your existing lmod environment.
+
+.. note::
+   A bash shell is required to source gw_setup.sh.
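+
+   For example, from a non-bash login shell (illustrative)::
+
+      bash
+      source workflow/gw_setup.sh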
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Forecast-only experiment
@@ -141,7 +145,7 @@ The following command examples include variables for reference but users should
 
    cd workflow
    ./setup_expt.py gfs cycled --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC]
-     [--resdetatmos $RESDETATMOS] [--resdetocean $RESDETOCEAN] [--resensatmos $RESENSATMOS] [--nens $NENS] [--cdump $CDUMP]
+     [--resdetatmos $RESDETATMOS] [--resdetocean $RESDETOCEAN] [--resensatmos $RESENSATMOS] [--nens $NENS] [--run $RUN]
      [--pslot $PSLOT] [--configdir $CONFIGDIR] [--comroot $COMROOT] [--expdir $EXPDIR] [--icsdir $ICSDIR]
 
 where:
@@ -166,7 +170,7 @@ where:
    * ``$RESDETOCEAN`` is the resolution of the ocean component of the deterministic forecast [default: 0.; determined based on atmosphere resolution]
    * ``$RESENSATMOS`` is the resolution of the atmosphere component of the ensemble forecast [default: 192]
    * ``$NENS`` is the number of ensemble members [default: 20]
-   * ``$CDUMP`` is the starting phase [default: gdas]
+   * ``$RUN`` is the starting phase [default: gdas]
    * ``$PSLOT`` is the name of your experiment [default: test]
    * ``$CONFIGDIR`` is the path to the config folder under the copy of the system you're using [default: $TOP_OF_CLONE/parm/config/]
    * ``$COMROOT`` is the path to your experiment output directory. Your ``ROTDIR`` (rotating com directory) will be created using ``COMROOT`` and ``PSLOT``. [default: $HOME]
diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf
index 08d0185399..03835172b9 100755
--- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf
+++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf
index b94e3a18e3..bc289b8006 100755
--- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf
+++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf
index 6611afff52..91eef7fb74 100755
--- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf
+++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf
index 1ed2568d61..044a65c843 100755
--- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf
+++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf
index d095742193..954ca49533 100755
--- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf
+++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf
index 923d208350..2fd692d1df 100755
--- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf
+++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf
index 6627b97c10..4f682a7a0a 100755
--- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf
+++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf
index 36b9272204..cc6eee326d 100755
--- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf
+++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf
index 41601c4de8..92c8c0551e 100755
--- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf
+++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf
index f766333272..53d9daf734 100755
--- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf
+++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf
index 0bc2d76455..b3bb579ca3 100755
--- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf
+++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf
@@ -1,5 +1,5 @@
 #PBS -S /bin/bash
-#PBS -N %RUN%_atmos_gempak_%CYC%
+#PBS -N %RUN%_atmos_gempak_%FHR3%_%CYC%
 #PBS -j oe
 #PBS -q %QUEUE%
 #PBS -A %PROJ%-%PROJENVIR%
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
@@ -31,9 +30,13 @@ module load gempak/${gempak_ver}
 
 module list
 
+#############################################################
+# environment settings
+#############################################################
 export cyc=%CYC%
 export cycle=t%CYC%z
 export USE_CFP=YES
+export FHR3=%FHR3%
 
 ############################################################
 # CALL executable job script here
diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf
index 9d66f4bda1..312d3dcdaa 100755
--- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf
+++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf
@@ -17,7 +17,6 @@ set -x
 export model=%model:gdas%
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf
index 2dd0bdf06c..c5f838fb5f 100755
--- a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf
+++ b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf
index 7e3282bc95..6ebae60924 100755
--- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf
+++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf
index 1da24c0d46..9792253ec8 100755
--- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf
+++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf
index f88fdcdaf9..b65be6586e 100755
--- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf
+++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf
index 33fa481a29..32b024f663 100755
--- a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf
+++ b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf
index 9c7a1609e7..938611b4bc 100755
--- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf
+++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf
index e2d3bb3463..dd0c19d6f0 100755
--- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf
+++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf
index 9afd0b5083..b538a18a3d 100755
--- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf
+++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf
index 69c8e17801..392d5f362f 100755
--- a/ecf/scripts/gdas/jgdas_forecast.ecf
+++ b/ecf/scripts/gdas/jgdas_forecast.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf
index 208ed2cc52..1f73e43eb1 100755
--- a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf
+++ b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf
index 1899dc152f..fb45d8fda5 100755
--- a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf
+++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf
index 31cca40bed..5212a026d9 100755
--- a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf
+++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf
index 77b44634a9..b1fd9fe32e 100755
--- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf
+++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf
index a30eceae57..12653d0e95 100755
--- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf
+++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf
index 41601c4de8..92c8c0551e 100755
--- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf
+++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf
index 1994f238d1..5f56e7ac17 100755
--- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf
+++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf
@@ -1,9 +1,9 @@
 #PBS -S /bin/bash
-#PBS -N %RUN%_atmos_gempak_%CYC%
+#PBS -N %RUN%_atmos_gempak_%FHR3%_%CYC%
 #PBS -j oe
 #PBS -q %QUEUE%
 #PBS -A %PROJ%-%PROJENVIR%
-#PBS -l walltime=03:00:00
+#PBS -l walltime=00:30:00
 #PBS -l select=1:ncpus=28:mpiprocs=28:mem=2GB
 #PBS -l place=vscatter:shared
 #PBS -l debug=true
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
@@ -37,6 +36,7 @@ module list
 #############################################################
 export cyc=%CYC%
 export cycle=t%CYC%z
+export FHR3=%FHR3%
 
 ############################################################
 # CALL executable job script here
diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf
index e9833baa41..4798e2a06a 100755
--- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf
+++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf
index 08686dbca3..25659058f8 100755
--- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf
+++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf
index 1ff597411a..da66dfe7f6 100755
--- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf
+++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf
index 04b07c58d1..df0f9f90f1 100755
--- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf
+++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf
@@ -1,9 +1,9 @@
 #PBS -S /bin/bash
-#PBS -N %RUN%_atmos_pgrb2_spec_gempak_%CYC%
+#PBS -N %RUN%_atmos_pgrb2_spec_gempak_%FHR3%_%CYC%
 #PBS -j oe
 #PBS -q %QUEUE%
 #PBS -A %PROJ%-%PROJENVIR%
-#PBS -l walltime=00:30:00
+#PBS -l walltime=00:15:00
 #PBS -l select=1:ncpus=1:mem=1GB
 #PBS -l place=vscatter:shared
 #PBS -l debug=true
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
@@ -35,6 +34,7 @@ module list
 #############################################################
 export cyc=%CYC%
 export cycle=t%CYC%z
+export FHR3=%FHR3%
 
 ############################################################
 # CALL executable job script here
diff --git a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf
index 2dd0bdf06c..c5f838fb5f 100755
--- a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf
+++ b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf
index bb0bcf8db7..f2b21cb168 100755
--- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf
+++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf
index d2e315bcef..50a71a44ba 100755
--- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf
+++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf
index ad717147fc..d8b1e2b531 100755
--- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf
+++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 export FHRGRP=%FHRGRP%
 export FHRLST=%FHRLST%
diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf
index e146f8df32..9108b879a5 100755
--- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf
+++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore b/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore
deleted file mode 100644
index 37e58b180a..0000000000
--- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-# Ignore these
-jgfs_atmos_awips_g2_f*.ecf
diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf
deleted file mode 100755
index c1edf3de88..0000000000
--- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf
+++ /dev/null
@@ -1,61 +0,0 @@
-#PBS -S /bin/bash
-#PBS -N %RUN%_atmos_awips_g2_%FCSTHR%_%CYC%
-#PBS -j oe
-#PBS -q %QUEUE%
-#PBS -A %PROJ%-%PROJENVIR%
-#PBS -l walltime=00:05:00
-#PBS -l select=1:ncpus=1:mem=3GB
-#PBS -l place=vscatter:shared
-#PBS -l debug=true
-
-model=gfs
-%include <head.h>
-%include <envir-p1.h>
-
-set -x
-
-export NET=%NET:gfs%
-export RUN=%RUN%
-export CDUMP=%RUN%
-
-export FHRGRP=%FHRGRP%
-export FHRLST=%FHRLST%
-export fcsthrs=%FCSTHR%
-
-############################################################
-# Load modules
-############################################################
-module load PrgEnv-intel/${PrgEnv_intel_ver}
-module load craype/${craype_ver}
-module load intel/${intel_ver}
-module load libjpeg/${libjpeg_ver}
-module load grib_util/${grib_util_ver}
-module load wgrib2/${wgrib2_ver}
-
-module list
-
-#############################################################
-# environment settings
-#############################################################
-export cyc=%CYC%
-export cycle=t%CYC%z
-trdrun=%TRDRUN%
-export job="jgfs_awips_f${fcsthrs}_${cyc}"
-
-############################################################
-# CALL executable job script here
-############################################################
-$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2
-
-###############################################################
-
-if [ $? -ne 0 ]; then
-   ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***"
-   ecflow_client --abort
-   exit
-fi
-
-%include <tail.h>
-%manual
-
-%end
diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf
index e2ddf7a1e5..3322aceeb1 100755
--- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf
+++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf
index 7443002a0b..4afac0d273 100755
--- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf
+++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf
index e9922e0751..2d9e8814ab 100755
--- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf
+++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/jgfs_forecast.ecf b/ecf/scripts/gfs/jgfs_forecast.ecf
index 370cd9342d..26d0c3b80d 100755
--- a/ecf/scripts/gfs/jgfs_forecast.ecf
+++ b/ecf/scripts/gfs/jgfs_forecast.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 # Load modules
diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf
index 2cb7f75949..8406f0449c 100755
--- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf
+++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf
index 208ed2cc52..1f73e43eb1 100755
--- a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf
+++ b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf
index 2871a0f1a1..d4de0a9725 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf
index 73015f869f..528068f057 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf
index 39e58f01c3..d09204cb2d 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf
index 0b0e516bc2..52179a56e2 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf
index 00f005a877..f7d0ea1be7 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf
index 8197d58020..1c6ba47c93 100755
--- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf
+++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf
index 8f93f6d098..171e737692 100755
--- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf
+++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf
@@ -16,7 +16,6 @@ set -x
 
 export NET=%NET:gfs%
 export RUN=%RUN%
-export CDUMP=%RUN%
 
 ############################################################
 ## Load modules
diff --git a/env/AWSPW.env b/env/AWSPW.env
index 894cce2343..867b9220ba 100755
--- a/env/AWSPW.env
+++ b/env/AWSPW.env
@@ -3,18 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanlrun"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
 
 step=$1
 
-export npe_node_max=36
 export launcher="mpiexec.hydra"
 export mpmd_opt=""
 
@@ -25,113 +19,67 @@ export NTHSTACK=1024000000
 ulimit -s unlimited
 ulimit -a
 
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing AWSPW.env"
+    exit 2
+fi
+
 if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
 
 elif [[ "${step}" = "post" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_post))
-
-    export NTHREADS_NP=${nth_np:-1}
-    [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max}
-    export APRUN_NP="${launcher} -n ${npe_post}"
+    export NTHREADS_NP=${NTHREADS1}
+    export APRUN_NP="${APRUN}"
 
-    export NTHREADS_DWN=${nth_dwn:-1}
-    [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max}
-    export APRUN_DWN="${launcher} -n ${npe_dwn}"
+    export NTHREADS_DWN=${threads_per_task_dwn:-1}
+    [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task}
+    export APRUN_DWN="${launcher} -n ${ntasks_dwn}"
 
 elif [[ "${step}" = "ecen" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN}"
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen}"
-
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
-
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc}"
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos}"
-
-elif [[ "${step}" = "postsnd" ]]; then
-
-    export CFP_MP="YES"
-
-    nth_max=$((npe_node_max / npe_node_postsnd))
-
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd}"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
-
-elif [[ "${step}" = "awips" ]]; then
-
-    nth_max=$((npe_node_max / npe_node_awips))
-
-    export NTHREADS_AWIPS=${nth_awips:-2}
-    [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max}
-    export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}"
-
-elif [[ "${step}" = "gempak" ]]; then
-
-    export CFP_MP="YES"
-
-    if [[ ${CDUMP} == "gfs" ]]; then
-        npe_gempak=${npe_gempak_gfs}
-        npe_node_gempak=${npe_node_gempak_gfs}
-    fi
-
-    nth_max=$((npe_node_max / npe_node_gempak))
-
-    export NTHREADS_GEMPAK=${nth_gempak:-1}
-    [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max}
-    export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}"
-
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN}"
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN}"
 
 fi
diff --git a/env/CONTAINER.env b/env/CONTAINER.env
index bfeb6dd6da..c40543794b 100755
--- a/env/CONTAINER.env
+++ b/env/CONTAINER.env
@@ -3,18 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
 
 step=$1
 
-export npe_node_max=40
 export launcher="mpirun"
 export mpmd_opt="--multi-prog"
 
diff --git a/env/GAEA.env b/env/GAEA.env
new file mode 100755
index 0000000000..6809a9b186
--- /dev/null
+++ b/env/GAEA.env
@@ -0,0 +1,66 @@
+#! /usr/bin/env bash
+
+if [[ $# -ne 1 ]]; then
+
+    echo "Must specify an input argument to set runtime environment variables!"
+    exit 1
+
+fi
+
+step=$1
+
+export launcher="srun -l --export=ALL"
+export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
+
+export OMP_STACKSIZE=2048000
+export NTHSTACK=1024000000
+
+ulimit -s unlimited
+ulimit -a
+
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    # This may be useful when Gaea is fully ported, so ignore SC warning
+    # shellcheck disable=SC2034
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing GAEA.env"
+    exit 2
+fi
+
+if [[ "${step}" = "waveinit" ]]; then
+
+    export CFP_MP="YES"
+    if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi
+    export wavempexec=${launcher}
+    export wave_mpmd=${mpmd_opt}
+
+elif [[ "${step}" = "fcst" ]]; then
+
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
+    # With ESMF threading, the model wants to use the full node
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
+
+elif [[ "${step}" = "atmos_products" ]]; then
+
+    export USE_CFP="YES"  # Use MPMD for downstream product generation on Hera
+
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
+elif [[ "${step}" = "fit2obs" ]]; then
+
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}"
+
+fi
diff --git a/env/HERA.env b/env/HERA.env
index 4ad9e41d01..66377b2ad4 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -3,11 +3,13 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
+#JKH    
     echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
+    echo "atmanlrun atmensanlrun aeroanlrun snowanl"
     echo "anal sfcanl fcst post metp"
     echo "eobs eupd ecen efcs epos"
     echo "postsnd awips gempak"
+#JKH    
     exit 1
 
 fi
@@ -30,21 +32,40 @@ export OMP_STACKSIZE=2048000
 export NTHSTACK=1024000000
 #export LD_BIND_NOW=1
 
-ulimit -s unlimited
-ulimit -a
+# Setting stacksize to unlimited on login nodes is prohibited
+if [[ -n "${SLURM_JOB_ID:-}" ]]; then
+  ulimit -s unlimited
+  ulimit -a
+fi
 
-if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing HERA.env"
+    exit 2
+fi
 
-    nth_max=$((npe_node_max / npe_node_prep))
+if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
 
     export POE="NO"
     export BACK="NO"
     export sys_tp="HERA"
     export launcher_PREP="srun"
 
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
 
-    export APRUN_CALCFIMS="${launcher} -n 1"
+    export APRUN_CALCFIMS="${APRUN}"
+
+elif [[ "${step}" = "prep_emissions" ]]; then
+
+    export APRUN="${APRUN}"
 
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then
 
@@ -53,71 +74,77 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
     export wavempexec=${launcher}
     export wave_mpmd=${mpmd_opt}
 
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}"
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}"
+elif [[ "${step}" = "atmensanlletkf" ]]; then
 
-elif [[ "${step}" = "atmensanlrun" ]]; then
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}"
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+elif [[ "${step}" = "atmensanlfv3inc" ]]; then
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}"    
 
 elif [[ "${step}" = "aeroanlrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
+    export NTHREADS_AEROANL=${NTHREADSmax}
+
+    export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}"
+
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
 
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}"
 
-elif [[ "${step}" = "landanl" ]]; then
+elif [[ "${step}" = "prepobsaero" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}"
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+elif [[ "${step}" = "snowanl" ]]; then
+
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
-elif [[ "${step}" = "ocnanalbmat" ]]; then
+elif [[ "${step}" = "marinebmat" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd --multi-prog"
+    export APRUN_MARINEBMAT="${APRUN}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalbmat))
+elif [[ "${step}" = "marinebmat" ]]; then
 
-    export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export APRUNCFP="${launcher} -n \$ncmd --multi-prog"
+    export APRUN_MARINEBMAT="${APRUN}"
 
 elif [[ "${step}" = "ocnanalrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd --multi-prog"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalrun))
-
-    export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export APRUN_OCNANAL="${APRUN}"
 
 elif [[ "${step}" = "ocnanalchkpt" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd --multi-prog"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalchkpt))
+    export APRUN_OCNANAL="${APRUN}"
+
+elif [[ "${step}" = "ocnanalecen" ]]; then
 
-    export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export NTHREADS_OCNANALECEN=${NTHREADSmax}
+    export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+
+elif [[ "${step}" = "marineanalletkf" ]]; then
+
+    export NTHREADS_MARINEANALLETKF=${NTHREADSmax}
+    export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}"
 
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
@@ -128,44 +155,34 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_anal))
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
 
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} --cpus-per-task=${NTHREADS_GSI}"
-
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-12}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-12}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
 
 elif [[ "${step}" = "sfcanl" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_sfcanl))
-
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "eobs" ]]; then
 
     export MKL_NUM_THREADS=4
     export MKL_CBWR=AUTO
 
-    nth_max=$((npe_node_max / npe_node_eobs))
-
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} --cpus-per-task=${NTHREADS_GSI}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -173,11 +190,8 @@ elif [[ "${step}" = "eobs" ]]; then
 
 elif [[ "${step}" = "eupd" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_eupd))
-
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} --cpus-per-task=${NTHREADS_ENKF}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}} --cpus-per-task=${NTHREADS_ENKF}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -185,116 +199,85 @@ elif [[ "${step}" = "eupd" ]]; then
 
 elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
-
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
 
 elif [[ "${step}" = "upp" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}"
 
 elif [[ "${step}" = "atmos_products" ]]; then
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation on Hera
 
-elif [[ "${step}" = "ecen" ]]; then
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+elif [[ "${step}" = "ecen" ]]; then
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_ECEN}"
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}"
 
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_CALCINC}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
-
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_ESFC}"
+    export NTHREADS_ESFC=${threads_per_task_esfc:-${max_threads_per_task}}
+    export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos} --cpus-per-task=${NTHREADS_EPOS}"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}"
 
+#JKH
 elif [[ "${step}" = "init" ]]; then
 
     export APRUN="${launcher} -n ${npe_init}"
+#JKH
 
 elif [[ "${step}" = "postsnd" ]]; then
 
     export CFP_MP="YES"
 
-    nth_max=$((npe_node_max / npe_node_postsnd))
-
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --cpus-per-task=${NTHREADS_POSTSND}"
+    export NTHREADS_POSTSND=${NTHREADS1}
+    export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}"
 
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
+    export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1}
+    [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task}
+    export APRUN_POSTSNDCFP="${launcher} -n ${ntasks_postsndcfp} ${mpmd_opt}"
 
 elif [[ "${step}" = "awips" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_awips))
-
-    export NTHREADS_AWIPS=${nth_awips:-2}
-    [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max}
-    export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}"
+    export NTHREADS_AWIPS=${NTHREADS1}
+    export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}"
 
 elif [[ "${step}" = "gempak" ]]; then
 
     export CFP_MP="YES"
 
-    if [[ ${CDUMP} == "gfs" ]]; then
-        npe_gempak=${npe_gempak_gfs}
-        npe_node_gempak=${npe_node_gempak_gfs}
-    fi
-
-    nth_max=$((npe_node_max / npe_node_gempak))
-
-    export NTHREADS_GEMPAK=${nth_gempak:-1}
-    [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max}
-    export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}"
-
+    export NTHREADS_GEMPAK=${NTHREADS1}
+    [[ ${NTHREADS_GEMPAK} -gt ${max_threads_per_task} ]] && export NTHREADS_GEMPAK=${max_threads_per_task}
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}"
 
 fi
diff --git a/env/HERCULES.env b/env/HERCULES.env
index 6a4aad7a7d..83fa1aadd1 100755
--- a/env/HERCULES.env
+++ b/env/HERCULES.env
@@ -3,16 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "fcst post"
-    echo "Note: Hercules is only set up to run in forecast-only mode"
     exit 1
 
 fi
 
 step=$1
 
-export npe_node_max=80
 export launcher="srun -l --export=ALL"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
 
@@ -32,19 +28,35 @@ export I_MPI_EXTRA_FILESYSTEM_LIST=lustre
 ulimit -s unlimited
 ulimit -a
 
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing HERCULES.env"
+    exit 2
+fi
+
 case ${step} in
  "prep" | "prepbufr")
 
-    nth_max=$((npe_node_max / npe_node_prep))
-
     export POE="NO"
     export BACK=${BACK:-"YES"}
     export sys_tp="HERCULES"
     export launcher_PREP="srun"
  ;;
- "preplandobs")
+ "prepsnowobs")
+
+    export APRUN_CALCFIMS="${APRUN}"
+ ;;
+ "prep_emissions")
 
-    export APRUN_CALCFIMS="${launcher} -n 1"
+    export APRUN="${APRUN}"
  ;;
  "waveinit" | "waveprep" | "wavepostsbs" | "wavepostbndpnt" | "wavepostpnt" | "wavepostbndpntbll")
 
@@ -54,71 +66,71 @@ case ${step} in
     export wave_mpmd=${mpmd_opt}
 
  ;;
- "atmanlrun")
+ "atmanlvar")
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}"
+ ;;
+ "atmanlfv3inc")
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}"
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}"
  ;;
- "atmensanlrun")
+ "atmensanlletkf")
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}"
+ ;;
+ "atmensanlfv3inc")
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}"
  ;;
  "aeroanlrun")
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
-
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
+    export NTHREADS_AEROANL=${NTHREADSmax}
+    export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}"
  ;;
- "landanl")
+ "prepobsaero")
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}"
+ ;;
+ "snowanl")
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
  ;;
- "ocnanalbmat")
+ "marinebmat")
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
-
-    nth_max=$((npe_node_max / npe_node_ocnanalbmat))
-
-    export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export APRUN_MARINEBMAT="${APRUN}"
  ;;
  "ocnanalrun")
 
+    export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
+    export APRUN_OCNANAL="${APRUN}"
+ ;;
+ "ocnanalecen")
+
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalrun))
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node_ocnanalecen))
 
-    export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}"
- ;;
+    export NTHREADS_OCNANALECEN=${threads_per_task_ocnanalecen:-${max_threads_per_task}}
+    [[ ${NTHREADS_OCNANALECEN} -gt ${max_threads_per_task} ]] && export NTHREADS_OCNANALECEN=${max_threads_per_task}
+    export APRUN_OCNANALECEN="${launcher} -n ${ntasks_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+ ;;
  "ocnanalchkpt")
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalchkpt))
-
-    export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export NTHREADS_OCNANAL=${NTHREADSmax}
+    export APRUN_OCNANAL="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}"
  ;;
  "anal" | "analcalc")
 
@@ -129,32 +141,28 @@ case ${step} in
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_anal))
 
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} --cpus-per-task=${NTHREADS_GSI}"
+    export NTHREADS_GSI=${threads_per_task_anal:-${max_threads_per_task}}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-12}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-12}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
  ;;
  "sfcanl")
-    nth_max=$((npe_node_max / npe_node_sfcanl))
 
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
  ;;
  "eobs")
 
@@ -165,11 +173,10 @@ case ${step} in
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_eobs))
 
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} --cpus-per-task=${NTHREADS_GSI}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    [[ ${NTHREADS_GSI} -gt ${max_threads_per_task} ]] && export NTHREADS_GSI=${max_threads_per_task}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
  ;;
  "eupd")
 
@@ -177,127 +184,95 @@ case ${step} in
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_eupd))
 
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} --cpus-per-task=${NTHREADS_ENKF}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}} --cpus-per-task=${NTHREADS_ENKF}"
  ;;
  "fcst" | "efcs")
 
     export OMP_STACKSIZE=512M
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
  ;;
 
  "upp")
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}"
  ;;
+
  "atmos_products")
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation
  ;;
- "ecen")
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+ "oceanice_products")
+
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+ ;;
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_ECEN}"
+ "ecen")
+
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}"
 
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_CALCINC}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}"
 
  ;;
  "esfc")
 
-    nth_max=$((npe_node_max / npe_node_esfc))
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}"
 
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_ESFC}"
-
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
 
  ;;
  "epos")
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos} --cpus-per-task=${NTHREADS_EPOS}"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}"
 
  ;;
  "postsnd")
 
     export CFP_MP="YES"
 
-    nth_max=$((npe_node_max / npe_node_postsnd))
+    export NTHREADS_POSTSND=${NTHREADS1}
+    export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}"
 
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --cpus-per-task=${NTHREADS_POSTSND}"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
+    export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1}
+    [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task}
+    export APRUN_POSTSNDCFP="${launcher} -n ${ntasks_postsndcfp} ${mpmd_opt}"
 
  ;;
  "awips")
 
-    nth_max=$((npe_node_max / npe_node_awips))
-
-    export NTHREADS_AWIPS=${nth_awips:-2}
-    [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max}
-    export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}"
+    export NTHREADS_AWIPS=${NTHREADS1}
+    export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}"
 
  ;;
  "gempak")
 
-    export CFP_MP="YES"
-
-    if [[ ${CDUMP} == "gfs" ]]; then
-        npe_gempak=${npe_gempak_gfs}
-        npe_node_gempak=${npe_node_gempak_gfs}
-    fi
-
-    nth_max=$((npe_node_max / npe_node_gempak))
-
-    export NTHREADS_GEMPAK=${nth_gempak:-1}
-    [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max}
-    export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}"
+    echo "WARNING: ${step} is not enabled on ${machine}!"
 
  ;;
  "fit2obs")
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}"
 
  ;;
  *)
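
For reference, the resource derivation that this change adds near the top of HERCULES.env above (and repeats in each machine env file below) can be exercised on its own. A minimal standalone sketch, with example values assumed purely for illustration (in the workflow the real values are exported by config.resources for the step being run):

    #!/usr/bin/env bash
    # Assumed example values; config.resources exports the real ones per step.
    ntasks=120               # total MPI tasks for the step
    threads_per_task=4       # requested threads per MPI task
    tasks_per_node=20        # MPI tasks placed on each node
    max_tasks_per_node=40    # PEs available per node
    launcher="srun -l --export=ALL"

    # Same derivation the env files perform before the per-step blocks.
    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))   # 40/20 = 2
    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}        # 4
    NTHREADS1=${threads_per_task:-1}                                # 4
    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}  # clamped to 2
    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}      # clamped to 2
    APRUN="${launcher} -n ${ntasks}"

    echo "NTHREADSmax=${NTHREADSmax} NTHREADS1=${NTHREADS1}"   # NTHREADSmax=2 NTHREADS1=2
    echo "APRUN=${APRUN}"                                      # APRUN=srun -l --export=ALL -n 120

Each per-step block then only selects NTHREADSmax or NTHREADS1 and, where needed, appends --cpus-per-task, instead of recomputing nth_max from npe_node_max as before.
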
diff --git a/env/JET.env b/env/JET.env
index f458bff72d..810a8cd501 100755
--- a/env/JET.env
+++ b/env/JET.env
@@ -3,26 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
 
 step=$1
 
-if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then
-  export npe_node_max=16
-elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then
-  export npe_node_max=24
-elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then 
-  export npe_node_max=16
-elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then
-  export npe_node_max=40
-fi
 export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
 
@@ -33,19 +19,35 @@ export NTHSTACK=1024000000
 ulimit -s unlimited
 ulimit -a
 
-if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing JET.env"
+    exit 2
+fi
 
-    nth_max=$((npe_node_max / npe_node_prep))
+if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
 
     export POE="NO"
     export BACK="NO"
     export sys_tp="JET"
     export launcher_PREP="srun"
 
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
 
     export APRUN_CALCFIMS="${launcher} -n 1"
 
+elif [[ "${step}" = "prep_emissions" ]]; then
+
+    export APRUN="${launcher} -n 1"
+
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then
 
     export CFP_MP="YES"
@@ -53,61 +55,54 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
     export wavempexec=${launcher}
     export wave_mpmd=${mpmd_opt}
 
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN}"
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}"
+elif [[ "${step}" = "atmensanlletkf" ]]; then
 
-elif [[ "${step}" = "atmensanlrun" ]]; then
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN}"
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+elif [[ "${step}" = "atmensanlfv3inc" ]]; then
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} ${npe_atmensanlrun}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN}"
 
 elif [[ "${step}" = "aeroanlrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
+    export NTHREADS_AEROANL=${NTHREADSmax}
+    export APRUN_AEROANL="${APRUN}"
 
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
+elif [[ "${step}" = "prepobsaero" ]]; then
 
-elif [[ "${step}" = "landanl" ]]; then
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}"
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+elif [[ "${step}" = "snowanl" ]]; then
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
-elif [[ "${step}" = "ocnanalbmat" ]]; then
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
 
-    export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalbmat))
+elif [[ "${step}" = "marinebmat" ]]; then
 
-    export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}"
+    export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
+    export APRUN_MARINEBMAT="${APRUN}"
 
 elif [[ "${step}" = "ocnanalrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
-
-    nth_max=$((npe_node_max / npe_node_ocnanalrun))
-
-    export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}"
+    export APRUN_OCNANAL="${APRUN}"
 
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
@@ -118,43 +113,34 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_anal))
-
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}"
+    export NTHREADS_GSI=${threads_per_task_anal:-${max_threads_per_task}}
+    export APRUN_GSI="${APRUN}"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd"
 
-    export NTHREADS_CYCLE=${nth_cycle:-12}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-12}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle}"
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl}"
 
 elif [[ "${step}" = "sfcanl" ]]; then
-    nth_max=$((npe_node_max / npe_node_sfcanl))
-
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "eobs" ]]; then
 
     export MKL_NUM_THREADS=4
     export MKL_CBWR=AUTO
 
-    nth_max=$((npe_node_max / npe_node_eobs))
-
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -162,11 +148,8 @@ elif [[ "${step}" = "eobs" ]]; then
 
 elif [[ "${step}" = "eupd" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_eupd))
-
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -174,80 +157,63 @@ elif [[ "${step}" = "eupd" ]]; then
 
 elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
 
 elif [[ "${step}" = "upp" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export APRUN_UPP="${APRUN}"
 
 elif [[ "${step}" = "atmos_products" ]]; then
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation
 
-elif [[ "${step}" = "ecen" ]]; then
+elif [[ "${step}" = "oceanice_products" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen}"
+elif [[ "${step}" = "ecen" ]]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN}"
+
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
-
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc}"
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos}"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN}"
 
 elif [[ "${step}" = "postsnd" ]]; then
 
     export CFP_MP="YES"
 
-    nth_max=$((npe_node_max / npe_node_postsnd))
+    export NTHREADS_POSTSND=${NTHREADS1}
+    export APRUN_POSTSND="${APRUN}"
 
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd}"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
+    export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1}
+    [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task}
+    export APRUN_POSTSNDCFP="${launcher} -n ${ntasks_postsndcfp} ${mpmd_opt}"
 
 elif [[ "${step}" = "awips" ]]; then
 
@@ -259,10 +225,7 @@ elif [[ "${step}" = "gempak" ]]; then
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN}"
 
 fi
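
The fcst/efcs blocks above (in HERCULES.env and JET.env alike) round the UFS task count up to whole nodes with integer ceiling division, since with ESMF threading the model is handed every slot on the nodes it occupies. A small worked sketch with assumed numbers:

    #!/usr/bin/env bash
    # Assumed example values; in the workflow ntasks and tasks_per_node come from config.resources.
    ntasks=50
    tasks_per_node=16

    # (a + b - 1) / b is integer ceiling division of a by b.
    (( nnodes = (ntasks + tasks_per_node - 1) / tasks_per_node ))   # (50 + 16 - 1) / 16 = 65 / 16 = 4 nodes
    (( ufs_ntasks = nnodes * tasks_per_node ))                      # 4 * 16 = 64 tasks handed to the model

    echo "nnodes=${nnodes} ufs_ntasks=${ufs_ntasks}"   # nnodes=4 ufs_ntasks=64
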
diff --git a/env/ORION.env b/env/ORION.env
index d91fd4db03..bbbfb59182 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -3,18 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
 
 step=$1
 
-export npe_node_max=40
 export launcher="srun -l --export=ALL"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
 
@@ -32,19 +26,35 @@ export NTHSTACK=1024000000
 ulimit -s unlimited
 ulimit -a
 
-if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing ORION.env"
+    exit 2
+fi
 
-    nth_max=$((npe_node_max / npe_node_prep))
+if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
 
     export POE="NO"
     export BACK=${BACK:-"YES"}
     export sys_tp="ORION"
     export launcher_PREP="srun"
 
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
 
     export APRUN_CALCFIMS="${launcher} -n 1"
 
+elif [[ "${step}" = "prep_emissions" ]]; then
+
+    export APRUN="${launcher} -n 1"
+
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || \
     [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostpnt" ]] || [[ "${step}" == "wavepostbndpntbll" ]]; then
 
@@ -53,71 +63,74 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
     export wavempexec=${launcher}
     export wave_mpmd=${mpmd_opt}
 
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}"
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}"
+elif [[ "${step}" = "atmensanlletkf" ]]; then
 
-elif [[ "${step}" = "atmensanlrun" ]]; then
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}"
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+elif [[ "${step}" = "atmensanlfv3inc" ]]; then
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}"
 
 elif [[ "${step}" = "aeroanlrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
+    export NTHREADS_AEROANL=${NTHREADSmax}
+    export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}"
 
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}"
+elif [[ "${step}" = "prepobsaero" ]]; then
 
-elif [[ "${step}" = "landanl" ]]; then
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}"
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+elif [[ "${step}" = "snowanl" ]]; then
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}"
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
-elif [[ "${step}" = "ocnanalbmat" ]]; then
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
 
-    export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalbmat))
+elif [[ "${step}" = "marinebmat" ]]; then
+
+    export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export NTHREADS_MARINEBMAT=${NTHREADSmax}
+    export APRUN_MARINEBMAT="${APRUN}"
 
 elif [[ "${step}" = "ocnanalrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalrun))
-
-    export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export APRUN_OCNANAL="${APRUN}"
 
 elif [[ "${step}" = "ocnanalchkpt" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_ocnanalchkpt))
+    export NTHREADS_OCNANAL=${NTHREADSmax}
+    export APRUN_OCNANAL="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}"
+
+elif [[ "${step}" = "ocnanalecen" ]]; then
+
+    export NTHREADS_OCNANALECEN=${NTHREADSmax}
+    export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+
+elif [[ "${step}" = "marineanalletkf" ]]; then
 
-    export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}}
-    [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
-    export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}"
+    export NTHREADS_MARINEANALLETKF=${NTHREADSmax}
+    export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}"
 
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
@@ -128,32 +141,26 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_anal))
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
 
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} --cpus-per-task=${NTHREADS_GSI}"
-
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-12}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-12}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle} --cpus-per-task=${NTHREADS_CYCLE}"
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}"
 
 elif [[ "${step}" = "sfcanl" ]]; then
-    nth_max=$((npe_node_max / npe_node_sfcanl))
-
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "eobs" ]]; then
 
@@ -164,11 +171,9 @@ elif [[ "${step}" = "eobs" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_eobs))
-
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} --cpus-per-task=${NTHREADS_GSI}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    [[ ${NTHREADS_GSI} -gt ${max_threads_per_task} ]] && export NTHREADS_GSI=${max_threads_per_task}
+    export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}"
 
 elif [[ "${step}" = "eupd" ]]; then
 
@@ -176,119 +181,81 @@ elif [[ "${step}" = "eupd" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_eupd))
-
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} --cpus-per-task=${NTHREADS_ENKF}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}} --cpus-per-task=${NTHREADS_ENKF}"
 
 elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    export OMP_STACKSIZE=512M
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
 
 elif [[ "${step}" = "upp" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}"
 
 elif [[ "${step}" = "atmos_products" ]]; then
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation
 
-elif [[ "${step}" = "ecen" ]]; then
+elif [[ "${step}" = "oceanice_products" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_ECEN}"
+elif [[ "${step}" = "ecen" ]]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}"
+
+    export NTHREADS_CHGRES=${threads_per_task:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_CALCINC}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
-
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_ESFC}"
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos} --cpus-per-task=${NTHREADS_EPOS}"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}"
 
 elif [[ "${step}" = "postsnd" ]]; then
 
     export CFP_MP="YES"
 
-    nth_max=$((npe_node_max / npe_node_postsnd))
+    export NTHREADS_POSTSND=${NTHREADS1}
+    export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}"
 
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --cpus-per-task=${NTHREADS_POSTSND}"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
+    export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1}
+    [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task}
+    export APRUN_POSTSNDCFP="${launcher} -n ${ntasks_postsndcfp} ${mpmd_opt}"
 
 elif [[ "${step}" = "awips" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_awips))
-
-    export NTHREADS_AWIPS=${nth_awips:-2}
-    [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max}
-    export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}"
+    export NTHREADS_AWIPS=${NTHREADS1}
+    export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}"
 
 elif [[ "${step}" = "gempak" ]]; then
 
-    export CFP_MP="YES"
-
-    if [[ ${CDUMP} == "gfs" ]]; then
-        npe_gempak=${npe_gempak_gfs}
-        npe_node_gempak=${npe_node_gempak_gfs}
-    fi
-
-    nth_max=$((npe_node_max / npe_node_gempak))
-
-    export NTHREADS_GEMPAK=${nth_gempak:-1}
-    [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max}
-    export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}"
+    echo "WARNING: ${step} is not enabled on ${machine}!"
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}"
 
 fi
diff --git a/env/S4.env b/env/S4.env
index 3dab3fc3e7..840ca65898 100755
--- a/env/S4.env
+++ b/env/S4.env
@@ -3,23 +3,12 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
 
 step=$1
-PARTITION_BATCH=${PARTITION_BATCH:-"s4"}
 
-if [[ ${PARTITION_BATCH} = "s4" ]]; then
-   export npe_node_max=32
-elif [[ ${PARTITION_BATCH} = "ivy" ]]; then
-   export npe_node_max=20
-fi
 export launcher="srun -l --export=ALL"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
 
@@ -30,18 +19,34 @@ export NTHSTACK=1024000000
 ulimit -s unlimited
 ulimit -a
 
-if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
+# Calculate common variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing S4.env"
+    exit 2
+fi
 
-    nth_max=$((npe_node_max / npe_node_prep))
+if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
 
     export POE="NO"
     export BACK="NO"
     export sys_tp="S4"
     export launcher_PREP="srun"
 
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
 
-    export APRUN_CALCFIMS="${launcher} -n 1"
+    export APRUN_CALCFIMS="${APRUN}"
+
+elif [[ "${step}" = "prep_emissions" ]]; then
+
+    export APRUN="${APRUN}"
 
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then
 
@@ -50,46 +55,49 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
     export wavempexec=${launcher}
     export wave_mpmd=${mpmd_opt}
 
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN}"
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}"
+elif [[ "${step}" = "atmensanlletkf" ]]; then
 
-elif [[ "${step}" = "atmensanlrun" ]]; then
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN}"
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+elif [[ "${step}" = "atmensanlfv3inc" ]]; then
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN}"
 
 elif [[ "${step}" = "aeroanlrun" ]]; then
 
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
+    export NTHREADS_AEROANL=${NTHREADSmax}
+    export APRUN_AEROANL="${APRUN}"
 
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
+elif [[ "${step}" = "prepobsaero" ]]; then
 
-elif [[ "${step}" = "landanl" ]]; then
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}"
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+elif [[ "${step}" = "snowanl" ]]; then
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
-elif [[ "${step}" = "ocnanalbmat" ]]; then
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
+
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN}"
+
+elif [[ "${step}" = "marinebmat" ]]; then
      echo "WARNING: ${step} is not enabled on S4!"
 
-elif [[ "${step}" = "ocnanalrun" ]]; then
+elif [[ "${step}" = "marinerun" ]]; then
      echo "WARNING: ${step} is not enabled on S4!"
 
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
@@ -101,44 +109,35 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
     export USE_CFP=${USE_CFP:-"YES"}
     export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_anal))
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN}"
 
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}"
-
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd"
 
-    export NTHREADS_CYCLE=${nth_cycle:-12}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-12}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle}"
 
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl}"
 
 elif [[ "${step}" = "sfcanl" ]]; then
-    nth_max=$((npe_node_max / npe_node_sfcanl))
-
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "eobs" ]]; then
 
     export MKL_NUM_THREADS=4
     export MKL_CBWR=AUTO
 
-    nth_max=$((npe_node_max / npe_node_eobs))
-
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -146,11 +145,8 @@ elif [[ "${step}" = "eobs" ]]; then
 
 elif [[ "${step}" = "eupd" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_eupd))
-
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}}"
 
     export CFP_MP=${CFP_MP:-"YES"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -158,95 +154,57 @@ elif [[ "${step}" = "eupd" ]]; then
 
 elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks}"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks}"
+    unset nnodes ufs_ntasks
 
 elif [[ "${step}" = "upp" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export OMP_NUM_THREADS="${NTHREADS_UPP}"
+    export APRUN_UPP="${APRUN}"
 
 elif [[ "${step}" = "atmos_products" ]]; then
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation
 
-elif [[ "${step}" = "ecen" ]]; then
+elif [[ "${step}" = "oceanice_products" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
+elif [[ "${step}" = "ecen" ]]; then
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen}"
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN}"
 
-    export NTHREADS_CHGRES=${nth_chgres:-12}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN}"
 
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc}"
-
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos}"
-
-elif [[ "${step}" = "postsnd" ]]; then
-
-    export CFP_MP="YES"
-
-    nth_max=$((npe_node_max / npe_node_postsnd))
-
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd}"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}"
-
-elif [[ "${step}" = "awips" ]]; then
-
-     echo "WARNING: ${step} is not enabled on S4!"
-
-elif [[ "${step}" = "gempak" ]]; then
-
-     echo "WARNING: ${step} is not enabled on S4!"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN}"
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -n ${npe_fit2obs}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${APRUN}"
 
 fi
diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index a4fe81060d..18caf1bc03 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -3,11 +3,6 @@
 if [[ $# -ne 1 ]]; then
 
     echo "Must specify an input argument to set runtime environment variables!"
-    echo "argument can be any one of the following:"
-    echo "atmanlrun atmensanlrun aeroanlrun landanl"
-    echo "anal sfcanl fcst post metp"
-    echo "eobs eupd ecen esfc efcs epos"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
@@ -18,64 +13,80 @@ step=$1
 export launcher="mpiexec -l"
 export mpmd_opt="--cpu-bind verbose,core cfp"
 
-export npe_node_max=128
+# Calculate common resource variables
+# Check first if the dependent variables are set
+if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then
+    max_threads_per_task=$((max_tasks_per_node / tasks_per_node))
+    NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
+    NTHREADS1=${threads_per_task:-1}
+    [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
+    [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
+    APRUN="${launcher} -n ${ntasks}"
+else
+    echo "ERROR config.resources must be sourced before sourcing WCOSS2.env"
+    exit 2
+fi
 
 if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_prep))
-
     export POE=${POE:-"YES"}
     export BACK=${BACK:-"off"}
     export sys_tp="wcoss2"
     export launcher_PREP="mpiexec"
 
-elif [[ "${step}" = "preplandobs" ]]; then
+elif [[ "${step}" = "prepsnowobs" ]]; then
+
+    export APRUN_CALCFIMS="${APRUN}"
+
+elif [[ "${step}" = "prep_emissions" ]]; then
 
-    export APRUN_CALCFIMS="${launcher} -n 1"
+    export APRUN="${APRUN}"
 
 elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll"  ]] || [[ "${step}" = "wavepostpnt" ]]; then
 
     export USE_CFP="YES"
-    if [[ "${step}" = "waveprep" ]] && [[ "${CDUMP}" = "gfs" ]]; then export NTASKS=${NTASKS_gfs} ; fi
     export wavempexec="${launcher} -np"
     export wave_mpmd=${mpmd_opt}
 
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_atmanlrun))
+    export NTHREADS_ATMANLVAR=${NTHREADSmax}
+    export APRUN_ATMANLVAR="${APRUN}"
 
-    export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-    export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}"
+elif [[ "${step}" = "atmensanlletkf" ]]; then
 
-elif [[ "${step}" = "atmensanlrun" ]]; then
+    export NTHREADS_ATMENSANLLETKF=${NTHREADSmax}
+    export APRUN_ATMENSANLLETKF="${APRUN}"
 
-    nth_max=$((npe_node_max / npe_node_atmensanlrun))
+elif [[ "${step}" = "atmensanlfv3inc" ]]; then
 
-    export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}}
-    [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max}
-    export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}"
+    export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMENSANLFV3INC="${APRUN}"
 
 elif [[ "${step}" = "aeroanlrun" ]]; then
 
     export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}"
 
-    nth_max=$((npe_node_max / npe_node_aeroanlrun))
+    export NTHREADS_AEROANL=${NTHREADSmax}
+    export APRUN_AEROANL="${APRUN}"
 
-    export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}}
-    [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max}
-    export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}"
+elif [[ "${step}" = "prepobsaero" ]]; then
 
-elif [[ "${step}" = "landanl" ]]; then
+    export NTHREADS_PREPOBSAERO=${NTHREADS1}
+    export APRUN_PREPOBSAERO="${APRUN} --ppn ${tasks_per_node} --cpu-bind depth --depth=${NTHREADS_PREPOBSAERO}"
 
-    nth_max=$((npe_node_max / npe_node_landanl))
+elif [[ "${step}" = "snowanl" ]]; then
 
-    export NTHREADS_LANDANL=${nth_landanl:-${nth_max}}
-    [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max}
-    export APRUN_LANDANL="${launcher} -n ${npe_landanl}"
+    export NTHREADS_SNOWANL=${NTHREADSmax}
+    export APRUN_SNOWANL="${APRUN}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
+
+    export NTHREADS_ATMANLFV3INC=${NTHREADSmax}
+    export APRUN_ATMANLFV3INC="${APRUN}"
+
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
     export OMP_PLACES=cores
@@ -86,27 +97,24 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
       export MPICH_MPIIO_HINTS="*:romio_cb_write=disable"
     fi
 
-    nth_max=$((npe_node_max / npe_node_anal))
-
-    export NTHREADS_GSI=${nth_anal:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} -ppn ${npe_node_anal} --cpu-bind depth --depth ${NTHREADS_GSI}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
     export APRUN_CALCINC="${launcher} \$ncmd"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_cycle=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_cycle} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    ntasks_cycle=${ntiles:-6}
+    export APRUN_CYCLE="${launcher} -n ${ntasks_cycle} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
 
     export NTHREADS_GAUSFCANL=1
-    npe_gausfcanl=${npe_gausfcanl:-1}
-    export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}"
+    ntasks_gausfcanl=${ntasks_gausfcanl:-1}
+    export APRUN_GAUSFCANL="${launcher} -n ${ntasks_gausfcanl}"
 
-    export NTHREADS_CHGRES=${nth_echgres:-14}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_CHGRES=${threads_per_task_echgres:-14}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES=""
 
     export CFP_MP=${CFP_MP:-"NO"}
@@ -115,12 +123,9 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 
 elif [[ "${step}" = "sfcanl" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_sfcanl))
-
-    export NTHREADS_CYCLE=${nth_sfcanl:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    npe_sfcanl=${ntiles:-6}
-    export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}"
+    export NTHREADS_CYCLE=${threads_per_task:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN}"
 
 elif [[ "${step}" = "eobs" ]]; then
 
@@ -128,11 +133,8 @@ elif [[ "${step}" = "eobs" ]]; then
     export OMP_STACKSIZE=1G
     export FI_OFI_RXM_SAR_LIMIT=3145728
 
-    nth_max=$((npe_node_max / npe_node_eobs))
-
-    export NTHREADS_GSI=${nth_eobs:-${nth_max}}
-    [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max}
-    export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} -ppn ${npe_node_eobs} --cpu-bind depth --depth ${NTHREADS_GSI}"
+    export NTHREADS_GSI=${NTHREADSmax}
+    export APRUN_GSI="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}"
 
     export CFP_MP=${CFP_MP:-"NO"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -145,11 +147,8 @@ elif [[ "${step}" = "eupd" ]]; then
     export MPICH_COLL_OPT_OFF=1
     export FI_OFI_RXM_SAR_LIMIT=3145728
 
-    nth_max=$((npe_node_max / npe_node_eupd))
-
-    export NTHREADS_ENKF=${nth_eupd:-${nth_max}}
-    [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max}
-    export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} -ppn ${npe_node_eupd} --cpu-bind depth --depth ${NTHREADS_ENKF}"
+    export NTHREADS_ENKF=${NTHREADSmax}
+    export APRUN_ENKF="${launcher} -n ${ntasks_enkf:-${ntasks}} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ENKF}"
 
     export CFP_MP=${CFP_MP:-"NO"}
     export USE_CFP=${USE_CFP:-"YES"}
@@ -157,127 +156,93 @@ elif [[ "${step}" = "eupd" ]]; then
 
 elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
 
-    if [[ "${CDUMP}" =~ "gfs" ]]; then
-        nprocs="npe_${step}_gfs"
-        ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}"
-    else
-        nprocs="npe_${step}"
-        ppn="npe_node_${step}"
-    fi
-    (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} ))
-    (( ntasks = nnodes*${!ppn} ))
+    (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
+    (( ufs_ntasks = nnodes*tasks_per_node ))
     # With ESMF threading, the model wants to use the full node
-    export APRUN_UFS="${launcher} -n ${ntasks} -ppn ${!ppn} --cpu-bind depth --depth 1"
-    unset nprocs ppn nnodes ntasks
+    export APRUN_UFS="${launcher} -n ${ufs_ntasks} -ppn ${tasks_per_node} --cpu-bind depth --depth 1"
+    unset nnodes ufs_ntasks
 
     # TODO: Why are fcst and efcs so different on WCOSS2?
     # TODO: Compare these with the ufs-weather-model regression test job card at:
     # https://github.com/ufs-community/ufs-weather-model/blob/develop/tests/fv3_conf/fv3_qsub.IN_wcoss2
     export FI_OFI_RXM_RX_SIZE=40000
     export FI_OFI_RXM_TX_SIZE=40000
-    if [[ "${step}" = "fcst" ]]; then
-        export OMP_PLACES=cores
-        export OMP_STACKSIZE=2048M
-    elif [[ "${step}" = "efcs" ]]; then
-        export MPICH_MPIIO_HINTS="*:romio_cb_write=disable"
-        export FI_OFI_RXM_SAR_LIMIT=3145728
-    fi
+    export OMP_PLACES=cores
+    export OMP_STACKSIZE=2048M
+    export MPICH_MPIIO_HINTS="*:romio_cb_write=disable"
+    export FI_OFI_RXM_SAR_LIMIT=3145728
 
 elif [[ "${step}" = "upp" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_upp))
-
-    export NTHREADS_UPP=${nth_upp:-1}
-    [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
-    export APRUN_UPP="${launcher} -n ${npe_upp} -ppn ${npe_node_upp} --cpu-bind depth --depth ${NTHREADS_UPP}"
+    export NTHREADS_UPP=${NTHREADS1}
+    export APRUN_UPP="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_UPP}"
 
 elif [[ "${step}" = "atmos_products" ]]; then
 
     export USE_CFP="YES"  # Use MPMD for downstream product generation
 
-elif [[ "${step}" = "ecen" ]]; then
+elif [[ "${step}" = "oceanice_products" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_ecen))
+    export NTHREADS_OCNICEPOST=${NTHREADS1}
+    export APRUN_OCNICEPOST="${launcher} -n 1 -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_OCNICEPOST}"
 
-    export NTHREADS_ECEN=${nth_ecen:-${nth_max}}
-    [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max}
-    export APRUN_ECEN="${launcher} -n ${npe_ecen} -ppn ${npe_node_ecen} --cpu-bind depth --depth ${NTHREADS_ECEN}"
+elif [[ "${step}" = "ecen" ]]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-14}
-    [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max}
+    export NTHREADS_ECEN=${NTHREADSmax}
+    export APRUN_ECEN="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ECEN}"
+
+    export NTHREADS_CHGRES=${threads_per_task_chgres:-14}
+    [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
     export APRUN_CHGRES="time"
 
-    export NTHREADS_CALCINC=${nth_calcinc:-1}
-    [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max}
-    export APRUN_CALCINC="${launcher} -n ${npe_ecen}"
+    export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
+    [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
+    export APRUN_CALCINC="${APRUN}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_ecen} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "esfc" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_esfc))
-
-    export NTHREADS_ESFC=${nth_esfc:-${nth_max}}
-    [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max}
-    export APRUN_ESFC="${launcher} -n ${npe_esfc} -ppn ${npe_node_esfc} --cpu-bind depth --depth ${NTHREADS_ESFC}"
+    export NTHREADS_ESFC=${NTHREADSmax}
+    export APRUN_ESFC="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ESFC}"
 
-    export NTHREADS_CYCLE=${nth_cycle:-14}
-    [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max}
-    export APRUN_CYCLE="${launcher} -n ${npe_esfc} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
+    export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
+    [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
+    export APRUN_CYCLE="${APRUN} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}"
 
 elif [[ "${step}" = "epos" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_epos))
-
-    export NTHREADS_EPOS=${nth_epos:-${nth_max}}
-    [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max}
-    export APRUN_EPOS="${launcher} -n ${npe_epos} -ppn ${npe_node_epos} --cpu-bind depth --depth ${NTHREADS_EPOS}"
+    export NTHREADS_EPOS=${NTHREADSmax}
+    export APRUN_EPOS="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_EPOS}"
 
 elif [[ "${step}" = "postsnd" ]]; then
 
     export MPICH_MPIIO_HINTS_DISPLAY=1
     export OMP_NUM_THREADS=1
 
-    nth_max=$((npe_node_max / npe_node_postsnd))
+    export NTHREADS_POSTSND=${NTHREADS1}
+    export APRUN_POSTSND="${APRUN} --depth=${NTHREADS_POSTSND} --cpu-bind depth"
 
-    export NTHREADS_POSTSND=${nth_postsnd:-1}
-    [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max}
-    export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --depth=${NTHREADS_POSTSND} --cpu-bind depth"
-
-    export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1}
-    [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max}
-    export APRUN_POSTSNDCFP="${launcher} -np ${npe_postsndcfp} ${mpmd_opt}"
+    export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1}
+    [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task}
+    export APRUN_POSTSNDCFP="${launcher} -np ${ntasks_postsndcfp} ${mpmd_opt}"
 
 elif [[ "${step}" = "awips" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_awips))
-
-    export NTHREADS_AWIPS=${nth_awips:-2}
-    [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max}
-    export APRUN_AWIPSCFP="${launcher} -np ${npe_awips} ${mpmd_opt}"
+    export NTHREADS_AWIPS=${NTHREADS1}
+    export APRUN_AWIPSCFP="${launcher} -np ${ntasks} ${mpmd_opt}"
 
 elif [[ "${step}" = "gempak" ]]; then
 
-    if [[ ${CDUMP} == "gfs" ]]; then
-        npe_gempak=${npe_gempak_gfs}
-        npe_node_gempak=${npe_node_gempak_gfs}
-    fi
-
-    nth_max=$((npe_node_max / npe_node_gempak))
-
-    export NTHREADS_GEMPAK=${nth_gempak:-1}
-    [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max}
-    export APRUN_GEMPAKCFP="${launcher} -np ${npe_gempak} ${mpmd_opt}"
+    export NTHREADS_GEMPAK=${NTHREADS1}
+    export APRUN_GEMPAKCFP="${launcher} -np ${ntasks} ${mpmd_opt}"
 
 elif [[ "${step}" = "fit2obs" ]]; then
 
-    nth_max=$((npe_node_max / npe_node_fit2obs))
-
-    export NTHREADS_FIT2OBS=${nth_fit2obs:-1}
-    [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max}
-    export MPIRUN="${launcher} -np ${npe_fit2obs}"
+    export NTHREADS_FIT2OBS=${NTHREADS1}
+    export MPIRUN="${launcher} -np ${ntasks}"
 
 elif [[ "${step}" = "waveawipsbulls" ]]; then
 
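
For reference, a minimal standalone sketch of the shared thread-capping logic that the refactored env files above derive from config.resources instead of the old npe_* variables. The input values for ntasks, max_tasks_per_node, tasks_per_node, and threads_per_task are assumptions chosen only to make the arithmetic concrete; the derived names mirror those used in the hunks.

#! /usr/bin/env bash
# Illustrative only: example values stand in for what config.resources exports per step.
launcher="mpiexec -l"
ntasks=240               # assumed example
max_tasks_per_node=128   # assumed example
tasks_per_node=64        # assumed example
threads_per_task=4       # assumed example

max_threads_per_task=$((max_tasks_per_node / tasks_per_node))   # 2 with the values above
NTHREADSmax=${threads_per_task:-${max_threads_per_task}}
NTHREADS1=${threads_per_task:-1}
# Cap both at what the node layout can actually provide
[[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task}
[[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task}
APRUN="${launcher} -n ${ntasks}"

echo "NTHREADSmax=${NTHREADSmax} NTHREADS1=${NTHREADS1} APRUN='${APRUN}'"
# -> NTHREADSmax=2 NTHREADS1=2 APRUN='mpiexec -l -n 240'
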
diff --git a/gempak/fix/datatype.tbl b/gempak/fix/datatype.tbl
index e52e156de4..63b06c0826 100755
--- a/gempak/fix/datatype.tbl
+++ b/gempak/fix/datatype.tbl
@@ -102,10 +102,10 @@ LTNG         $OBS/ltng                 YYYYMMDDHH.ltng           CAT_MSC  SCAT_N
 !
 CLIMO        $GEMPAK/climo             climate_MM.mos            CAT_NIL  SCAT_NIL    1     -1     -1
 !
-GFS          $MODEL/gfs                gfs_YYYYMMDDHH            CAT_GRD  SCAT_FCT   -1     -1     -1
-F-GFS        $COMIN                    gfs_YYYYMMDDHHfFFF        CAT_GRD  SCAT_FCT   -1     -1     -1
-F-GFSP       $COMIN                    gfs_YYYYMMDDHHfFFF        CAT_GRD  SCAT_FCT   -1     -1     -1
-F-GFSHPC     $HPCGFS                   gfs_YYYYMMDDHHfFFF        CAT_GRD  SCAT_FCT   -1     -1     -1
+GFS          $MODEL/gfs                gfs_1p00_YYYYMMDDHH       CAT_GRD  SCAT_FCT   -1     -1     -1
+F-GFS        $COMIN                    gfs_1p00_YYYYMMDDHHfFFF   CAT_GRD  SCAT_FCT   -1     -1     -1
+F-GFSP       $COMIN                    gfs_1p00_YYYYMMDDHHfFFF   CAT_GRD  SCAT_FCT   -1     -1     -1
+F-GFSHPC     $HPCGFS                   gfs_1p00_YYYYMMDDHHfFFF   CAT_GRD  SCAT_FCT   -1     -1     -1
 GFSEXT       $MODEL/ens                gfs.YYYYMMDDHH            CAT_GRD  SCAT_FCT   -1     -1     -1
 GFS1         $MODEL/ens                gfs1.YYYYMMDDHH           CAT_GRD  SCAT_FCT   -1     -1     -1
 GFS2         $MODEL/ens                gfs2.YYYYMMDDHH           CAT_GRD  SCAT_FCT   -1     -1     -1
@@ -156,9 +156,9 @@ F-NAMP20     $COMIN                    nam20_YYYYMMDDHHfFFF      CAT_GRD  SCAT_F
 F-NAMP44     $COMIN                    nam44_YYYYMMDDHHfFFF      CAT_GRD  SCAT_FCT   -1     -1     -1
 F-THREATS    $COMIN                    ${NEST}_YYYYMMDDHHfFFF    CAT_GRD  SCAT_FCT   -1     -1     -1
 F-NAMHPC     $HPCNAM                   nam_YYYYMMDDHHfFFF        CAT_GRD  SCAT_FCT   -1     -1     -1
-GDAS         $MODEL/gdas               gdas_YYMMDDHH             CAT_GRD  SCAT_FCT   -1     -1     -1
-F-GDAS       $COMIN                    gdas_YYYYMMDDHHfFFF       CAT_GRD  SCAT_FCT   -1     -1     -1
-F-GFS        $COMIN                    gfs_YYYYMMDDHHfFFF        CAT_GRD  SCAT_FCT   -1     -1     -1
+GDAS         $MODEL/gdas               gdas_1p00_YYYYMMDDHH      CAT_GRD  SCAT_FCT   -1     -1     -1
+F-GDAS       $COMIN                    gdas_1p00_YYYYMMDDHHfFFF  CAT_GRD  SCAT_FCT   -1     -1     -1
+F-GFS        $COMIN                    gfs_1p00_YYYYMMDDHHfFFF   CAT_GRD  SCAT_FCT   -1     -1     -1
 F-HWRF       $COMIN                    hwrfp_YYYYMMDDHHfFFF_*    CAT_GRD  SCAT_FCT   -1     -1     -1
 F-HWRFN       $COMIN                   hwrfn_YYYYMMDDHHfFFF_*    CAT_GRD  SCAT_FCT   -1     -1     -1
 F-GHM        $COMIN                    ghmg_YYYYMMDDHHfFFF_*     CAT_GRD  SCAT_FCT   -1     -1     -1
diff --git a/gempak/fix/gfs_meta b/gempak/fix/gfs_meta
index 5ca99b4dc6..c86233214b 100755
--- a/gempak/fix/gfs_meta
+++ b/gempak/fix/gfs_meta
@@ -1,23 +1,23 @@
-$USHgempak/gfs_meta_us.sh 36 84 126 216
-$USHgempak/gfs_meta_bwx.sh 36 84 126 180
-$USHgempak/gfs_meta_comp.sh 36 84 126 
-$USHgempak/gfs_meta_ak.sh 36 84 132 216
-$USHgempak/gfs_meta_crb.sh 126 
-$USHgempak/gfs_meta_hur.sh 36 84 126 
-$USHgempak/gfs_meta_qpf.sh 36 84 132 216 
-$USHgempak/gfs_meta_precip.sh 36 84 132 216 384
-$USHgempak/gfs_meta_sa.sh 126  
-$USHgempak/gfs_meta_ver.sh 126  
-$USHgempak/gfs_meta_hi.sh 384
-$USHgempak/gfs_meta_nhsh.sh 384
-$USHgempak/gfs_meta_trop.sh 384
-$USHgempak/gfs_meta_usext.sh 384
-$USHgempak/gfs_meta_mar_ql.sh 24 48 96 180
-$USHgempak/gfs_meta_mar_comp.sh 126
-$USHgempak/gfs_meta_opc_na_ver 126
-$USHgempak/gfs_meta_opc_np_ver 126
-$USHgempak/gfs_meta_mar_atl.sh 180
-$USHgempak/gfs_meta_mar_pac.sh 180
-$USHgempak/gfs_meta_mar_ver.sh 48
-$USHgempak/gfs_meta_mar_skewt.sh 72
-$USHgempak/gfs_meta_sa2.sh 144
+${HOMEgfs}/gempak/ush/gfs_meta_us.sh 36 84 126 216
+${HOMEgfs}/gempak/ush/gfs_meta_bwx.sh 36 84 126 180
+${HOMEgfs}/gempak/ush/gfs_meta_comp.sh 36 84 126
+${HOMEgfs}/gempak/ush/gfs_meta_ak.sh 36 84 132 216
+${HOMEgfs}/gempak/ush/gfs_meta_crb.sh 126
+${HOMEgfs}/gempak/ush/gfs_meta_hur.sh 36 84 126
+${HOMEgfs}/gempak/ush/gfs_meta_qpf.sh 36 84 132 216
+${HOMEgfs}/gempak/ush/gfs_meta_precip.sh 36 84 132 216 384
+${HOMEgfs}/gempak/ush/gfs_meta_sa.sh 126
+${HOMEgfs}/gempak/ush/gfs_meta_ver.sh 126
+${HOMEgfs}/gempak/ush/gfs_meta_hi.sh 384
+${HOMEgfs}/gempak/ush/gfs_meta_nhsh.sh 384
+${HOMEgfs}/gempak/ush/gfs_meta_trop.sh 384
+${HOMEgfs}/gempak/ush/gfs_meta_usext.sh 384
+${HOMEgfs}/gempak/ush/gfs_meta_mar_ql.sh 24 48 96 180
+${HOMEgfs}/gempak/ush/gfs_meta_mar_comp.sh 126
+${HOMEgfs}/gempak/ush/gfs_meta_opc_na_ver 126
+${HOMEgfs}/gempak/ush/gfs_meta_opc_np_ver 126
+${HOMEgfs}/gempak/ush/gfs_meta_mar_atl.sh 180
+${HOMEgfs}/gempak/ush/gfs_meta_mar_pac.sh 180
+${HOMEgfs}/gempak/ush/gfs_meta_mar_ver.sh 48
+${HOMEgfs}/gempak/ush/gfs_meta_mar_skewt.sh 72
+${HOMEgfs}/gempak/ush/gfs_meta_sa2.sh 144
diff --git a/gempak/ush/gdas_ecmwf_meta_ver.sh b/gempak/ush/gdas_ecmwf_meta_ver.sh
index e4fffd9c8a..b038be6c25 100755
--- a/gempak/ush/gdas_ecmwf_meta_ver.sh
+++ b/gempak/ush/gdas_ecmwf_meta_ver.sh
@@ -1,75 +1,39 @@
-#!/bin/sh
-#
-# Metafile Script : gdas_ecmwf_meta_ver
+#! /usr/bin/env bash
 #
 # Creates a loop comparing the 6 hr gdas fcst to the previous 7 days
 # of ecmwf fcsts
 #
-# Log :
-# J. Carr/HPC   3/2001   New metafile for verification of ecmwf.
-# J. Carr/HPC   5/2001   Added a mn variable for a/b side dbnet root variable.
-# M. Klein/HPC 11/2004   Changed verification grid from fnl to gdas
-# M. Klein/HPC  2/2005   Changed location of working directory to /ptmp
-# M. Klein/HPC 11/2006   Modify to run in production.
-#
-
-#cd $DATA
 
-set -xa
-
-if [ $cyc -ne "06" ] ; then
-    exit
-fi
+source "${HOMEgfs}/ush/preamble.sh"
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-cyc=12
+cyc2=12
 device="nc | ecmwfver.meta"
-PDY2=$(echo ${PDY} | cut -c3-)
 
 #
 # Copy in datatype table to define gdfile type
 #
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File datatype.tbl does not exist."
-   exit $err
+if (( err != 0 )) ; then
+   echo "FATAL ERROR: File datatype.tbl does not exist."
+   exit "${err}"
 fi
 
-#
-# DEFINE YESTERDAY
-date1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-sdate1=$(echo ${date1} | cut -c 3-)
-# DEFINE 2 DAYS AGO
-date2=$($NDATE -48 ${PDY}${cyc} | cut -c -8)
-sdate2=$(echo ${date2} | cut -c 3-)
-# DEFINE 3 DAYS AGO
-date3=$($NDATE -72 ${PDY}${cyc} | cut -c -8)
-sdate3=$(echo ${date3} | cut -c 3-)
-# DEFINE 4 DAYS AGO
-date4=$($NDATE -96 ${PDY}${cyc} | cut -c -8)
-sdate4=$(echo ${date4} | cut -c 3-)
-# DEFINE 5 DAYS AGO
-date5=$($NDATE -120 ${PDY}${cyc} | cut -c -8)
-sdate5=$(echo ${date5} | cut -c 3-)
-# DEFINE 6 DAYS AGO
-date6=$($NDATE -144 ${PDY}${cyc} | cut -c -8)
-sdate6=$(echo ${date6} | cut -c 3-)
-# DEFINE 7 DAYS AGO
-date7=$($NDATE -168 ${PDY}${cyc} | cut -c -8)
-sdate7=$(echo ${date7} | cut -c 3-)
-
-vergrid="F-GDAS | ${PDY2}/0600"
+export COMIN="gdas.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
+vergrid="F-GDAS | ${PDY:2}/0600"
 fcsthr="0600f006"
 
 # GENERATING THE METAFILES.
 areas="SAM NAM"
-verdays="${date1} ${date2} ${date3} ${date4} ${date5} ${date6} ${date7}"
 
-for area in $areas
-    do
-    if [ $area == "NAM" ] ; then
+for area in ${areas}; do
+    if [[ "${area}" == "NAM" ]] ; then
         garea="5.1;-124.6;49.6;-11.9"
         proj="STR/90.0;-95.0;0.0"
         latlon="0"
@@ -80,37 +44,18 @@ for area in $areas
         latlon="1/10/1/2/10;10"
         run=" "
     fi
-    for verday in $verdays
-        do
-        verddate=$(echo ${verday} | cut -c 3-)
-        if [ ${verday} -eq ${date1} ] ; then
-            dgdattim=f024
-            sdatenum=$sdate1
-        elif [ ${verday} -eq ${date2} ] ; then
-            dgdattim=f048
-            sdatenum=$sdate2
-        elif [ ${verday} -eq ${date3} ] ; then
-            dgdattim=f072
-            sdatenum=$sdate3
-        elif [ ${verday} -eq ${date4} ] ; then
-            dgdattim=f096
-            sdatenum=$sdate4
-        elif [ ${verday} -eq ${date5} ] ; then
-            dgdattim=f120
-            sdatenum=$sdate5
-        elif [ ${verday} -eq ${date6} ] ; then
-            dgdattim=f144
-            sdatenum=$sdate6
-        elif [ ${verday} -eq ${date7} ] ; then
-            dgdattim=f168
-            sdatenum=$sdate7
+    for (( fhr=24; fhr<=168; fhr+=24 )); do
+        dgdattim=$(printf "f%03d" "${fhr}")
+        sdatenum=$(date --utc +%y%m%d -d "${PDY} ${cyc2} - ${fhr} hours")
+
+        if [[ ! -L "ecmwf.20${sdatenum}" ]]; then
+            ${NLN} "${COMINecmwf}/ecmwf.20${sdatenum}/gempak" "ecmwf.20${sdatenum}"
         fi
-        # JY grid="$COMROOT/nawips/${envir}/ecmwf.20${sdatenum}/ecmwf_glob_20${sdatenum}12"
-        grid="${COMINecmwf}.20${sdatenum}/gempak/ecmwf_glob_20${sdatenum}12"
+        gdfile="ecmwf.20${sdatenum}/ecmwf_glob_20${sdatenum}12"
 
-# 500 MB HEIGHT METAFILE
+        # 500 MB HEIGHT METAFILE
 
-$GEMEXE/gdplot2_nc << EOFplt
+        "${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL = mepowo.gsf
 PROJ     = ${proj}
 GAREA    = ${garea}
@@ -134,7 +79,7 @@ line     = 6/1/3
 title    = 6/-2/~ GDAS 500 MB HGT (6-HR FCST)|~${area} 500 HGT DF
 r
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -157,7 +102,7 @@ clear    = yes
 latlon   = ${latlon}
 r
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -165,7 +110,7 @@ title    = 5/-1/~ ECMWF PMSL
 clear    = no
 r
 
-PROJ     = 
+PROJ     =
 GAREA    = bwus
 gdfile   = ${vergrid}
 gdattim  = ${fcsthr}
@@ -181,7 +126,7 @@ clear    = yes
 latlon   = ${latlon}
 ${run}
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -195,28 +140,28 @@ EOFplt
     done
 done
 
-export err=$?;err_chk
+export err=$?
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l ecmwfver.meta
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-    mkdir -p -m 775 ${COMOUTecmwf}.${PDY}/meta
-    mv ecmwfver.meta ${COMOUTecmwf}.${PDY}/meta/ecmwfver_${PDY}_${cyc}
-    export err=$?
-    if [[ $err -ne 0 ]] ; then
-       echo " File ecmwfver.meta does not exist."
-       exit $err
-    fi
+if (( err != 0 )) || [[ ! -s ecmwfver.meta ]]; then
+    echo "FATAL ERROR: Failed to create ecmwf meta file"
+    exit "${err}"
+fi
 
-    if [ $SENDDBN = "YES" ] ; then
-        ${DBNROOT}/bin/dbn_alert MODEL ECMWFVER_HPCMETAFILE $job \
-        ${COMOUTecmwf}.${PDY}/meta/ecmwfver_${PDY}_${cyc}
-    fi
+mv ecmwfver.meta "${COM_ATMOS_GEMPAK_META}/ecmwfver_${PDY}_${cyc2}"
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: Failed to move meta file to ${COM_ATMOS_GEMPAK_META}/ecmwfver_${PDY}_${cyc2}"
+    exit "${err}"
+fi
+
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL ECMWFVER_HPCMETAFILE "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/ecmwfver_${PDY}_${cyc2}"
 fi
 
 exit
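
As an illustration, here is a minimal sketch of the verification-date arithmetic that replaces the hard-coded per-day blocks above, assuming GNU date is available; cyc2=12 matches the script, while the PDY value is only an example.

#! /usr/bin/env bash
PDY=20240615   # assumed example date
cyc2=12        # verification cycle used by the script

for (( fhr=24; fhr<=168; fhr+=24 )); do
    dgdattim=$(printf "f%03d" "${fhr}")
    sdatenum=$(date --utc +%y%m%d -d "${PDY} ${cyc2} - ${fhr} hours")
    # Each 12Z ECMWF run from 1-7 days back is valid at today's 12Z at this forecast hour
    echo "${dgdattim} -> ecmwf_glob_20${sdatenum}12"
done
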
diff --git a/gempak/ush/gdas_meta_loop.sh b/gempak/ush/gdas_meta_loop.sh
index cd0d9b781b..3191789c9b 100755
--- a/gempak/ush/gdas_meta_loop.sh
+++ b/gempak/ush/gdas_meta_loop.sh
@@ -1,91 +1,59 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gdas_meta_loop
 #
-# Log :
-# D.W.Plummer/NCEP   2/97      Add log header
-# J. Carr/HPC        3/98      Changed to gdplot2
-# J. Carr/HPC        8/98      Changed map to medium resolution
-# J. Carr/HPC        2/99      Changed skip to 0
-# J. Carr/HPC        2/01      Implemented usage on IBM operationally.
-# J. Carr/HPC        5/2001    Added a mn variable for a/b side dbnet root variable.
-# M. Klein/HPC      11/2004    Change fnl to gdas
-# M. Klein/HPC       2/2005    Changed location of working directory to /ptmp 
-# M. Klein/HPC      11/2006    Modify for production on CCS
 
-#cd $DATA
-
-set -xa
+source "${HOMEgfs}/ush/preamble.sh"
 
 device="nc | gdasloop.meta"
 
-PDY2=$(echo $PDY | cut -c3-)
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L "${COMIN}" ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
-if [ "$envir" = "para" ] ; then
+if [[ "${envir}" == "para" ]] ; then
    export m_title="GDASP"
 else
    export m_title="GDAS"
 fi
 
-export COMPONENT=${COMPONENT:-atmos}
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-#
-# Copy in datatype table to define gdfile type
-#
-cp $FIXgempak/datatype.tbl datatype.tbl
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File datatype.tbl does not exist."
-   exit $err
-fi
-
-#
-# Define previous days
-#
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDYm2=$($NDATE -48 ${PDY}${cyc} | cut -c -8)
-PDYm3=$($NDATE -72 ${PDY}${cyc} | cut -c -8)
-PDYm4=$($NDATE -96 ${PDY}${cyc} | cut -c -8)
-PDYm5=$($NDATE -120 ${PDY}${cyc} | cut -c -8)
-PDYm6=$($NDATE -144 ${PDY}${cyc} | cut -c -8)
-#
-
-verdays="$PDYm6 $PDYm5 $PDYm4 $PDYm3 $PDYm2 $PDYm1 $PDY"
-
-for day in $verdays
-    do
-    PDY2=$(echo $day | cut -c 3-)
-    if [ $day -eq $PDY ] ; then
-        if [ $cyc -eq "00" ] ; then
-            cycles="00"   
-        elif [ $cyc -eq "06" ] ; then
-            cycles="00 06"
-        elif [ $cyc -eq "12" ] ; then
-            cycles="00 06 12"
-        elif [ $cyc -eq "18" ] ; then
-            cycles="00 06 12 18"
+for (( fhr=24; fhr<=144; fhr+=24 )); do
+    day=$(date --utc +%Y%m%d -d "${PDY} ${cyc} - ${fhr} hours")
+    if (( ${day}${cyc} < SDATE )); then
+        # Stop looking because these cycles weren't run
+        if (( fhr == 24 )); then
+            exit
+        else
+            break
         fi
-    else
-        cycles="00 06 12 18"
     fi
 
-    for cycle in $cycles
-        do
-#  Test with GDAS in PROD
-#        grid="${COMROOT}/nawips/${envir}/gdas.${day}/gdas_${day}${cycle}f000"
-         export COMIN=${COMINgdas}.${day}/${cycle}/${COMPONENT}/gempak
-         grid="${COMINgdas}.${day}/${cycle}/${COMPONENT}/gempak/gdas_${day}${cycle}f000"
+    cycles=$(seq -s ' ' -f "%02g" 0 6 "${cyc}")
+    for cycle in ${cycles}; do
+        #  Test with GDAS in PROD
+        YMD=${day} HH=${cycle} GRID=1p00 declare_from_tmpl "COM_ATMOS_GEMPAK_1p00_past:COM_ATMOS_GEMPAK_TMPL"
+        export COMIN="${RUN}.${day}${cycle}"
+        if [[ ! -L "${COMIN}" ]]; then
+            ${NLN} "${COM_ATMOS_GEMPAK_1p00_past}" "${COMIN}"
+        fi
+        gdfile="${COMIN}/gdas_1p00_${day}${cycle}f000"
 
-$GEMEXE/gdplot2_nc << EOF
+        "${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL = mepowo.gsf
-GDFILE	= $grid
-GDATTIM	= F00
-DEVICE	= $device
+GDFILE	= ${gdfile}
+GDATTIM	= F000
+DEVICE	= ${device}
 PANEL	= 0
 TEXT	= m/21//hw
 CONTUR	= 2
-PROJ    =  
+PROJ    =
 GAREA   = nam
 LATLON	= 0
 CLEAR	= yes
@@ -106,9 +74,9 @@ CLRBAR  = 1/V/LL              !0
 WIND    = am0
 MAP	= 1/1/1
 REFVEC  =
-TITLE   = 1/0/~ $m_title PW, EST MSLP, THICKNESS|~NAM PRCP WATER!0
+TITLE   = 1/0/~ ${m_title} PW, EST MSLP, THICKNESS|~NAM PRCP WATER!0
 r
- 
+
 PROJ    = STR/90;-105;0
 GAREA   = 2;-139;27;-22
 LATLON  = 1/1/1//15;15
@@ -124,11 +92,11 @@ LINE    = 7/5/1/2            !20/1/2/1
 FINT    = 15;21;27;33;39;45;51;57
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
 WIND    = 0
 REFVEC  =
-TITLE   = 5/-2/~ $m_title @ HGT AND VORTICITY|~NAM @ HGT AND VORT!0
+TITLE   = 5/-2/~ ${m_title} @ HGT AND VORTICITY|~NAM @ HGT AND VORT!0
 r
 
 GLEVEL	= 250
@@ -146,50 +114,24 @@ HLSYM	=
 CLRBAR	= 1
 WIND	= 0                !Bk9/.7/2/b/!
 REFVEC	=
-TITLE	= 5/-2/~ $m_title @ HGHT, ISOTACHS AND WIND (KTS)|~NAM @ HGT & WIND!0
+TITLE	= 5/-2/~ ${m_title} @ HGHT, ISOTACHS AND WIND (KTS)|~NAM @ HGT & WIND!0
 FILTER  = n
 r
 
 exit
 EOF
 
-    done
-
-done
-
-for day in $verdays
-    do
-    PDY2=$(echo $day | cut -c 3-)
-    if [ $day -eq $PDY ] ; then
-        if [ $cyc -eq "00" ] ; then
-            cycles="00"
-        elif [ $cyc -eq "06" ] ; then
-            cycles="00 06"
-        elif [ $cyc -eq "12" ] ; then
-            cycles="00 06 12"
-        elif [ $cyc -eq "18" ] ; then
-            cycles="00 06 12 18"
-        fi
-    else
-        cycles="00 06 12 18"
-    fi
+        gdfile="${COMIN}/gdas_1p00_${day}${cycle}f000"
 
-    for cycle in $cycles
-        do
-#  Test with GDAS in PROD
-#        grid="${COMROOT}/nawips/${envir}/gdas.${day}/gdas_${day}${cycle}f000"
-         export COMIN=${COMINgdas}.${day}/${cycle}/${COMPONENT}/gempak
-         grid="${COMINgdas}.${day}/${cycle}/${COMPONENT}/gempak/gdas_${day}${cycle}f000"
-   
-$GEMEXE/gdplot2_nc << EOF
+"${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL = mepowo.gsf
-GDFILE	= $grid
-GDATTIM	= F00
-DEVICE	= $device
+GDFILE	= ${gdfile}
+GDATTIM	= F000
+DEVICE	= ${device}
 PANEL	= 0
 TEXT	= m/21//hw
 CONTUR	= 1
-PROJ    =  
+PROJ    =
 GAREA   = samps
 LATLON	= 1/1/1//15;15
 CLEAR	= yes
@@ -210,9 +152,9 @@ CLRBAR  = 1/V/LL             !0
 WIND    = am0
 MAP	= 1/1/1
 REFVEC  =
-TITLE   = 1/0/~ $m_title PW, MSLP, THICKNESS|~SAM PRCP WATER!0
+TITLE   = 1/0/~ ${m_title} PW, MSLP, THICKNESS|~SAM PRCP WATER!0
 r
- 
+
 GLEVEL  = 500
 GVCORD  = PRES
 SKIP    = 0                  !0       !0                  !0        !0
@@ -225,11 +167,11 @@ LINE    = 7/5/1/2            !29/5/1/2!7/5/1/2            !29/5/1/2 !20/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!        !2;6/X;N/10-99;10-99!         !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
 WIND    = 0
 REFVEC  =
-TITLE   = 5/-2/~ $m_title @ HGT AND VORTICITY|~SAM @ HGT & VORT!0
+TITLE   = 5/-2/~ ${m_title} @ HGT AND VORTICITY|~SAM @ HGT & VORT!0
 r
 
 GLEVEL	= 250
@@ -247,7 +189,7 @@ HLSYM	=
 CLRBAR	= 1
 WIND	= 0                !Bk9/.7/2/b/!
 REFVEC	=
-TITLE	= 5/-2/~ $m_title @ HGHT, ISOTACHS AND WIND (KTS)|~SAM @ HGT & WIND!0
+TITLE	= 5/-2/~ ${m_title} @ HGHT, ISOTACHS AND WIND (KTS)|~SAM @ HGT & WIND!0
 FILTER  = n
 r
 
@@ -261,11 +203,11 @@ TYPE    = c                            !c
 CINT    = 1                            !4
 LINE    = 22/5/2/1                     !10/1/1
 FINT    =
-FLINE   = 
+FLINE   =
 HILO    =                              !26;2/H#;L#/1020-1070;900-1012/3/30;30/y
 HLSYM   =                              !2;1.5//21//hw
 WIND    = 0
-TITLE   = 1/-1/~ $m_title PMSL, 1000-850mb THKN|~SAM PMSL, 1000-850 TK!0
+TITLE   = 1/-1/~ ${m_title} PMSL, 1000-850mb THKN|~SAM PMSL, 1000-850 TK!0
 r
 
 exit
@@ -274,27 +216,28 @@ EOF
     done
 done
 
-export err=$?;err_chk
+export err=$?
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l gdasloop.meta
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s gdasloop.meta ]]; then
+    echo "FATAL ERROR: Failed to create gdasloop meta file"
+    exit "${err}"
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-    mv gdasloop.meta ${COMOUT}/gdas_${PDY}_${cyc}_loop
-    export err=$?
-    if [[ $err -ne 0 ]] ; then
-      echo " File gdasloop.meta does not exist."
-      exit $err
-    fi
+mv gdasloop.meta "${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_loop"
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: Failed to move meta file to ${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_loop"
+    exit "${err}"
+fi
 
-    if [ $SENDDBN = "YES" ] ; then
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        $COMOUT/gdas_${PDY}_${cyc}_loop
-    fi
+if [[ ${SENDDBN} == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_loop"
 fi
 
 exit
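
Shown below is a small sketch of the past-day and cycle selection above, with assumed example values for PDY, cyc, and SDATE; it only prints which 1p00 GEMPAK grids the loop would open.

#! /usr/bin/env bash
PDY=20240615       # assumed example
cyc=18             # assumed example
SDATE=2024061400   # assumed example experiment start

for (( fhr=24; fhr<=144; fhr+=24 )); do
    day=$(date --utc +%Y%m%d -d "${PDY} ${cyc} - ${fhr} hours")
    if (( ${day}${cyc} < SDATE )); then
        break  # cycles before the experiment start were never run
    fi
    # 00Z up to the current cycle hour, in 6-hour steps
    for cycle in $(seq -s ' ' -f "%02g" 0 6 "${cyc}"); do
        echo "would plot gdas_1p00_${day}${cycle}f000"
    done
done
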
diff --git a/gempak/ush/gdas_meta_na.sh b/gempak/ush/gdas_meta_na.sh
index 6c4768cfb7..6a7e0a28c3 100755
--- a/gempak/ush/gdas_meta_na.sh
+++ b/gempak/ush/gdas_meta_na.sh
@@ -1,41 +1,33 @@
-#!/bin/sh 
-
+#! /usr/bin/env bash
 #
 # Metafile Script : gdas_meta_na
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# LJ REED 4/10/98 added line to define BIN_DIR
-# J. Carr/HPC        2/99   Changed skip to 0
-# B. Gordon          4/00   Modified for production on IBM-SP
-#                           and changed gdplot_nc -> gdplot2_nc
-# D. Michaud         4/01   Added logic to display different title
-#                           for parallel runs
-# J. Carr           11/04   Added a ? in all title/TITLE lines.
-# J. Carr           11/04   Changed GAREA and PROJ to match GFS and NAM.
-#
-
-cd $DATA
 
-set -xa
+source "${HOMEgfs}/ush/preamble.sh"
 
 device="nc | gdas.meta"
 
-PDY2=$(echo $PDY | cut -c3-)
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L "${COMIN}" ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
-if [ "$envir" = "para" ] ; then
+if [[ "${envir}" == "para" ]] ; then
    export m_title="GDASP"
 else
    export m_title="GDAS"
 fi
 
 export pgm=gdplot2_nc; prep_step
-startmsg
 
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-GDAS | ${PDY2}/${cyc}00
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-GDAS | ${PDY:2}/${cyc}00
 GDATTIM	= FALL
-DEVICE	= $device
+DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
 CONTUR	= 2
@@ -51,72 +43,70 @@ PROJ    = str/90;-105;0
 LATLON  = 1
 
 
-restore $USHgempak/restore/pmsl_thkn.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
+TITLE	= 5/-2/~ ? ${m_title} PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
 l
 ru
 
 
-restore $USHgempak/restore/850mb_hght_tmpc.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
 SKIP    = 0         !0         !0         !0         !/3
 FILTER  = NO
-TITLE	= 5/-2/~ ? $m_title @ HGT, TEMP AND WIND (KTS)|~@ HGT, TMP, WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, TEMP AND WIND (KTS)|~@ HGT, TMP, WIND!0
 l
 ru
 
 
-restore $USHgempak/restore/700mb_hght_relh_omeg.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
 l
 ru
 
 
-restore $USHgempak/restore/500mb_hght_absv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
 l
 ru
 
 
-restore $USHgempak/restore/250mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
 l
 ru
 
 exit
 EOF
-export err=$?;err_chk
+export err=$?
 
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l gdas.meta
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-  mv gdas.meta ${COMOUT}/gdas_${PDY}_${cyc}_na
-  export err=$?
-  if [[ $err -ne 0 ]] ; then
-    echo " File gdas.meta does not exist."
-    exit $err
-  fi
-
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gdas_${PDY}_${cyc}_na
-  fi
+if (( err != 0 )) || [[ ! -s gdas.meta ]]; then
+    echo "FATAL ERROR: Failed to create gempak meta file for North America"
+    exit "${err}"
 fi
 
-#
+mv gdas.meta "${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_na"
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: Failed to move meta file to ${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_na"
+    exit "${err}"
+fi
+
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gdas_${PDY}_${cyc}_na"
+fi
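
To make them concrete, here is a tiny sketch of two idioms used throughout the refactored GEMPAK scripts above: bash substring expansion in place of cut for the two-digit-year date, and the combined return-code/file-size check performed because GEMPAK can exit 0 even when it produced nothing. PDY and the metafile name are assumed examples.

#! /usr/bin/env bash
PDY=20240615     # assumed example
meta=gdas.meta   # assumed example metafile name

# ${PDY:2} drops the century, replacing the old $(echo ${PDY} | cut -c3-)
echo "GDFILE = F-GDAS | ${PDY:2}/0000"   # -> F-GDAS | 240615/0000

# GEMPAK does not always return non-zero on failure, so check the output file too
err=0
if (( err != 0 )) || [[ ! -s ${meta} ]]; then
    echo "FATAL ERROR: Failed to create ${meta}"
fi
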
diff --git a/gempak/ush/gdas_ukmet_meta_ver.sh b/gempak/ush/gdas_ukmet_meta_ver.sh
index 845fa1cc6b..be3d459e8c 100755
--- a/gempak/ush/gdas_ukmet_meta_ver.sh
+++ b/gempak/ush/gdas_ukmet_meta_ver.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gdas_ukmet_meta_ver
 #
@@ -13,83 +13,27 @@
 # M. Klein/HPC   11/2006   Modify to run in production.
 #
 
-#cd $DATA
+source "${HOMEgfs}/ush/preamble.sh"
 
-set -xa
-
-if [ $cyc -ne "06" ] ; then
-    exit
-fi
-
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
 device="nc | ukmetver_12.meta"
-PDY2=$(echo ${PDY} | cut -c3-)
-
-#
-# Copy in datatype table to define gdfile type
-#
-cp $FIXgempak/datatype.tbl datatype.tbl
-
-#
-# DEFINE 1 CYCLE AGO
-dc1=$($NDATE -06 ${PDY}${cyc} | cut -c -10)
-date1=$(echo ${dc1} | cut -c -8)
-sdate1=$(echo ${dc1} | cut -c 3-8)
-cycle1=$(echo ${dc1} | cut -c 9,10)
-# DEFINE 2 CYCLES AGO
-dc2=$($NDATE -18 ${PDY}${cyc} | cut -c -10)
-date2=$(echo ${dc2} | cut -c -8)
-sdate2=$(echo ${dc2} | cut -c 3-8)
-cycle2=$(echo ${dc2} | cut -c 9,10)
-# DEFINE 3 CYCLES AGO
-dc3=$($NDATE -30 ${PDY}${cyc} | cut -c -10)
-date3=$(echo ${dc3} | cut -c -8)
-sdate3=$(echo ${dc3} | cut -c 3-8)
-cycle3=$(echo ${dc3} | cut -c 9,10)
-# DEFINE 4 CYCLES AGO
-dc4=$($NDATE -42 ${PDY}${cyc} | cut -c -10)
-date4=$(echo ${dc4} | cut -c -8)
-sdate4=$(echo ${dc4} | cut -c 3-8)
-cycle4=$(echo ${dc4} | cut -c 9,10)
-# DEFINE 5 CYCLES AGO
-dc5=$($NDATE -54 ${PDY}${cyc} | cut -c -10)
-date5=$(echo ${dc5} | cut -c -8)
-sdate5=$(echo ${dc5} | cut -c 3-8)
-cycle5=$(echo ${dc5} | cut -c 9,10)
-# DEFINE 6 CYCLES AGO
-dc6=$($NDATE -66 ${PDY}${cyc} | cut -c -10)
-date6=$(echo ${dc6} | cut -c -8)
-sdate6=$(echo ${dc6} | cut -c 3-8)
-cycle6=$(echo ${dc6} | cut -c 9,10)
-# DEFINE 7 CYCLES AGO
-dc7=$($NDATE -90 ${PDY}${cyc} | cut -c -10)
-date7=$(echo ${dc7} | cut -c -8)
-sdate7=$(echo ${dc7} | cut -c 3-8)
-cycle7=$(echo ${dc7} | cut -c 9,10)
-# DEFINE 8 CYCLES AGO
-dc8=$($NDATE -114 ${PDY}${cyc} | cut -c -10)
-date8=$(echo ${dc8} | cut -c -8)
-sdate8=$(echo ${dc8} | cut -c 3-8)
-cycle8=$(echo ${dc8} | cut -c 9,10)
-# DEFINE 9 CYCLES AGO
-dc9=$($NDATE -138 ${PDY}${cyc} | cut -c -10)
-date9=$(echo ${dc9} | cut -c -8)
-sdate9=$(echo ${dc9} | cut -c 3-8)
-cycle9=$(echo ${dc9} | cut -c 9,10)
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 # SET CURRENT CYCLE AS THE VERIFICATION GRIDDED FILE.
-vergrid="F-GDAS | ${PDY2}/0600"
+export COMIN="gdas.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
+vergrid="F-GDAS | ${PDY:2}/0600"
 fcsthr="0600f006"
 
 # SET WHAT RUNS TO COMPARE AGAINST BASED ON MODEL CYCLE TIME.
 areas="SAM NAM"
-verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9}"
 
 # GENERATING THE METAFILES.
-for area in $areas
-    do 
-    if [ ${area} = "NAM" ] ; then
+for area in ${areas}; do
+    if [[ "${area}" == "NAM" ]] ; then
         garea="5.1;-124.6;49.6;-11.9"
         proj="STR/90.0;-95.0;0.0"
         latlon="0"
@@ -100,50 +44,23 @@ for area in $areas
         latlon="1/10/1/2/10;10"
         run=" "
     fi
-    for verday in $verdays
-        do
-        if [ ${verday} -eq ${dc1} ] ; then
-            dgdattim=f012
-            sdatenum=$sdate1
-            cyclenum=$cycle1
-        elif [ ${verday} -eq ${dc2} ] ; then
-            dgdattim=f024
-            sdatenum=$sdate2
-            cyclenum=$cycle2
-        elif [ ${verday} -eq ${dc3} ] ; then
-            dgdattim=f036
-            sdatenum=$sdate3
-            cyclenum=$cycle3
-        elif [ ${verday} -eq ${dc4} ] ; then
-            dgdattim=f048
-            sdatenum=$sdate4
-            cyclenum=$cycle4
-        elif [ ${verday} -eq ${dc5} ] ; then
-            dgdattim=f060
-            sdatenum=$sdate5
-            cyclenum=$cycle5
-        elif [ ${verday} -eq ${dc6} ] ; then
-            dgdattim=f072
-            sdatenum=$sdate6
-            cyclenum=$cycle6
-        elif [ ${verday} -eq ${dc7} ] ; then
-            dgdattim=f096
-            sdatenum=$sdate7
-            cyclenum=$cycle7
-        elif [ ${verday} -eq ${dc8} ] ; then
-            dgdattim=f120
-            sdatenum=$sdate8
-            cyclenum=$cycle8
-        elif [ ${verday} -eq ${dc9} ] ; then
-            dgdattim=f144
-            sdatenum=$sdate9
-            cyclenum=$cycle9
+
+    fhrs=$(seq -s ' ' 12 12 72)
+    fhrs="${fhrs} $(seq -s ' ' 96 24 144)"
+    for fhr in ${fhrs}; do
+        stime=$(date --utc +%y%m%d%H -d "${PDY} ${cyc} - $((fhr - 6)) hours")
+        dgdattim=$(printf "f%03d" "${fhr}")
+        sdatenum=${stime:0:6}
+        cyclenum=${stime:6}
+
+        if [[ ! -L "ukmet.20${sdatenum}" ]]; then
+            ${NLN} "${COMINukmet}/ukmet.20${sdatenum}/gempak" "ukmet.20${sdatenum}"
         fi
-        grid="${COMINukmet}.20${sdatenum}/gempak/ukmet_20${sdatenum}${cyclenum}${dgdattim}"
+        gdfile="ukmet.20${sdatenum}/ukmet_20${sdatenum}${cyclenum}${dgdattim}"
 
-# 500 MB HEIGHT METAFILE
+        # 500 MB HEIGHT METAFILE
 
-$GEMEXE/gdplot2_nc << EOFplt
+        "${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL = mepowo.gsf
 PROJ     = ${proj}
 GAREA    = ${garea}
@@ -167,7 +84,7 @@ line     = 6/1/3
 title    = 6/-2/~ GDAS 500 MB HGT (6-HR FCST)|~${area} 500 HGT DIFF
 r
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -189,7 +106,7 @@ clear    = yes
 latlon   = ${latlon}
 r
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -197,7 +114,7 @@ title    = 5/-1/~ UKMET PMSL
 clear    = no
 r
 
-PROJ     = 
+PROJ     =
 GAREA    = bwus
 gdfile   = ${vergrid}
 gdattim  = ${fcsthr}
@@ -213,7 +130,7 @@ clear    = yes
 latlon   = ${latlon}
 ${run}
 
-gdfile   = ${grid}
+gdfile   = ${gdfile}
 gdattim  = ${dgdattim}
 line     = 5/1/3
 contur   = 4
@@ -226,22 +143,28 @@ EOFplt
     done
 done
 
-export err=$?;err_chk
+export err=$?
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l ukmetver_12.meta
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-    mkdir -p -m 775 ${COMOUTukmet}/ukmet.${PDY}/meta/
-    mv ukmetver_12.meta ${COMOUTukmet}/ukmet.${PDY}/meta/ukmetver_${PDY}_12
-    if [ $SENDDBN = "YES" ] ; then
-        ${DBNROOT}/bin/dbn_alert MODEL UKMETVER_HPCMETAFILE $job \
-        ${COMOUTukmet}/ukmet.${PDY}/meta/ukmetver_${PDY}_12
-    fi
+if (( err != 0 )) || [[ ! -s ukmetver_12.meta ]]; then
+    echo "FATAL ERROR: Failed to create ukmet meta file"
+    exit "${err}"
+fi
+
+mv ukmetver_12.meta "${COM_ATMOS_GEMPAK_META}/ukmetver_${PDY}_12"
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: Failed to move meta file to ${COM_ATMOS_GEMPAK_META}/ukmetver_${PDY}_12"
+    exit "${err}"
+fi
+
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL UKMETVER_HPCMETAFILE "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/ukmetver_${PDY}_12"
 fi
 
 exit
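
Below is a minimal sketch of the UKMET forecast-hour list and run-time split above, assuming GNU date and an example PDY. The verifying GDAS grid is the 06Z run at f006 (valid 12Z), so each UKMET run time sits fhr hours before 12Z, and the yymmddHH string splits into a run date and a run cycle.

#! /usr/bin/env bash
PDY=20240615   # assumed example
cyc=06         # the GDAS cycle this verification is built around

fhrs="$(seq -s ' ' 12 12 72) $(seq -s ' ' 96 24 144)"
for fhr in ${fhrs}; do
    dgdattim=$(printf "f%03d" "${fhr}")
    stime=$(date --utc +%y%m%d%H -d "${PDY} ${cyc} - $((fhr - 6)) hours")
    sdatenum=${stime:0:6}   # yymmdd of the UKMET run
    cyclenum=${stime:6}     # hh of the UKMET run (00 or 12)
    echo "${dgdattim} <- ukmet_20${sdatenum}${cyclenum}${dgdattim}"
done
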
diff --git a/gempak/ush/gempak_gdas_f000_gif.sh b/gempak/ush/gempak_gdas_f000_gif.sh
index cdf7659155..80e28f5345 100755
--- a/gempak/ush/gempak_gdas_f000_gif.sh
+++ b/gempak/ush/gempak_gdas_f000_gif.sh
@@ -1,100 +1,87 @@
-#!/bin/sh
+#! /usr/bin/env bash
 
 #########################################################################
 #
-#   Script:  gempak_gdas_f00_gif.sh
-#
 #   This scripts creates GEMPAK .gif images of 00HR/Analysis fields from
 #   GDAS model output for archiving at NCDC.
 #
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#
-#
 #########################################################################
 
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-  set -x
-
-  MAPAREA="normal"
+source "${HOMEgfs}/ush/preamble.sh"
 
-  LATVAL="1/1/1/1/5;5"
-  LATSOUTH="1/1/1/1;4/5;5"
+LATVAL="1/1/1/1/5;5"
+LATSOUTH="1/1/1/1;4/5;5"
+pixels="1728;1472"
 
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
+cp "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp
 
 #################################################################
-#              NORTHERN HEMISPHERE ANALYSIS CHARTS              # 
+#              NORTHERN HEMISPHERE ANALYSIS CHARTS              #
 #################################################################
 
-# Create time stamp (bottom) label 
+# Create time stamp (bottom) label
 
-  echo 0000${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
- ${UTILgfs}/exec/webtitle < dates
+echo "0000${PDY}${cyc}" > dates
+export FORT55="title.output"
+"${HOMEgfs}/exec/webtitle.x" < dates
 
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
+TITLE="$(cat title.output)"
+echo "TITLE = ${TITLE}"
 
 # Define labels and file names for Northern Hemisphere analysis charts
 
-  hgttmp850lab="850MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp850dev="gdas_850_hgt_tmp_nh_anl_${cyc}.gif"
+hgttmp850lab="850MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp850dev="gdas_850_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgttmp700lab="700MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp700dev="gdas_700_hgt_tmp_nh_anl_${cyc}.gif"
 
-  hgttmp700lab="700MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp700dev="gdas_700_hgt_tmp_nh_anl_${cyc}.gif"
+hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp500dev="gdas_500_hgt_tmp_nh_anl_${cyc}.gif"
 
-  hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp500dev="gdas_500_hgt_tmp_nh_anl_${cyc}.gif"
+hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso300dev="gdas_300_hgt_iso_nh_anl_${cyc}.gif"
 
-  hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso300dev="gdas_300_hgt_iso_nh_anl_${cyc}.gif"
+hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso250dev="gdas_250_hgt_iso_nh_anl_${cyc}.gif"
 
-  hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso250dev="gdas_250_hgt_iso_nh_anl_${cyc}.gif"
+hgtiso200lab="200MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso200dev="gdas_200_hgt_iso_nh_anl_${cyc}.gif"
 
-  hgtiso200lab="200MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso200dev="gdas_200_hgt_iso_nh_anl_${cyc}.gif"
+mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
+mslpthksfcdev="gdas_sfc_mslp_thk_nh_anl_${cyc}.gif"
 
-  mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gdas_sfc_mslp_thk_nh_anl_${cyc}.gif"
- 
 
 # Set grid date and input file name
 
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F000
-  gdfile=gem_grids${fhr}.gem
+gdattim="${PDY:2:6}/${cyc}00F000"
+gdfile=gem_grids${fhr3}.gem
 
 
 #  Execute the GEMPAK program
 
-$GEMEXE/gdplot2_gif << EOF
+"${GEMEXE}/gdplot2_gif" << EOF
 
 
 ! 850MB HEIGHTS/TEMPERATURES
 
-  restore $NTS/base_nh.nts
-  restore $NTS/850_hgt_tmp.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/850_hgt_tmp.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgttmp850dev} | $pixels
+  DEVICE  = gif | ${hgttmp850dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATVAL
+  LATLON  = ${LATVAL}
   l
   r
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -107,23 +94,23 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 700MB HEIGHTS/TEMPERATURES
 
-  restore $NTS/base_nh.nts
-  restore $NTS/700_hgt_tmp.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/700_hgt_tmp.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgttmp700dev} | $pixels
+  DEVICE  = gif | ${hgttmp700dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATVAL
+  LATLON  = ${LATVAL}
   l
   r
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -136,23 +123,23 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 500MB HEIGHTS/TEMPERATURES
 
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_tmp.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/500_hgt_tmp.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgttmp500dev} | $pixels
+  DEVICE  = gif | ${hgttmp500dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATVAL
+  LATLON  = ${LATVAL}
   l
   r
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -165,14 +152,14 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 300MB HEIGHTS/ISOTACHS
 
-  restore $NTS/base_nh.nts
-  restore $NTS/300_hgt_iso.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/300_hgt_iso.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgtiso300dev} | $pixels
+  DEVICE  = gif | ${hgtiso300dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
   LATLON  = 1/1/1/1/5;5             !
@@ -181,7 +168,7 @@ $GEMEXE/gdplot2_gif << EOF
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -194,17 +181,17 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 250MB ANALYSIS HEIGHTS/ISOTACHS
 
-  restore $NTS/base_nh.nts
-  restore $NTS/250_hgt_iso.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/250_hgt_iso.nts
 
-  CLEAR   = yes 
+  CLEAR   = yes
   GDFILE  = ${gdfile}
   GDATTIM = ${gdattim}
-  MAP     = 1                       
-  DEVICE  = gif | ${hgtiso250dev} | $pixels 
-  TITLE   = 
+  MAP     = 1
+  DEVICE  = gif | ${hgtiso250dev} | ${pixels}
+  TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATVAL 
+  LATLON  = ${LATVAL}
   l
   r
 
@@ -224,14 +211,14 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 200MB HEIGHTS/ISOTACHS
 
-  restore $NTS/base_nh.nts
-  restore $NTS/200_hgt_iso.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/200_hgt_iso.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgtiso200dev} | $pixels
+  DEVICE  = gif | ${hgtiso200dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
   LATLON  = 1/1/1/1/5;5             !
@@ -240,7 +227,7 @@ $GEMEXE/gdplot2_gif << EOF
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -253,17 +240,17 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! ANALYSIS MSLP/1000-500 THICKNESS
 
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/sfc_mslp_thk.nts
 
   CLEAR   = yes
   GDFILE  = ${gdfile}
   GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
+  DEVICE  = gif | ${mslpthksfcdev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATVAL
+  LATLON  = ${LATVAL}
   l
   r
 
@@ -283,75 +270,66 @@ $GEMEXE/gdplot2_gif << EOF
   exit
 EOF
 
-$GEMEXE/gpend
-
-if [ $SENDCOM = YES ]; then
+"${GEMEXE}/gpend"
 
 # Copy the GIF images into my area
 
-  cp ${hgttmp850dev}    $COMOUTncdc/.
-  cp ${hgttmp700dev}    $COMOUTncdc/.
-  cp ${hgttmp500dev}    $COMOUTncdc/.
-  cp ${hgtiso300dev}    $COMOUTncdc/.
-  cp ${hgtiso250dev}    $COMOUTncdc/.
-  cp ${hgtiso200dev}    $COMOUTncdc/.
-  cp ${mslpthksfcdev}   $COMOUTncdc/.
+cp "${hgttmp850dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgttmp700dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgttmp500dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgtiso300dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgtiso250dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgtiso200dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${mslpthksfcdev}"   "${COM_ATMOS_GEMPAK_GIF}/."
 
 # Send the GIF images onto the NCDC area on the public ftp server
 
- if [ $SENDDBN = YES ]; then
-
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgttmp850dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgttmp700dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgttmp500dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgtiso300dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgtiso250dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgtiso200dev}    
-   $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${mslpthksfcdev}   
-
- fi
-
+if [[ ${SENDDBN} == YES ]]; then
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp850dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp700dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp500dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso300dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso250dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso200dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${mslpthksfcdev}"
 fi
 
 
- 
- 
 ##########################################################
 #         SOUTHERN HEMISPHERE ANALYSIS CHARTS            #
 ##########################################################
 
+mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
+mslpthksfcdev="gdas_sfc_mslp_thk_sh_anl_${cyc}.gif"
 
-  mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gdas_sfc_mslp_thk_sh_anl_${cyc}.gif"
-
-  hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp500dev="gdas_500_hgt_tmp_sh_anl_${cyc}.gif"
+hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp500dev="gdas_500_hgt_tmp_sh_anl_${cyc}.gif"
 
-  hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso300dev="gdas_300_hgt_iso_sh_anl_${cyc}.gif"
+hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso300dev="gdas_300_hgt_iso_sh_anl_${cyc}.gif"
 
-  hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso250dev="gdas_250_hgt_iso_sh_anl_${cyc}.gif"
+hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso250dev="gdas_250_hgt_iso_sh_anl_${cyc}.gif"
 
 
 #  Execute the GEMPAK program
 
-$GEMEXE/gdplot2_gif << EOF
+"${GEMEXE}/gdplot2_gif" << EOF
 
 
 ! ANALYSIS MSLP/1000-500 THICKNESS
 
-  restore $NTS/base_sh.nts
-  restore $NTS/sfc_mslp_thk.nts
+  restore ${NTS}/base_sh.nts
+  restore ${NTS}/sfc_mslp_thk.nts
 
   CLEAR   = yes
   GDFILE  = ${gdfile}
   GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
+  DEVICE  = gif | ${mslpthksfcdev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATSOUTH
+  LATLON  = ${LATSOUTH}
   l
   r
 
@@ -371,18 +349,18 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 500MB ANALYSIS  HEIGHTS/TEMPERATURES
 
-  restore $NTS/base_sh.nts
-  restore $NTS/500_hgt_tmp.nts
+  restore ${NTS}/base_sh.nts
+  restore ${NTS}/500_hgt_tmp.nts
 
 
   CLEAR   = yes
   GDFILE  = ${gdfile}
   GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgttmp500dev} | $pixels
+  DEVICE  = gif | ${hgttmp500dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATSOUTH
+  LATLON  = ${LATSOUTH}
   l
   r
 
@@ -401,23 +379,23 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 300MB HEIGHTS/ISOTACHS
 
-  restore $NTS/base_sh.nts
-  restore $NTS/300_hgt_iso.nts
+  restore ${NTS}/base_sh.nts
+  restore ${NTS}/300_hgt_iso.nts
 
   CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgtiso300dev} | $pixels
+  DEVICE  = gif | ${hgtiso300dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATSOUTH               !
+  LATLON  = ${LATSOUTH}               !
   l
   r
 
   CLEAR   = no
   GDPFUN  =
-  TITLE   = 1/-4/$TITLE
+  TITLE   = 1/-4/${TITLE}
   TEXT    = 2/3/2/sw
   LATLON  = 0
   l
@@ -430,17 +408,17 @@ $GEMEXE/gdplot2_gif << EOF
 
 ! 250MB ANALYSIS HEIGHTS/ISOTACHS
 
-  restore $NTS/base_sh.nts
-  restore $NTS/250_hgt_iso.nts
+  restore ${NTS}/base_sh.nts
+  restore ${NTS}/250_hgt_iso.nts
 
   CLEAR   = yes
   GDFILE  = ${gdfile}
   GDATTIM = ${gdattim}
   MAP     = 1
-  DEVICE  = gif | ${hgtiso250dev} | $pixels
+  DEVICE  = gif | ${hgtiso250dev} | ${pixels}
   TITLE   =
   TEXT    = 1/3/2/sw
-  LATLON  = $LATSOUTH
+  LATLON  = ${LATSOUTH}
   l
   r
 
@@ -461,35 +439,22 @@ $GEMEXE/gdplot2_gif << EOF
 EOF
 
 
-$GEMEXE/gpend
-
+"${GEMEXE}/gpend"
 
-if [ $SENDCOM = YES ]; then
 
 # Copy the GIF images into my area
-
-  cp ${mslpthksfcdev}   $COMOUTncdc/.
-  cp ${hgttmp500dev}    $COMOUTncdc/.
-  cp ${hgtiso300dev}    $COMOUTncdc/.
-  cp ${hgtiso250dev}    $COMOUTncdc/.
-
+cp "${mslpthksfcdev}"   "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgttmp500dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgtiso300dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
+cp "${hgtiso250dev}"    "${COM_ATMOS_GEMPAK_GIF}/."
 
 # Copy the GIF images onto the NCDC area on the public ftp server
 
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgttmp500dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgtiso300dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} $COMOUTncdc/${hgtiso250dev}
-
- fi
-
+if [[ ${SENDDBN} == YES ]]; then
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${mslpthksfcdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp500dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso300dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso250dev}"
 fi
 
-
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
+exit
diff --git a/gempak/ush/gempak_gfs_f000_gif.sh b/gempak/ush/gempak_gfs_f000_gif.sh
new file mode 100755
index 0000000000..6a709fcc16
--- /dev/null
+++ b/gempak/ush/gempak_gfs_f000_gif.sh
@@ -0,0 +1,584 @@
+#! /usr/bin/env bash
+
+#########################################################################
+#
+#   Script:  gempak_gfs_f000_gif.sh
+#
+#   This script creates GEMPAK .gif images of 00HR/Analysis fields from
+#   GFS model output for archiving at NCDC.
+#
+#
+#   History:   Ralph Jones     02/16/2005   JIF original version.
+#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
+#                                           from .8 to a larger size (1 or 2)
+#
+#
+#########################################################################
+
+LATVAL="1/1/1/1/5;5"
+pixels="1728;1472"
+cp "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp
+
+#################################################################
+#                       ANALYSIS CHARTS                         #
+#################################################################
+
+
+# Create time stamp (bottom) label
+
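+# webtitle.x reads the date string from "dates" and writes the formatted chart label
+# to title.output (Fortran unit 55, assigned via FORT55)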
+echo "0000${PDY}${cyc}" > dates
+export FORT55="title.output"
+"${HOMEgfs}/exec/webtitle.x" < dates
+TITLE="$(cat title.output)"
+echo "TITLE = ${TITLE}"
+
+# Define labels and file names for analysis charts
+
+hgttmp700lab="700MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp700dev="gfs_700_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp500dev="gfs_500_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso300dev="gfs_300_hgt_iso_nh_anl_${cyc}.gif"
+
+hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso250dev="gfs_250_hgt_iso_nh_anl_${cyc}.gif"
+
+hgttmp250lab="250MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp250dev="gfs_250_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgtiso200lab="200MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso200dev="gfs_200_hgt_iso_nh_anl_${cyc}.gif"
+
+# Not being used?
+# hgttmp200lab="200MB ANALYSIS  HEIGHTS/TEMPERATURE"
+# hgttmp200dev="gfs_200_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgtiso100lab="100MB ANALYSIS  HEIGHTS/ISOTACHS"
+hgtiso100dev="gfs_100_hgt_iso_nh_anl_${cyc}.gif"
+
+hgttmp100lab="100MB ANALYSIS  HEIGHTS/TEMPERATURE"
+hgttmp100dev="gfs_100_hgt_tmp_nh_anl_${cyc}.gif"
+
+hgtvor500lab="500MB ANALYSIS  HEIGHTS/VORTICITY"
+hgtvor500dev="gfs_500_hgt_vor_nh_anl_${cyc}.gif"
+
+hgtvor500usdev="gfs_500_hgt_vor_uscan_anl_${cyc}.gif"
+
+mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
+mslpthksfcdev="gfs_sfc_mslp_thk_nh_anl_${cyc}.gif"
+
+mslpthksfcusdev="gfs_sfc_mslp_thk_uscan_anl_${cyc}.gif"
+
+rhvvel700lab="700MB ANALYSIS  RH/VERT VEL"
+rhvvel700dev="gfs_700_rh_vvel_nh_anl_${cyc}.gif"
+
+liftlab="ANALYSIS  LIFTED INDEX"
+liftdev="gfs_lift_nh_anl_${cyc}.gif"
+
+prswshtroplab="TROPOPAUSE PRESSURE/WIND SHEAR"
+prswshtropdev="gfs_trop_prs_wsh_nh_anl_${cyc}.gif"
+
+# Set grid date and input file name
+
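+# ${PDY:2:6} extracts YYMMDD from the YYYYMMDD date; F000 marks the analysis (00-hr) time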
+gdattim=${PDY:2:6}/${cyc}00F000
+gdfile=gem_grids${fhr3}.gem
+
+#  Execute the GEMPAK program
+
+"${GEMEXE}/gdplot2_gif" << EOF
+
+! 700MB HEIGHTS/TEMPERATURES
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/700_hgt_tmp.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgttmp700dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgttmp700lab}
+  l
+  r
+
+
+! 500MB HEIGHTS/TEMPERATURES
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/500_hgt_tmp.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgttmp500dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgttmp500lab}
+  l
+  r
+
+
+! 300MB HEIGHTS/ISOTACHS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/300_hgt_iso.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtiso300dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = 1/1/1/1/5;5             !
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtiso300lab}
+  l
+  r
+
+
+! 250MB HEIGHTS/TEMPERATURES
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/250_hgt_tmp.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgttmp250dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgttmp250lab}
+  l
+  r
+
+
+! 250MB ANALYSIS HEIGHTS/ISOTACHS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/250_hgt_iso.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtiso250dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  MAP     = 0
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtiso250lab}
+  l
+  r
+
+
+! 200MB HEIGHTS/ISOTACHS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/200_hgt_iso.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtiso200dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = 1/1/1/1/5;5             !
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtiso200lab}
+  l
+  r
+
+
+! 100MB HEIGHTS/TEMPERATURES
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/100_hgt_tmp.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgttmp100dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgttmp100lab}
+  l
+  r
+
+
+! 100MB HEIGHTS/ISOTACHS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/100_hgt_iso.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtiso100dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = 1/1/1/1/5;5             !
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtiso100lab}
+  l
+  r
+
+
+! ANALYSIS MSLP/1000-500 THICKNESS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/sfc_mslp_thk.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${mslpthksfcdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  MAP     = 0
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${mslpthksfclab}
+  l
+  r
+
+
+! ANALYSIS MSLP/1000-500 THICKNESS (US/CANADA)
+
+  restore ${NTS}/base_uscan.nts
+  restore ${NTS}/sfc_mslp_thk.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${mslpthksfcusdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  MAP     = 0
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${mslpthksfclab}
+  l
+  r
+
+! 500MB ANALYSIS  HEIGHTS/VORTICITY
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/500_hgt_vor.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtvor500dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  MAP     = 0
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtvor500lab}
+  l
+  r
+
+! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
+
+  restore ${NTS}/base_uscan.nts
+  restore ${NTS}/500_hgt_vor.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtvor500usdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+
+  TITLE   = 1/3/${hgtvor500lab}
+  l
+  r
+
+
+! ANALYSIS  LIFTED INDEX
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/100_lift.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${liftdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${liftlab}
+  l
+  r
+
+
+! ANALYSIS  TROPOPAUSE PRESSURE/WIND SHEAR
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/trop_pres_wshr.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${prswshtropdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${prswshtroplab}
+  l
+  r
+
+
+! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/700_rel_vvel.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${rhvvel700dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${rhvvel700lab}
+  l
+  r
+
+  exit
+EOF
+
+
+"${GEMEXE}/gpend"
+
+
+# Copy the GIF images into my area
+cp "${hgttmp700dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgttmp500dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtiso300dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtiso250dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgttmp250dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtiso200dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtiso100dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgttmp100dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${mslpthksfcdev}"   "${COM_ATMOS_GEMPAK_GIF}"
+cp "${mslpthksfcusdev}" "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtvor500dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtvor500usdev}"  "${COM_ATMOS_GEMPAK_GIF}"
+cp "${liftdev}"         "${COM_ATMOS_GEMPAK_GIF}"
+cp "${prswshtropdev}"   "${COM_ATMOS_GEMPAK_GIF}"
+cp "${rhvvel700dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+
+# Copy the GIF images onto the NCDC area on the public ftp server
+
+if [[ "${SENDDBN}" == "YES" ]]; then
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp700dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp500dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso300dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso250dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp250dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso200dev}"
+# "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp200dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtiso100dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgttmp100dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${mslpthksfcdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${mslpthksfcusdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtvor500dev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtvor500usdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${liftdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${prswshtropdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${rhvvel700dev}"
+fi
+
+# Convert the 500mb NH Hgts/Temps chart to tif, attach a heading and
+#   send to TOC via the NTC
+
+export input=${COM_ATMOS_GEMPAK_GIF}/${hgttmp500dev}
+export HEADER=YES
+export OUTPATH=${DATA}/gfs_500_hgt_tmp_nh_anl_${cyc}.tif
+"${USHgfs}/make_tif.sh"
+
+exit
diff --git a/gempak/ush/gempak_gfs_f00_gif.sh b/gempak/ush/gempak_gfs_f00_gif.sh
deleted file mode 100755
index 2a7cca5c9f..0000000000
--- a/gempak/ush/gempak_gfs_f00_gif.sh
+++ /dev/null
@@ -1,602 +0,0 @@
-#!/bin/sh
-
-#########################################################################
-#
-#   Script:  gempak_gfs_f00_gif.sh
-#
-#   This scripts creates GEMPAK .gif images of 00HR/Analysis fields from
-#   GFS model output for archiving at NCDC.
-#
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
-#                                           from .8 to a larger size (1 or 2)
-#
-#
-#########################################################################
-
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-  set -x
-
-  MAPAREA="normal"
-
-  LATVAL="1/1/1/1/5;5"
-
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
-
-#################################################################
-#                       ANALYSIS CHARTS                         # 
-#################################################################
-
-
-# Create time stamp (bottom) label 
-
-  echo 0000${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
-  ${UTILgfs}/exec/webtitle < dates
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
-
-# Define labels and file names for analysis charts
-
-  hgttmp700lab="700MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp700dev="gfs_700_hgt_tmp_nh_anl_${cyc}.gif"
-
-  hgttmp500lab="500MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp500dev="gfs_500_hgt_tmp_nh_anl_${cyc}.gif"
-
-  hgtiso500lab="500MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso500dev="gfs_500_hgt_iso_nh_anl_${cyc}.gif"
-
-  hgtiso300lab="300MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso300dev="gfs_300_hgt_iso_nh_anl_${cyc}.gif"
-
-  hgtiso250lab="250MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso250dev="gfs_250_hgt_iso_nh_anl_${cyc}.gif"
-
-  hgttmp250lab="250MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp250dev="gfs_250_hgt_tmp_nh_anl_${cyc}.gif"
-
-  hgtiso200lab="200MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso200dev="gfs_200_hgt_iso_nh_anl_${cyc}.gif"
-
-  hgttmp200lab="200MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp200dev="gfs_200_hgt_tmp_nh_anl_${cyc}.gif"
-
-  hgtiso100lab="100MB ANALYSIS  HEIGHTS/ISOTACHS"
-  hgtiso100dev="gfs_100_hgt_iso_nh_anl_${cyc}.gif"
-
-  hgttmp100lab="100MB ANALYSIS  HEIGHTS/TEMPERATURE"
-  hgttmp100dev="gfs_100_hgt_tmp_nh_anl_${cyc}.gif"
-
-  hgtvor500lab="500MB ANALYSIS  HEIGHTS/VORTICITY"
-  hgtvor500dev="gfs_500_hgt_vor_nh_anl_${cyc}.gif"
-
-  hgtvor500usdev="gfs_500_hgt_vor_uscan_anl_${cyc}.gif"
-
-  mslpthksfclab="ANALYSIS  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gfs_sfc_mslp_thk_nh_anl_${cyc}.gif"
- 
-  mslpthksfcusdev="gfs_sfc_mslp_thk_uscan_anl_${cyc}.gif"
-
-  rhvvel700lab="700MB ANALYSIS  RH/VERT VEL"
-  rhvvel700dev="gfs_700_rh_vvel_nh_anl_${cyc}.gif"
-
-  liftlab="ANALYSIS  LIFTED INDEX"
-  liftdev="gfs_lift_nh_anl_${cyc}.gif"
-
-  prswshtroplab="TROPOPAUSE PRESSURE/WIND SHEAR"
-  prswshtropdev="gfs_trop_prs_wsh_nh_anl_${cyc}.gif"
-
-# Set grid date and input file name
-
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F000
-  gdfile=gem_grids${fhr}.gem
-
-#  Execute the GEMPAK program
-
-  $GEMEXE/gdplot2_gif << EOF
-
-! 700MB HEIGHTS/TEMPERATURES
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_hgt_tmp.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgttmp700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgttmp700lab}
-  l
-  r
-
-
-! 500MB HEIGHTS/TEMPERATURES
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_tmp.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgttmp500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgttmp500lab}
-  l
-  r
-
-
-! 300MB HEIGHTS/ISOTACHS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/300_hgt_iso.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgtiso300dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = 1/1/1/1/5;5             !
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtiso300lab}
-  l
-  r
-
-
-! 250MB HEIGHTS/TEMPERATURES
-
-  restore $NTS/base_nh.nts
-  restore $NTS/250_hgt_tmp.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgttmp250dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgttmp250lab}
-  l
-  r
-
-
-! 250MB ANALYSIS HEIGHTS/ISOTACHS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/250_hgt_iso.nts
-
-  CLEAR   = yes 
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1                       
-  DEVICE  = gif | ${hgtiso250dev} | $pixels 
-  TITLE   = 
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL 
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtiso250lab}
-  l
-  r
-
-
-! 200MB HEIGHTS/ISOTACHS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/200_hgt_iso.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgtiso200dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = 1/1/1/1/5;5             !
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtiso200lab}
-  l
-  r
-
-
-! 100MB HEIGHTS/TEMPERATURES
-
-  restore $NTS/base_nh.nts
-  restore $NTS/100_hgt_tmp.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgttmp100dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgttmp100lab}
-  l
-  r
-
-
-! 100MB HEIGHTS/ISOTACHS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/100_hgt_iso.nts
-
-  CLEAR   = yes
-  GDFILE  = $gdfile
-  GDATTIM = $gdattim
-  MAP     = 1
-  DEVICE  = gif | ${hgtiso100dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = 1/1/1/1/5;5             !
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/$TITLE
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtiso100lab}
-  l
-  r
-
-
-! ANALYSIS MSLP/1000-500 THICKNESS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-
-! ANALYSIS MSLP/1000-500 THICKNESS (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcusdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500usdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! ANALYSIS  LIFTED INDEX
-
-  restore $NTS/base_nh.nts
-  restore $NTS/100_lift.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${liftdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${liftlab}
-  l
-  r
-
-
-! ANALYSIS  TROPOPAUSE PRESSURE/WIND SHEAR
-
-  restore $NTS/base_nh.nts
-  restore $NTS/trop_pres_wshr.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${prswshtropdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${prswshtroplab}
-  l
-  r
-
-
-! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_rel_vvel.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${rhvvel700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${rhvvel700lab}
-  l
-  r
-
-  exit
-EOF
-
-
-$GEMEXE/gpend
-
-
-if [ $SENDCOM = YES ]; then
-
-# Copy the GIF images into my area
-
-  cp ${hgttmp700dev}    ${COMOUT}
-  cp ${hgttmp500dev}    ${COMOUT}
-  cp ${hgtiso300dev}    ${COMOUT}
-  cp ${hgtiso250dev}    ${COMOUT}
-  cp ${hgttmp250dev}    ${COMOUT}
-  cp ${hgtiso200dev}    ${COMOUT}
-  cp ${hgtiso100dev}    ${COMOUT}
-  cp ${hgttmp100dev}    ${COMOUT}
-  cp ${mslpthksfcdev}   ${COMOUT}
-  cp ${mslpthksfcusdev} ${COMOUT}
-  cp ${hgtvor500dev}    ${COMOUT}
-  cp ${hgtvor500usdev}  ${COMOUT}
-  cp ${liftdev}         ${COMOUT}
-  cp ${prswshtropdev}   ${COMOUT}
-  cp ${rhvvel700dev}    ${COMOUT}
-
-# Copy the GIF images onto the NCDC area on the public ftp server
-
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgttmp700dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgttmp500dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtiso300dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtiso250dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgttmp250dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtiso200dev}
-# $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgttmp200dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtiso100dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgttmp100dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcusdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500dev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500usdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${liftdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${prswshtropdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${rhvvel700dev}
-
-
-# Convert the 500mb NH Hgts/Temps chart to tif, attach a heading and 
-#   send to TOC via the NTC
-
- fi
-  export input=${COMOUT}/${hgttmp500dev}
-  export HEADER=YES
-  export OUTPATH=$DATA/gfs_500_hgt_tmp_nh_anl_${cyc}.tif
-  ${USHgfs}/make_tif.sh
-fi 
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
diff --git a/gempak/ush/gempak_gfs_f12_gif.sh b/gempak/ush/gempak_gfs_f12_gif.sh
deleted file mode 100755
index 611252a2e2..0000000000
--- a/gempak/ush/gempak_gfs_f12_gif.sh
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/bin/sh
-
-#########################################################################
-#
-#   Script:  gempak_gfs_f12_gif.sh
-#
-#   This scripts creates GEMPAK .gif images of 12HR forecast fields from
-#   GFS model output for archiving at NCDC.
-#
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
-#                                           from .8 to a larger size (1 or 2)
-#
-#
-#########################################################################
-
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-  set -x
-
-  MAPAREA="normal"
-
-  LATVAL="1/1/1/1/5;5"
-
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
- 
-##########################################################
-#                12HR FORECAST CHARTS                    #
-##########################################################
-
-
-# Create time stamp (bottom) label
-
-  echo 00${fhr}${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
- ${UTILgfs}/exec/webtitle < dates
-
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
-
-
-# Define labels and file names for 12hr forecast charts
-
-  hgtvor500lab="500MB ${fhr}HR FORECAST  HEIGHTS/VORTICITY"
-  hgtvor500dev="gfs_500_hgt_vor_nh_f${fhr}_${cyc}.gif"
-
-  hgtvor500usdev="gfs_500_hgt_vor_uscan_f${fhr}_${cyc}.gif"
-
-  mslpthksfclab="${fhr}HR FORECAST  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gfs_sfc_mslp_thk_nh_f${fhr}_${cyc}.gif"
-
-  rhvvel700lab="700MB ${fhr}HR FORECAST  RH/VERT VEL"
-  rhvvel700dev="gfs_700_rh_vvel_nh_f${fhr}_${cyc}.gif"
-
-# Set grid date and input file name
-
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F0${fhr}
-  gdfile=gem_grids${fhr}.gem
-
-#  Execute the GEMPAK program
-
-  $GEMEXE/gdplot2_gif << EOF
-
-! ANALYSIS MSLP/1000-500 THICKNESS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500usdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_rel_vvel.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${rhvvel700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${rhvvel700lab}
-  l
-  r
-
-  exit
-EOF
-
-$GEMEXE/gpend
-
-if [ $SENDCOM = YES ]; then
-
-# Copy the GIF images into my area
-
-  cp ${mslpthksfcdev}   ${COMOUT}
-  cp ${hgtvor500dev}    ${COMOUT}
-  cp ${hgtvor500usdev}  ${COMOUT}
-  cp ${rhvvel700dev}    ${COMOUT}
-
-# Copy the GIF images onto the NCDC area on the public ftp server
-
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500dev}
-#  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500usdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${rhvvel700dev}
-
- fi
-
-fi
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
diff --git a/gempak/ush/gempak_gfs_f24_gif.sh b/gempak/ush/gempak_gfs_f24_gif.sh
deleted file mode 100755
index 53670a29bd..0000000000
--- a/gempak/ush/gempak_gfs_f24_gif.sh
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/bin/sh
-
-
-#########################################################################
-#
-#   Script:  gempak_gfs_f24_gif.sh
-#
-#   This scripts creates GEMPAK .gif images of 24HR forecast fields from
-#   GFS model output for archiving at NCDC.
-#
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
-#                                           from .8 to a larger size (1 or 2)
-#
-#
-#########################################################################
-
-
-
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-
-
-  set -x
-
-
-  MAPAREA="normal"
-
-  LATVAL="1/1/1/1/5;5"
-
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
-
-
- 
-##########################################################
-#                24HR FORECAST CHARTS                    #
-##########################################################
-
-
-# Create time stamp (bottom) label
-
-  echo 00${fhr}${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
- ${UTILgfs}/exec/webtitle < dates
-
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
-
-
-# Define labels and file names for 24hr forecast charts
-
-  hgtvor500lab="500MB ${fhr}HR FORECAST  HEIGHTS/VORTICITY"
-  hgtvor500dev="gfs_500_hgt_vor_nh_f${fhr}_${cyc}.gif"
-
-  hgtvor500usdev="gfs_500_hgt_vor_uscan_f${fhr}_${cyc}.gif"
-
-  mslpthksfclab="${fhr}HR FORECAST  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gfs_sfc_mslp_thk_nh_f${fhr}_${cyc}.gif"
-
-  rhvvel700lab="700MB ${fhr}HR FORECAST  RH/VERT VEL"
-  rhvvel700dev="gfs_700_rh_vvel_nh_f${fhr}_${cyc}.gif"
-
-
-# Set grid date and input file name
-
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F0${fhr}
-  gdfile=gem_grids${fhr}.gem
-
-
-
-#  Execute the GEMPAK program
-
-  $GEMEXE/gdplot2_gif << EOF
-
-
-! ANALYSIS MSLP/1000-500 THICKNESS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500usdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_rel_vvel.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${rhvvel700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${rhvvel700lab}
-  l
-  r
-
-  exit
-EOF
-
-
-$GEMEXE/gpend
-
-
-if [ $SENDCOM = YES ]; then
-
-# Copy the GIF images into my area
-
-  cp ${mslpthksfcdev}   ${COMOUT}
-  cp ${hgtvor500dev}    ${COMOUT}
-  cp ${hgtvor500usdev}  ${COMOUT}
-  cp ${rhvvel700dev}    ${COMOUT}
-
-
-# Copy the GIF images onto the NCDC area on the public ftp server
-
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500dev}
-#  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500usdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${rhvvel700dev}
-
- fi
-
-fi
-
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
diff --git a/gempak/ush/gempak_gfs_f36_gif.sh b/gempak/ush/gempak_gfs_f36_gif.sh
deleted file mode 100755
index e1999090c0..0000000000
--- a/gempak/ush/gempak_gfs_f36_gif.sh
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/bin/sh
-
-
-#########################################################################
-#
-#   Script:  gempak_gfs_f36_gif.sh
-#
-#   This scripts creates GEMPAK .gif images of 36HR forecast fields from
-#   GFS model output for archiving at NCDC.
-#
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
-#                                           from .8 to a larger size (1 or 2)
-#
-#
-#########################################################################
-
-
-
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-
-  set -x
-
-
-  MAPAREA="normal"
-
-  LATVAL="1/1/1/1/5;5"
-
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
-
-
- 
-##########################################################
-#                36HR FORECAST CHARTS                    #
-##########################################################
-
-
-# Create time stamp (bottom) label
-
-  echo 00${fhr}${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
- ${UTILgfs}/exec/webtitle < dates
-
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
-
-
-# Define labels and file names for 36hr forecast charts
-
-  hgtvor500lab="500MB ${fhr}HR FORECAST  HEIGHTS/VORTICITY"
-  hgtvor500dev="gfs_500_hgt_vor_nh_f${fhr}_${cyc}.gif"
-
-  hgtvor500usdev="gfs_500_hgt_vor_uscan_f${fhr}_${cyc}.gif"
-
-  mslpthksfclab="${fhr}HR FORECAST  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gfs_sfc_mslp_thk_nh_f${fhr}_${cyc}.gif"
-
-  rhvvel700lab="700MB ${fhr}HR FORECAST  RH/VERT VEL"
-  rhvvel700dev="gfs_700_rh_vvel_nh_f${fhr}_${cyc}.gif"
-
-
-# Set grid date and input file name
-
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F0${fhr}
-  gdfile=gem_grids${fhr}.gem
-
-
-
-#  Execute the GEMPAK program
-
-  $GEMEXE/gdplot2_gif << EOF
-
-
-! ANALYSIS MSLP/1000-500 THICKNESS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500usdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_rel_vvel.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${rhvvel700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${rhvvel700lab}
-  l
-  r
-
-  exit
-EOF
-
-
-$GEMEXE/gpend
-
-
-if [ $SENDCOM = YES ]; then
-
-# Copy the GIF images into my area
-
-  cp ${mslpthksfcdev}   ${COMOUT}
-  cp ${hgtvor500dev}    ${COMOUT}
-  cp ${hgtvor500usdev}  ${COMOUT}
-  cp ${rhvvel700dev}    ${COMOUT}
-
-
-# Copy the GIF images onto the NCDC area on the public ftp server
-
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500dev}
-#  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500usdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${rhvvel700dev}
-
- fi
-
-fi
-
-
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
diff --git a/gempak/ush/gempak_gfs_f48_gif.sh b/gempak/ush/gempak_gfs_f48_gif.sh
deleted file mode 100755
index 1e0ba532fd..0000000000
--- a/gempak/ush/gempak_gfs_f48_gif.sh
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/bin/sh
-
-
-#########################################################################
-#
-#   Script:  gempak_gfs_f48_gif.sh
-#
-#   This scripts creates GEMPAK .gif images of 48HR forecast fields from
-#   GFS model output for archiving at NCDC.
-#
-#
-#   History:   Ralph Jones     02/16/2005   JIF original version.
-#   History:   Steve Lilly     04/30/2008   Change font size of the Titles
-#                                           from .8 to a larger size (1 or 2)
-#
-#
-#########################################################################
-
-
-
-   msg=" Make GEMPAK GIFS utility"
-   postmsg "$jlogfile" "$msg"
-
-
-  set -x
-
-
-  MAPAREA="normal"
-
-  LATVAL="1/1/1/1/5;5"
-
-  pixels="1728;1472"
-
-  cp $FIXgempak/coltbl.spc coltbl.xwp
-
-
- 
-##########################################################
-#                48HR FORECAST CHARTS                    #
-##########################################################
-
-
-# Create time stamp (bottom) label
-
-  echo 00${fhr}${PDY}${cyc} > dates
-  export FORT55="title.output"
-#  $WEBTITLE < dates
- ${UTILgfs}/exec/webtitle < dates
-
-  export TITLE=$(cat title.output)
-  echo "\n\n TITLE = $TITLE \n"
-
-
-# Define labels and file names for 48hr forecast charts
-
-  hgtvor500lab="500MB ${fhr}HR FORECAST  HEIGHTS/VORTICITY"
-  hgtvor500dev="gfs_500_hgt_vor_nh_f${fhr}_${cyc}.gif"
-
-  hgtvor500usdev="gfs_500_hgt_vor_uscan_f${fhr}_${cyc}.gif"
-
-  mslpthksfclab="${fhr}HR FORECAST  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
-  mslpthksfcdev="gfs_sfc_mslp_thk_nh_f${fhr}_${cyc}.gif"
-
-  rhvvel700lab="700MB ${fhr}HR FORECAST  RH/VERT VEL"
-  rhvvel700dev="gfs_700_rh_vvel_nh_f${fhr}_${cyc}.gif"
-
-
-# Set grid date and input file name
-
-  gdattim=$(echo ${PDY} | cut -c3-8)/${cyc}00F0${fhr}
-  gdfile=gem_grids${fhr}.gem
-
-
-
-#  Execute the GEMPAK program
-
-  $GEMEXE/gdplot2_gif << EOF
-
-
-! ANALYSIS MSLP/1000-500 THICKNESS
-
-  restore $NTS/base_nh.nts
-  restore $NTS/sfc_mslp_thk.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${mslpthksfcdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  MAP     = 0
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${mslpthksfclab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
-
-  restore $NTS/base_uscan.nts
-  restore $NTS/500_hgt_vor.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${hgtvor500usdev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${hgtvor500lab}
-  l
-  r
-
-
-! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
-
-  restore $NTS/base_nh.nts
-  restore $NTS/700_rel_vvel.nts
-
-  CLEAR   = yes
-  GDFILE  = ${gdfile}
-  GDATTIM = ${gdattim}
-  MAP     = 1
-  DEVICE  = gif | ${rhvvel700dev} | $pixels
-  TITLE   =
-  TEXT    = 1/2/2/c/sw 
-  LATLON  = $LATVAL
-  l
-  r
-
-  CLEAR   = no
-  GDPFUN  =
-  TITLE   = 1/-4/${TITLE}
-  TEXT    = 2/2/2/c/sw 
-  LATLON  = 0
-  l
-  r
-
-  TITLE   = 1/3/${rhvvel700lab}
-  l
-  r
-
-  exit
-EOF
-
-
-$GEMEXE/gpend
-
-
-if [ $SENDCOM = YES ]; then
-
-# Copy the GIF images into my area
-
-  cp ${mslpthksfcdev}   ${COMOUT}
-  cp ${hgtvor500dev}    ${COMOUT}
-  cp ${hgtvor500usdev}  ${COMOUT}
-  cp ${rhvvel700dev}    ${COMOUT}
-
-
-# Copy the GIF images onto the NCDC area on the public ftp server
-
- if [ $SENDDBN = YES ]; then
-
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${mslpthksfcdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500dev}
-#  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${hgtvor500usdev}
-  $DBNROOT/bin/dbn_alert MODEL NCDCGIF ${job} ${COMOUT}/${rhvvel700dev}
-
- fi
-
-fi
-
-
-
-   msg=" GEMPAK_GIF ${fhr} hour completed normally"
-   postmsg "$jlogfile" "$msg"
-
-   exit
diff --git a/gempak/ush/gempak_gfs_fhhh_gif.sh b/gempak/ush/gempak_gfs_fhhh_gif.sh
new file mode 100755
index 0000000000..33f5764068
--- /dev/null
+++ b/gempak/ush/gempak_gfs_fhhh_gif.sh
@@ -0,0 +1,189 @@
+#! /usr/bin/env bash
+
+#########################################################################
+#
+#   This script creates GEMPAK .gif images of forecast fields from
+#   GFS model output for archiving at NCDC.
+#
+#########################################################################
+
+LATVAL="1/1/1/1/5;5"
+pixels="1728;1472"
+cp "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp
+
+##########################################################
+#                   FORECAST CHARTS                      #
+##########################################################
+
+
+# Create time stamp (bottom) label
+
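+# webtitle.x reads the date string from "dates" and writes the formatted chart label
+# to title.output (Fortran unit 55, assigned via FORT55)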
+echo "0${fhr3}${PDY}${cyc}" > dates
+export FORT55="title.output"
+"${HOMEgfs}/exec/webtitle.x" < dates
+
+TITLE="$(cat title.output)"
+echo "TITLE = ${TITLE}"
+
+# Define labels and file names for forecast charts
+hgtvor500lab="500MB ${fhr3}HR FORECAST  HEIGHTS/VORTICITY"
+hgtvor500dev="gfs_500_hgt_vor_nh_f${fhr3}_${cyc}.gif"
+
+hgtvor500usdev="gfs_500_hgt_vor_uscan_f${fhr3}_${cyc}.gif"
+
+mslpthksfclab="${fhr3}HR FORECAST  MEAN SEA LEVEL PRESSURE/1000-500MB THICKNESS"
+mslpthksfcdev="gfs_sfc_mslp_thk_nh_f${fhr3}_${cyc}.gif"
+
+rhvvel700lab="700MB ${fhr3}HR FORECAST  RH/VERT VEL"
+rhvvel700dev="gfs_700_rh_vvel_nh_f${fhr3}_${cyc}.gif"
+
+
+# Set grid date and input file name
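+# ${PDY:2:6} extracts YYMMDD from the YYYYMMDD date; GDATTIM uses the GEMPAK form YYMMDD/HHMMFhhh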
+gdattim="${PDY:2:6}/${cyc}00F${fhr3}"
+gdfile=gem_grids${fhr3}.gem
+
+#  Execute the GEMPAK program
+
+"${GEMEXE}/gdplot2_gif" << EOF
+
+
+! ANALYSIS MSLP/1000-500 THICKNESS
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/sfc_mslp_thk.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${mslpthksfcdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  MAP     = 0
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${mslpthksfclab}
+  l
+  r
+
+
+! 500MB ANALYSIS  HEIGHTS/VORTICITY
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/500_hgt_vor.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtvor500dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtvor500lab}
+  l
+  r
+
+
+! 500MB ANALYSIS  HEIGHTS/VORTICITY (US/CANADA)
+
+  restore ${NTS}/base_uscan.nts
+  restore ${NTS}/500_hgt_vor.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${hgtvor500usdev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${hgtvor500lab}
+  l
+  r
+
+
+! ANALYSIS 700MB RELATIVE HUMIDITY AND VERTICAL VELOCITY
+
+  restore ${NTS}/base_nh.nts
+  restore ${NTS}/700_rel_vvel.nts
+
+  CLEAR   = yes
+  GDFILE  = ${gdfile}
+  GDATTIM = ${gdattim}
+  MAP     = 1
+  DEVICE  = gif | ${rhvvel700dev} | ${pixels}
+  TITLE   =
+  TEXT    = 1/2/2/c/sw
+  LATLON  = ${LATVAL}
+  l
+  r
+
+  CLEAR   = no
+  GDPFUN  =
+  TITLE   = 1/-4/${TITLE}
+  TEXT    = 2/2/2/c/sw
+  LATLON  = 0
+  l
+  r
+
+  TITLE   = 1/3/${rhvvel700lab}
+  l
+  r
+
+  exit
+EOF
+
+"${GEMEXE}/gpend"
+
+# Copy the GIF images into my area
+
+cp "${mslpthksfcdev}"   "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtvor500dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+cp "${hgtvor500usdev}"  "${COM_ATMOS_GEMPAK_GIF}"
+cp "${rhvvel700dev}"    "${COM_ATMOS_GEMPAK_GIF}"
+
+# Copy the GIF images onto the NCDC area on the public ftp server
+
+if [[ "${SENDDBN}" == YES ]]; then
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${mslpthksfcdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtvor500dev}"
+  #  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${hgtvor500usdev}"
+  "${DBNROOT}/bin/dbn_alert" MODEL NCDCGIF "${job}" "${COM_ATMOS_GEMPAK_GIF}/${rhvvel700dev}"
+fi
+
+echo "GEMPAK_GIF ${fhr3} hour completed normally"
+
+exit
diff --git a/gempak/ush/gfs_meta_ak.sh b/gempak/ush/gfs_meta_ak.sh
index c258b7e83a..00517e2b19 100755
--- a/gempak/ush/gfs_meta_ak.sh
+++ b/gempak/ush/gfs_meta_ak.sh
@@ -1,48 +1,41 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_ak.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# D.W.Plummer/NCEP   3/97   Added ecmwf comparison.
-# D.W.Plummer/NCEP   3/97   Added $MAPFIL specification for lower resolution
-# D.W.Plummer/NCEP   4/97   Removed run from 3-HOURLY PRECIP
-# J. Carr/HPC        2/99   Changed skip to 0
-# B. Gordon/NCO      5/00   Modified for production on IBM-SP
-#                           Changed gdplot_nc -> gdplot2_nc
-# D. Michaud/NCO     4/01   Modified to Reflect Different Title for
-#                           Parallel runs
-# J. Carr/PMB       11/04   Added a ? to all title lines
-#                           Changed contur from a 1 to a 2.
-# M. Klein/HPC       6/07   Modify for Alaska medium-range desk and rename script.
-#
 
-cd $DATA
+source "${HOMEgfs}/ush/preamble.sh"
 
-set -xa
+cd "${DATA}" || exit 2
 
-rm -rf $DATA/ak
-mkdir -p -m 775 $DATA/ak
-cd $DATA/ak
-cp $FIXgempak/datatype.tbl datatype.tbl
+rm -rf "${DATA}/ak"
+mkdir -p -m 775 "${DATA}/ak"
+cd "${DATA}/ak" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 device="nc | gfs.meta.ak"
-PDY2=$(echo $PDY | cut -c3-)
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 fend=F216
 
-if [ "$envir" = "para" ] ; then
+if [[ "${envir}" == "para" ]] ; then
    export m_title="GFSP"
 else
    export m_title="GFS"
 fi
 
 export pgm=gdplot2_nc;. prep_step
-startmsg
 
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-GFS | ${PDY2}/${cyc}00
-GDATTIM	= F00-$fend-6
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-GFS | ${PDY:2}/${cyc}00
+GDATTIM	= F00-${fend}-6
 DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
@@ -71,13 +64,13 @@ HLSYM   = 2;1.5//21//hw
 CLRBAR  = 1
 WIND    =
 REFVEC  =
-TITLE	= 5/-2/~ ? $m_title PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
+TITLE	= 5/-2/~ ? ${m_title} PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
 l
 run
 
 GLEVEL  = 4400:10000    !0
 GVCORD  = sgma          !none
-SKIP    = 0 
+SKIP    = 0
 SCALE   = 0
 GDPFUN  = sm5s(relh)     !pmsl
 TYPE    = c/f            !c
@@ -88,11 +81,11 @@ FLINE   = 0;24;23;22
 HILO    = 26;2/H#;L#/1018-1070;900-1012//30;30/y
 HLSYM   = 2;1.5//21//hw
 CLRBAR  = 1
-TITLE   = 5/-2/~ ? $m_title PMSL, 1000-500MB MEAN RH|~MSLP, 1000-500 MEAN RH!0
+TITLE   = 5/-2/~ ? ${m_title} PMSL, 1000-500MB MEAN RH|~MSLP, 1000-500 MEAN RH!0
 run
 
 GLEVEL  = 850
-GVCORD  = pres 
+GVCORD  = pres
 SKIP    = 0         !0         !0         !0         !0/1;-1
 SCALE   = 0         !0         !0         !-1        !0
 GDPFUN  = sm9s(tmpc)!sm9s(tmpc)!sm9s(tmpc)!sm5s(hght)!kntv(wnd)
@@ -105,7 +98,7 @@ FLINE   = 24;30;28;29;25;0;17
 HILO    =
 HLSYM   =
 WIND    = 18//1
-TITLE	= 5/-2/~ ? $m_title @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
 l
 run
 
@@ -119,7 +112,7 @@ LINE    = 8//2/0         !23//2/0        !20/1/1/1 !6/1/1/1 ! 24/5/1/1
 FINT    = 70;90
 FLINE   = 0;23;22
 WIND    =
-TITLE	= 5/-2/~ ? $m_title @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
 l
 run
 
@@ -133,8 +126,8 @@ LINE    = 7/5/1/2            ! 29/5/1/2     !5/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!
-HLSYM   = 
-TITLE	= 5/-2/~ ? $m_title @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
+HLSYM   =
+TITLE	= 5/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
 l
 run
 
@@ -150,7 +143,7 @@ FLINE   = 0;25;24;29;7;15
 HILO    =
 HLSYM   =
 WIND    = 18//1
-TITLE	= 5/-2/~ ? $m_title @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
 l
 run
 
@@ -158,87 +151,88 @@ GDATTIM = F06-F180-6
 GLEVEL  = 0
 SKIP    = 0
 GVCORD  = none
-SCALE   = 0   
+SCALE   = 0
 GDPFUN  = p06i !pmsl
 TYPE    = f    !c
-CONTUR  = 2    
+CONTUR  = 2
 CINT    =      !4
 LINE    =      !5/1/1/0
 FINT    = .01;.1;.25;.5;.75;1;1.25;1.5;1.75;2;2.5;3;4;5;6;7;8;9
 FLINE   = 0;21-30;14-20;5
 HILO    =      !26;2////30;30/y
 HLSYM   =      2;1.5//21//hw
-WIND    = 
-TITLE	= 5/-2/~ ? $m_title 6-HR TOTAL PCPN, MSLP|~6-HR TOTAL PCPN, MSLP!0
+WIND    =
+TITLE	= 5/-2/~ ? ${m_title} 6-HR TOTAL PCPN, MSLP|~6-HR TOTAL PCPN, MSLP!0
 l
 run
 
-GDPFUN  = p06i 
-TYPE    = f  
+GDPFUN  = p06i
+TYPE    = f
 HILO    = 31;0/x#2////y
 HLSYM   = 1.5
-TITLE   = 5/-2/~ ? $m_title 6-HR TOTAL PCPN |~6-HR TOTAL PCPN!0
+TITLE   = 5/-2/~ ? ${m_title} 6-HR TOTAL PCPN |~6-HR TOTAL PCPN!0
 run
 
-GDATTIM = F12-$fend-06
+GDATTIM = F12-${fend}-06
 GDPFUN  = p12i  !pmsl
 TYPE    = f     !c
 HILO    =       !26;2////30;30/y
 HLSYM   =       2;1.5//21//hw
-TITLE   = 5/-2/~ ? $m_title 12-HR TOTAL PCPN, MSLP|~12-HR TOTAL PCPN, MSLP!0
+TITLE   = 5/-2/~ ? ${m_title} 12-HR TOTAL PCPN, MSLP|~12-HR TOTAL PCPN, MSLP!0
 run
 
-GDPFUN  = p12i 
+GDPFUN  = p12i
 TYPE    = f
 HILO    = 31;0/x#2////y
 HLSYM   = 1.5
-TITLE	= 5/-2/~ ? $m_title 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN!0
+TITLE	= 5/-2/~ ? ${m_title} 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN!0
 l
 run
 
 
-GDATTIM	= F24-$fend-06
+GDATTIM	= F24-${fend}-06
 GDPFUN  = p24i  !pmsl
 TYPE    = f     !c
 HILO    =       !26;2////30;30/y
 HLSYM   =       2;1.5//21//hw
-TITLE   = 5/-2/~ ? $m_title 24-HR TOTAL PCPN, MSLP|~24-HR TOTAL PCPN, MSLP!0
-run 
+TITLE   = 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN, MSLP|~24-HR TOTAL PCPN, MSLP!0
+run
 
-GDPFUN  = p24i      
+GDPFUN  = p24i
 TYPE    = f
 HILO    = 31;0/x#2////y
 HLSYM   = 1.5
-TITLE	= 5/-2/~ ? $m_title 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
+TITLE	= 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
 run
 
 exit
 EOF
-export err=$?;err_chk
+export err=$?
 
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l gfs.meta.ak
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-
-if [ $SENDCOM = "YES" ] ; then
-  mv gfs.meta.ak ${COMOUT}/gfs_${PDY}_${cyc}_ak
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_ak
-    if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-      DBN_ALERT_TYPE=GFS_METAFILE
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-       ${COMOUT}/gfs_${PDY}_${cyc}_ak
-    fi
-    if [ $fhr -eq 216 ] ; then
-     ${DBNROOT}/bin/dbn_alert MODEL GFS_METAFILE_LAST $job \
-       ${COMOUT}/gfs_${PDY}_${cyc}_ak
+if (( err != 0 )) || [[ ! -s gfs.meta.ak ]]; then
+  echo "FATAL ERROR: Failed to create alaska meta file"
+  exit "${err}"
+fi
+
+mv gfs.meta.ak "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_ak"
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: Failed to move meta file to ${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_ak"
+    exit $(( err + 100 ))
+fi
+
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_ak"
+    if [[ ${DBN_ALERT_TYPE} = "GFS_METAFILE_LAST" ]] ; then
+        DBN_ALERT_TYPE=GFS_METAFILE
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_ak"
     fi
-  fi
 fi
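
The tail of gfs_meta_ak.sh above drops err_chk in favour of an explicit test of both the gdplot2_nc return code and the output file, since GEMPAK can exit 0 even when no metafile was produced. A minimal sketch of that check, with a placeholder command and file name:

#! /usr/bin/env bash
# GEMPAK programs do not always return non-zero on failure, so test the return
# code AND that the expected output file exists and is non-empty.
metafile="example.meta"     # placeholder output name
true                        # placeholder for the gdplot2_nc here-document
export err=$?

if (( err != 0 )) || [[ ! -s "${metafile}" ]]; then
  echo "FATAL ERROR: Failed to create ${metafile}"
  exit $(( err + 100 ))     # offset so the job still fails when err happens to be 0
fi
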
 
diff --git a/gempak/ush/gfs_meta_bwx.sh b/gempak/ush/gfs_meta_bwx.sh
index f5b4e1d944..04f70ad7c1 100755
--- a/gempak/ush/gfs_meta_bwx.sh
+++ b/gempak/ush/gfs_meta_bwx.sh
@@ -1,55 +1,38 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_bwx_new
 #
-# Log :
-# D.W.Plummer/NCEP     2/97   Add log header
-# J. Carr/HPC      12/12/97   Converted from gdplot to gdplot2
-# J. Carr/HPC      08/05/98   Changed map to medium resolution
-# J. Carr/HPC      02/02/99   Changed skip to 0
-# J. Carr/HPC      04/12/99   Added gfs out to 84 hrs.
-# J. Carr/HPC          6/99   Added a filter to map
-# J. Carr/HPC        1/2000   Eliminated 250 mb vort and pw field. Eliminated pv field.  Added another ptype field.
-# J. Carr/HPC        2/2001   Edited to run on the IBM.
-# J. Carr/HPC        5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC        6/2001   Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC        7/2001   Submitted.
-# J. Carr/PMB       11/2004   Added a ? to all title/TITLE lines.
-# M. Klein/HPC      01/2010   Extend to 180 hours
-#
 # Set up Local Variables
 #
-set -x
-#
-export PS4='BWX:$SECONDS + '
-mkdir -p -m 775 $DATA/BWX
-cd $DATA/BWX
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-mdl=gfs
-MDL="GFS"
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/BWX"
+cd "${DATA}/BWX" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
 metatype="bwx"
-metaname="${mdl}_${metatype}_${cyc}.meta"
+metaname="${RUN}_${PDY}_${cyc}_us_${metatype}"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-#if [ ${cyc} -eq 00 ] ; then
-#    fend=F126
-#elif [ ${cyc} -eq 12 ] ; then
-#    fend=F126
-#else
-#    fend=F126
-#fi
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 fend=F180
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc<< EOFplt
-gdfile   = F-${MDL} | ${PDY2}/${cyc}00
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOFplt
+gdfile   = F-${RUN} | ${PDY:2}/${cyc}00
 gdattim  = F00-${fend}-6
 CONTUR	 = 1
 garea    = bwus
-proj     = 
+proj     =
 map      = 1/1/1/yes
 latlon   = 0
 text     = 1/22/2/hw
@@ -68,12 +51,12 @@ cint     = 4/200/308 !4/312/324 !4/328
 line     = 16/1/1    !2/1/3     !32/1/2/1
 fint     = 328;336;344;352;360;368
 fline    = 0;24;30;29;15;18;20
-hilo     = 
-hlsym    = 
+hilo     =
+hlsym    =
 clrbar   = 1/V/LL    !0
-wind     = bk0       !bk0       !bk0        !bk9/0.7/2/112 
-refvec   = 
-title    = 1/0/~ ? ${MDL} BL THTE & WIND (KTS)|~BL THTE & WIND!0
+wind     = bk0       !bk0       !bk0        !bk9/0.7/2/112
+refvec   =
+title    = 1/0/~ ? ${RUN} BL THTE & WIND (KTS)|~BL THTE & WIND!0
 l
 r
 
@@ -90,9 +73,9 @@ hilo     = 0         !0         !0         !20/H#;L#/1020-1070;900-1012
 hlsym    = 0         !0         !0         !1.3;1.3//22;22/3;3/hw
 clrbar   = 1/V/LL    !0
 wind     = bk0       !bk0       !bk0       !bk0       !bk9/0.7/2/112
-refvec   = 
-title    = 1/0/~ ? ${MDL} PMSL, BL TEMP, WIND (KTS)|~PMSL, BL TEMP, WIND!0
-r 
+refvec   =
+title    = 1/0/~ ? ${RUN} PMSL, BL TEMP, WIND (KTS)|~PMSL, BL TEMP, WIND!0
+r
 
 GLEVEL   = 1000                !1000        !0         !1000
 GVCORD   = pres                !pres        !none      !pres
@@ -110,9 +93,9 @@ HLSYM    = !!1.5;1.5//22;22/3;3/hw
 CLRBAR   = 1
 WIND     = !!!bk9/0.6/2/121/.6
 REFVEC   =
-TITLE    = 1/0/~ ? ${MDL} PMSL, 1000 MB TMP (F), FRONTOGENESIS (F)|~@ FRONTOGENESIS!0
+TITLE    = 1/0/~ ? ${RUN} PMSL, 1000 MB TMP (F), FRONTOGENESIS (F)|~@ FRONTOGENESIS!0
 r
- 
+
 glevel	 = 700       !700       !9950      !0
 gdpfun   = sm5s(kinx)!sm5s(tmpc)!sm5s(dwpf)!sm5s(pmsl)
 gvcord	 = pres      !pres      !sgma      !none
@@ -127,63 +110,63 @@ hlsym	 = !!!1.5;1.5//22;22/3;3/hw
 clrbar	 = 1/V/LL!0
 wind	 =
 refvec	 =
-title	 = 1/0/~ ? ${MDL} K INDEX, 700mb TEMP (>6 C), sfc DWPT & MSLP|~K INDEX!0
+title	 = 1/0/~ ? ${RUN} K INDEX, 700mb TEMP (>6 C), sfc DWPT & MSLP|~K INDEX!0
 r
 
-gdattim  = F06-${fend}-06  
+gdattim  = F06-${fend}-06
 glevel   = 0!500:1000!500:1000!0
-gvcord   = none!pres!pres!none 
-skip     = 0 
-scale    = 0   !-1                   !-1            !0 
+gvcord   = none!pres!pres!none
+skip     = 0
+scale    = 0   !-1                   !-1            !0
 gdpfun   = p06i!sm5s(ldf(hght)       !sm5s(ldf(hght)!sm5s(pmsl)
 type     = f   !c                    !c
 cint     =     !3/0/540              !3/543/1000    !4
-line     =     !4/5/2                !2/5/2         !19//3  
+line     =     !4/5/2                !2/5/2         !19//3
 fint     = .01;.1;.25;.5;.75;1;1.25;1.5;1.75;2;2.5;3;4;5;6;7;8;9
 fline    = 0;21-30;14-20;5
 hilo     =     !0!0!19/H#;L#/1020-1070;900-1010
 hlsym    =     !0!0!1.3;1.3//22;22/3;3/hw
 clrbar   = 1
-wind     = bk0 
+wind     = bk0
 CONTUR	 = 2
-refvec   = 
-title    = 1/0/~ ? ${MDL} 6-HR TOTAL PCPN, 1000-500mb THK |~6-HR PCPN & 1000-500 THK!0
+refvec   =
+title    = 1/0/~ ? ${RUN} 6-HR TOTAL PCPN, 1000-500mb THK |~6-HR PCPN & 1000-500 THK!0
 r
 
 gdattim  =  F00-${fend}-6
 GLEVEL	 = 700!700!700!850!850!9950!9950
 GVCORD	 = PRES!PRES!PRES!PRES!PRES!sgma!sgma
-SKIP	 = 0 
-SCALE	 = 0 
+SKIP	 = 0
+SCALE	 = 0
 GDPFUN	 = sm5s(relh)!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)
 TYPE	 = c/f        ! c
-CINT	 = 50;70;90;95!2;-2 !200;0 !2;-2 !200;0 !2;-2 !-100;0;100    
-LINE	 = 32//1/0    !6/3/2!6/1/2 !2/3/2!2/1/2 !20/3/2!20/1/2  
+CINT	 = 50;70;90;95!2;-2 !200;0 !2;-2 !200;0 !2;-2 !-100;0;100
+LINE	 = 32//1/0    !6/3/2!6/1/2 !2/3/2!2/1/2 !20/3/2!20/1/2
 FINT	 = 50;70;90
 FLINE	 = 0;24;23;22
-HILO	 = 
-HLSYM	 = 
+HILO	 =
+HLSYM	 =
 CLRBAR	 = 1
-WIND	 = 
+WIND	 =
 REFVEC	 =
-TITLE    = 1/0/~ ? ${MDL} @ RH, T (BL yel,850 red,700 cyan)|~@ RH, R/S TEMP!0
-r 
+TITLE    = 1/0/~ ? ${RUN} @ RH, T (BL yel,850 red,700 cyan)|~@ RH, R/S TEMP!0
+r
 
 GLEVEL	 = 4400:10000!700:500!700:500!850 !850 !9950!9950
 GVCORD	 = SGMA      !PRES   !PRES   !PRES!PRES!SGMA!SGMA
-SCALE	 = 0!3!3!0  
+SCALE	 = 0!3!3!0
 GDPFUN	 = sm5s(relh)!sm5s(lav(omeg))!sm5s(lav(omeg))!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)
 TYPE	 = c/f        ! c
-CINT	 = 50;70;90;95!1/1!-1;-3;-5;-7;-9;-11;-13;-15;-17;-19;-21!2;-2!200;0!2;-2!200;0    
-LINE	 = 32//2/0    !30/10/3!6/1/2 !2/3/2!2/1/2 !20/3/2!20/1/2  
+CINT	 = 50;70;90;95!1/1!-1;-3;-5;-7;-9;-11;-13;-15;-17;-19;-21!2;-2!200;0!2;-2!200;0
+LINE	 = 32//2/0    !30/10/3!6/1/2 !2/3/2!2/1/2 !20/3/2!20/1/2
 FINT	 = 50;70;90
 FLINE	 = 0;24;23;22
-HILO	 = 
-HLSYM	 = 
+HILO	 =
+HLSYM	 =
 CLRBAR   = 1
-WIND	 = 
+WIND	 =
 REFVEC	 =
-TITLE    = 1/0/~ ? ${MDL} @ RH,T (BL yel,850 red),7-500 VV|~@ RH,R/S T,VV!0
+TITLE    = 1/0/~ ? ${RUN} @ RH,T (BL yel,850 red),7-500 VV|~@ RH,R/S T,VV!0
 r
 
 glevel	 = 0!0!0!0!700:500         !4400:10000
@@ -197,7 +180,7 @@ line	 = 22/1/2/0!4/1/2/0!7/1/2/0!2/1/2/0!6/1/3!21/1/3
 fint	 = 50;200!50;200!50;200!50;200
 fline	 = 0;23;23!0;25;25!0;30;30!0;15;15
 clrbar	 =
-title	 = 1/0/~ ? ${MDL} PCPN TYPE, 1000-500 RH & 7-500 VV|~PCPN TYPE & VV!0
+title	 = 1/0/~ ? ${RUN} PCPN TYPE, 1000-500 RH & 7-500 VV|~PCPN TYPE & VV!0
 r
 
 glevel	 = 0           !0           !0           !0
@@ -211,7 +194,7 @@ line	 = 22/1/2/0    !4/1/2/0     !7/1/2/0     !2/1/2/0
 fint	 = 50;200      !50;200      !50;200      !50;200
 fline	 = 0;23;23     !0;25;25     !0;30;30     !0;15;15
 clrbar	 =
-title	 = 1/0/~ ? ${MDL} PCPN TYPE|~PCPN TYPE!0
+title	 = 1/0/~ ? ${RUN} PCPN TYPE|~PCPN TYPE!0
 r
 
 GLEVEL   = 500
@@ -230,7 +213,7 @@ HLSYM    =
 CLRBAR   = 1
 WIND     = !!!am1/.2/1/121/.4
 REFVEC   =
-TITLE    = 1/0/~ ? ${MDL} @ HGHT, TEMP & WIND|~500 HGHT,TMP,WIND!0
+TITLE    = 1/0/~ ? ${RUN} @ HGHT, TEMP & WIND|~500 HGHT,TMP,WIND!0
 TEXT     = 1/21//hw
 MAP      = 11/1/2/yes
 STNPLT   =
@@ -266,9 +249,9 @@ FLINE    = 0         !0;24;25;30;29;28;27     !11;12;2;10;15;14;0
 HILO     = 0         !0                       !0                       !5/H#;L#
 HLSYM    = 0         !                        !0                       !1.5//21//hw
 CLRBAR   = 0         !0                       !1                       !0
-WIND     = 
-REFVEC   = 
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+WIND     =
+REFVEC   =
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 TEXT     = 1/21////hw
 CLEAR    = YES
 l
@@ -276,59 +259,59 @@ run
 
 GDATTIM  = f24
 GDPFUN   = sm5s(hght)!(sub(hght^f24,hght^f12))!(sub(hght^f24,hght^f12))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
-run 
+run
 
 GDATTIM  = f36
 GDPFUN   = sm5s(hght)!(sub(hght^f36,hght^f24))!(sub(hght^f36,hght^f24))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f48
 GDPFUN   = sm5s(hght)!(sub(hght^f48,hght^f36))!(sub(hght^f48,hght^f36))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f60
 GDPFUN   = sm5s(hght)!(sub(hght^f60,hght^f48))!(sub(hght^f60,hght^f48))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f72
 GDPFUN   = sm5s(hght)!(sub(hght^f72,hght^f60))!(sub(hght^f72,hght^f60))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f84
 GDPFUN   = sm5s(hght)!(sub(hght^f84,hght^f72))!(sub(hght^f84,hght^f72))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f96
 GDPFUN   = sm5s(hght)!(sub(hght^f96,hght^f84))!(sub(hght^f96,hght^f84))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f108
 GDPFUN   = sm5s(hght)!(sub(hght^f108,hght^f96))!(sub(hght^f108,hght^f96))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
 GDATTIM  = f120
 GDPFUN   = sm5s(hght)!(sub(hght^f120,hght^f108))!(sub(hght^f120,hght^f108))!sm5s(hght)
-TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${MDL} @ MB 12-HR HGT FALLS!0
+TITLE    = 1/-1/~ ? ${RUN} @ MB HGT|~500 HGT CHG!1/-2/~ ? ${RUN} @ MB 12-HR HGT FALLS!0
 l
 run
 
-MAP      = 4/1/2/yes 
+MAP      = 4/1/2/yes
 garea    = 38.5;-91.3;51.4;-71.4
 proj     = nps//3;3;0;1
 GDATTIM  = F00-${fend}-6
@@ -346,31 +329,32 @@ hlsym	 = 0
 clrbar	 = 1/V/LL        !0
 wind	 = bk0           !bk0                  !bk0       !bk0        !bk9/0.9/2/112
 refvec	 =
-title	 = 1/0/~ ? ${MDL} 720-940 MB AVG RH,BL1 WND,850 MB OMG,850-2m dT,850 T|~GR LAKE!0
+title	 = 1/0/~ ? ${RUN} 720-940 MB AVG RH,BL1 WND,850 MB OMG,850-2m dT,850 T|~GR LAKE!0
 FILTER   = y
 r
 
 exit
 EOFplt
-export err=$?;err_chk
+export err=$?
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s "${metaname}" ]]; then
+    echo "FATAL ERROR: Failed to create bwx meta file"
+    exit $(( err + 100 ))
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${metaname}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${metaname}"
+    if [[ ${DBN_ALERT_TYPE} = "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      fi
-   fi
+            "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+                "${COM_ATMOS_GEMPAK_META}/${metaname}"
+    fi
 fi
 exit
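
Both metafile scripts above work around GEMPAK's internal path-length limit by linking the long COM directory into DATA under a short relative name and pointing COMIN at it. A minimal sketch of that idiom, assuming NLN is the workflow's symlink command (typically ln -sf) and COM_ATMOS_GEMPAK_1p00 is already defined:

#! /usr/bin/env bash
# Expose a long COM path under a short name inside DATA so the paths GEMPAK
# reads stay within its length limits.
cd "${DATA}" || exit 2
export COMIN="${RUN}.${PDY}${cyc}"               # short relative name
if [[ ! -L "${COMIN}" ]]; then
  ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"   # NLN ~ "ln -sf" in this workflow
fi
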
diff --git a/gempak/ush/gfs_meta_comp.sh b/gempak/ush/gfs_meta_comp.sh
index 9bd27c5736..36d18d8659 100755
--- a/gempak/ush/gfs_meta_comp.sh
+++ b/gempak/ush/gfs_meta_comp.sh
@@ -1,184 +1,161 @@
-#! /bin/sh
+#! /usr/bin/env bash
 # Metafile Script : gfs_meta_comp.sh
 #
-# This is a script which creates a metafile that runs a comparison of 500 MB 
-# heights and PMSL between the older GFS model run and the newer one. The 
+# This is a script which creates a metafile that runs a comparison of 500 MB
+# heights and PMSL between the older GFS model run and the newer one. The
 # metafile also generates a comparison between the UKMET older run and the newer
 # GFS model run.
 #
-# Log :
-# J. Carr/HPC    5/12/97   Developed Script
-# J. Carr/HPC    8/05/98   Changed map to medium resolution and redefined yesterday code
-# J. Carr/HPC    2/01/99   Changed skip to 0
-# J. Carr/HPC    4/12/99   Added gfs model out to 84 hours.
-# J. Carr/HPC       6/99   put a filter on map
-# J. Carr/HPC     4/2000   Upped the eta comp to 60 hrs.
-# J. Carr/HPC     2/2001   Edited to run on the IBM.
-# J. Carr/HPC     5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC     7/2001   Added more comparison times.
-# J. Carr/HPC     7/2001   Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC     7/2001   Submitted.
-# J. Carr/HPC    11/2004   Changed all eta/ETA entries to nam/NAM.
-#                          Inserted a ? in all title/TITLE lines.
-#
 # Set up Local Variables
 #
-set -x
-#
-export PS4='COMP:$SECONDS + '
-rm -Rf $DATA/COMP $DATA/GEMPAK_META_COMP
-mkdir -p -m 775 $DATA/COMP  $DATA/GEMPAK_META_COMP
-cd $DATA/COMP
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-export COMPONENT=${COMPONENT:-atmos}
+source "${HOMEgfs}/ush/preamble.sh"
+
+rm -Rf "${DATA}/COMP" "${DATA}/GEMPAK_META_COMP"
+mkdir -p -m 775 "${DATA}/COMP"  "${DATA}/GEMPAK_META_COMP"
+cd "${DATA}/COMP" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 mdl=gfs
 MDL=GFS
 metatype="comp"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-#
-#XXW export MODEL=$COMROOT/nawips/prod
-# BV export MODEL=$COMROOT/nawips/${envir}
-# BV export HPCGFS=${MODEL}/${mdl}.$PDY
-export HPCGFS=${COMINgempak}/${mdl}.${PDY}/${cyc}/${COMPONENT}/gempak
-export COMIN00=${COMINgempak}/${mdl}.${PDY}/00/${COMPONENT}/gempak
-export COMIN06=${COMINgempak}/${mdl}.${PDY}/06/${COMPONENT}/gempak
-export COMIN12=${COMINgempak}/${mdl}.${PDY}/12/${COMPONENT}/gempak
-export COMIN18=${COMINgempak}/${mdl}.${PDY}/18/${COMPONENT}/gempak
-if [ ${cyc} -eq 00 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_COMP
-elif [ ${cyc} -eq 06 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_COMP
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_COMP
-elif [ ${cyc} -eq 12 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_COMP
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_COMP
-   cp $COMIN12/gfs_${PDY}12f* $DATA/GEMPAK_META_COMP
-elif [ ${cyc} -eq 18 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_COMP
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_COMP
-   cp $COMIN12/gfs_${PDY}12f* $DATA/GEMPAK_META_COMP
-   cp $COMIN18/gfs_${PDY}18f* $DATA/GEMPAK_META_COMP
-fi
-export COMIN=$DATA/GEMPAK_META_COMP
 
-#XXW export HPCNAM=${MODEL}/nam.$PDY
-#XXW export HPCNGM=${MODEL}/ngm.$PDY
-# BV export HPCNAM=$COMROOT/nawips/prod/nam.$PDY
-export HPCNAM=${COMINnam}.$PDY/gempak
+export COMIN="gfs.multi"
+mkdir "${COMIN}"
+for cycle in $(seq -f "%02g" -s ' ' 0  "${STEP_GFS}" "${cyc}"); do
+    YMD=${PDY} HH=${cycle} GRID="1p00" declare_from_tmpl gempak_dir:COM_ATMOS_GEMPAK_TMPL
+    for file_in in "${gempak_dir}/gfs_1p00_${PDY}${cycle}f"*; do
+        file_out="${COMIN}/$(basename "${file_in}")"
+        if [[ ! -L "${file_out}" ]]; then
+            ${NLN} "${file_in}" "${file_out}"
+        fi
+    done
+done
+
+export HPCNAM="nam.${PDY}"
+if [[ ! -L ${HPCNAM} ]]; then
+    ${NLN} "${COMINnam}/nam.${PDY}/gempak" "${HPCNAM}"
+fi
 
-# export HPCNGM=$COMROOT/nawips/prod/ngm.$PDY
 #
 # DEFINE YESTERDAY
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDY2m1=$(echo $PDYm1 | cut -c 3-)
+PDYm1=$(date --utc +%Y%m%d -d "${PDY} - 24 hours")
 #
 # DEFINE 2 DAYS AGO
-PDYm2=$($NDATE -48 ${PDY}${cyc} | cut -c -8)
-PDY2m2=$(echo $PDYm2 | cut -c 3-)
-#
-# DEFINE 3 DAYS AGO
-PDYm3=$($NDATE -72 ${PDY}${cyc} | cut -c -8)
-PDY2m3=$(echo $PDYm3 | cut -c 3-)
-#
-# THE 1200 UTC CYCLE
-#
-if [ ${cyc} -eq 12 ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in US NP
-    do
-        if [ ${gareas} = US ] ; then
+PDYm2=$(date --utc +%Y%m%d -d "${PDY} - 48 hours")
+
+grid="F-${MDL} | ${PDY:2}/${cyc}00"
+for gareas in US NP; do
+    case ${gareas} in
+        US)
             garea="bwus"
             proj=" "
             latlon="0"
-        elif [ ${gareas} = NP ] ; then
+            ;;
+        NP)
             garea="5;-177;45;-72"
             proj="STR/90.0;-155.0;0.0"
             latlon="1/1/1/1/10"
+            ;;
+        *)
+            echo "FATAL ERROR: Unknown domain"
+            exit 100
+    esac
+
+    case ${cyc} in
+        00 | 12)
+            offsets=(6 12 24 48)
+            contours=1
+            type_param="CTYPE"
+            ex=""
+            ;;
+        06 | 18)
+            offsets=(6 12 18 24)
+            contours=2
+            type_param="TYPE"
+            ex="ex"
+            ;;
+        *)
+            echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}"
+            ;;
+    esac
+
+    for offset in "${offsets[@]}"; do
+        init_time=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${offset} hours")
+        init_PDY=${init_time:0:8}
+        init_cyc=${init_time:8:2}
+
+        if (( init_time <= SDATE )); then
+            echo "Skipping generation for ${init_time} because it is before the experiment began"
+            if (( offset == "${offsets[0]}" )); then
+                echo "First forecast time, no metafile produced"
+                exit 0
+            fi
+            continue
         fi
-        for runtime in 06 00 12y 122d
-        do
-            if [ ${runtime} = "06" ] ; then
-                cyc2="06"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/0600"
-                add="06"
-                testgfsfhr="120"
-            elif [ ${runtime} = "00" ] ; then
-                cyc2="00"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/0000"
-                add="12"
-                testgfsfhr="114"
-            elif [ ${runtime} = "12y" ] ; then
-                cyc2="12"
-                desc="Y"
-                #XXW export HPCGFS=${MODEL}/gfs.${PDYm1}
-                # BV export HPCGFS=$COMROOT/nawips/${envir}/gfs.${PDYm1}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
 
-                grid2="F-GFSHPC | ${PDY2m1}/1200"
-                add="24"
-                testgfsfhr="102"
-            elif [ ${runtime} = "122d" ] ; then
-                cyc2="12"
-                desc="Y2"
-                #XXW export HPCGFS=${MODEL}/gfs.${PDYm2}
-                # BV export HPCGFS=$COMROOT/nawips/${esnvir}/gfs.${PDYm2}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm2}/${cyc2}/${COMPONENT}/gempak
+        # Create symlink in DATA to sidestep gempak path limits
+        HPCGFS="${RUN}.${init_time}"
+        if [[ ! -L ${HPCGFS} ]]; then
+            YMD="${init_PDY}" HH="${init_cyc}" GRID="1p00" declare_from_tmpl source_dir:COM_ATMOS_GEMPAK_TMPL
+            ${NLN} "${source_dir}" "${HPCGFS}"
+        fi
 
-                grid2="F-GFSHPC | ${PDY2m2}/1200"
-                add="48"
-                testgfsfhr="96"
-            fi
+        if [[ ${init_PDY} == "${PDY}" ]]; then
+            desc="T"
+        elif [[ ${init_PDY} == "${PDYm1}" ]]; then
+            desc="Y"
+        elif [[ ${init_PDY} == "${PDYm2}" ]]; then
+            desc="Y2"
+        else
+            echo "FATAL ERROR: Unexpected offset"
+            exit 100
+        fi
+
+        testgfsfhr=$(( 126 - offset ))
+
+        for fhr in $(seq -s ' ' 0 6 126); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            gfsoldfhr=F$(printf "%02g" $((fhr + offset)))
+            grid2="F-GFSHPC | ${init_time:2}/${init_cyc}00"
             gdpfun1="sm5s(hght)!sm5s(hght)"
             gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
             line="5/1/3/2/2!6/1/3/2/2"
             hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
             hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z ${desc} CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z ${desc} CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr=F${gfsfhr}
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
+            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${init_cyc}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${init_cyc}Z ${desc} CYAN)"
+            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${init_cyc}Z PMSL!6/-3/~ ? ${MDL} PMSL (${init_cyc}Z ${desc} CYAN)"
+            if (( fhr > testgfsfhr )); then
+                grid="F-${MDL} | ${PDY:2}/${cyc}00"
+                grid2=" "
+                gfsoldfhr=" "
+                gdpfun1="sm5s(hght)"
+                gdpfun2="sm5s(pmsl)"
+                line="5/1/3/2/2"
+                hilo1="5/H#;L#//5/5;5/y"
+                hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y"
+                title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${init_cyc}Z 500 HGT"
+                title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${init_cyc}Z PMSL"
+            fi
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL= mepowo.gsf
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
 GAREA   = ${garea}
-PROJ    = ${proj} 
+PROJ    = ${proj}
 LATLON  = ${latlon}
 SKIP    = 0
 PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-FINT    = 
-FLINE   = 
-REFVEC  = 
+CONTUR  = ${contours}
+CLRBAR  =
+FINT    =
+FLINE   =
+REFVEC  =
 WIND    = 0
 
 GDFILE  = ${grid}  !${grid2}
@@ -188,7 +165,7 @@ GVCORD  = PRES
 GDPFUN  = ${gdpfun1}
 LINE    = ${line}
 SCALE   = -1
-CTYPE   = c
+${type_param}   = c
 CINT    = 6
 HLSYM   = 1.2;1.2//21//hw
 TEXT    = 1/21//hw
@@ -211,22 +188,58 @@ HILO    = ${hilo2}
 TITLE   = ${title2}
 run
 
+${ex}
 EOF
-export err=$?;err_chk
-            done
+            export err=$?;err_chk
         done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        export HPCUKMET=${COMINukmet}.${PDY}/gempak
-        grid2="F-UKMETHPC | ${PDY2}/0000"
-        # for gfsfhr in 00 12 24 36 48 60 84 108
-        for gfsfhr in 00 12 24 84 108
-        do
-            ukmetfhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
+    done
+
+    if (( 10#${cyc} % 12 == 0 )); then
+
+        #
+        # There are some differences between the 00Z and 12Z cycles.
+        # The YEST string makes sense (though it is used inconsistently);
+        # it is not clear why the other settings differ. - WCK
+        #
+        case ${cyc} in
+            00)
+                type_param="TYPE"
+                hlsym="1.2;1.2//21//hw"
+                wind=""
+                yest=" YEST"
+                run_cmd="run"
+                extra_cmd=$'\nHLSYM   = 1.2;1.2//21//hw\nTEXT    = s/21//hw'  # ANSI-C quoting so \n becomes real newlines in the here-document
+                ;;
+            12)
+                type_param="CTYPE"
+                hlsym="1;1//21//hw"
+                wind="0"
+                yest=""
+                run_cmd="ru"
+                extra_cmd=""
+                ;;
+            *)
+                echo "FATAL ERROR: Invalid cycle ${cyc}"
+                exit 100
+                ;;
+        esac
+
+        # COMPARE THE GFS MODEL TO THE UKMET MODEL FROM 12 HOURS PRIOR
+        ukmet_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - 12 hours")
+        ukmet_PDY=${ukmet_date:0:8}
+        ukmet_cyc=${ukmet_date:8:2}
+        export HPCUKMET=ukmet.${ukmet_PDY}
+        if [[ ! -L "${HPCUKMET}" ]]; then
+            ${NLN} "${COMINukmet}/ukmet.${ukmet_PDY}/gempak" "${HPCUKMET}"
+        fi
+        grid2="F-UKMETHPC | ${ukmet_PDY:2}/${ukmet_date}"
+
+        for fhr in 0 12 24 84 108; do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            ukmetfhr=F$(printf "%02g" $((fhr + 12)))
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL= mepowo.gsf
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
@@ -240,22 +253,22 @@ GDATTIM = ${gfsfhr}
 SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
+CLRBAR  =
 GLEVEL  = 500
 GVCORD  = PRES
 GDPFUN  = sm5s(hght)
 LINE    = 5/1/3/2
 SCALE   = -1
-CTYPE   = c
+${type_param}   = c
 CINT    = 6
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
 TEXT    = s/21//hw
-WIND    = 0
-REFVEC  = 
+WIND    = ${wind}
+REFVEC  =
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} 12Z VS UK 00Z 500 HGT!0
+TITLE   = 5/-1/~ ? ${MDL} @ HGT (${cyc}Z YELLOW)|~${gareas} ${cyc}Z VS UK ${ukmet_cyc}Z 500 HGT!0
 l
 run
 
@@ -265,23 +278,23 @@ GDATTIM = ${ukmetfhr}
 GDPFUN  = sm5s(hght)
 LINE    = 6/1/3/2
 HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? UKMET @ HGT (00Z CYAN)!0
+TITLE   = 6/-2/~ ? UKMET @ HGT (${ukmet_cyc}Z${yest} CYAN)!0
 l
-ru
+${run_cmd}
 
 CLEAR   = yes
 GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} 12Z VS UK 00Z PMSL!0
+TITLE   = 5/-1/~ ? ${MDL} PMSL (${cyc}Z YELLOW)|~${gareas} ${cyc}Z VS UK ${ukmet_cyc}Z PMSL!0
 l
-ru
+${run_cmd}
 
 CLEAR   = no
 GDFILE  = ${grid2}
@@ -291,426 +304,55 @@ LINE    = 6/1/3/2
 HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
 TITLE   = 6/-2/~ ? UKMET PMSL (00Z CYAN)!0
 l
-ru
+${run_cmd}
 
 EOF
-export err=$?;err_chk
-        done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 1200 UTC ECMWF FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        #XXW grid2=${MODEL}/ecmwf.${PDYm1}/ecmwf_glob_${PDYm1}12 
-        grid2=${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12 
-        for gfsfhr in 00 24 48 72 96 120
-        do
-            ecmwffhr=F$(expr ${gfsfhr} + 24)
-	    gfsfhr=F${gfsfhr}
-		
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
 
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-CTYPE   = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    = 0              
-REFVEC  =                                                                         
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} 12Z VS EC Y 12Z 500 HGT!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${ecmwffhr}
-GDPFUN  = sm5s(hght)
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? ECMWF @ HGT (12Z YEST CYAN)!0
-l
-run
+            export err=$?;err_chk
+        done
 
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 5/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} 12Z VS EC Y 12Z PMSL!0
-l
-run
+        # COMPARE THE GFS MODEL TO THE 12 UTC ECMWF FROM YESTERDAY
+        offset=$(( 10#${cyc} + 12 ))  # 12 h back at 00Z, 24 h back at 12Z: yesterday's 12Z ECMWF run
+        ecmwf_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${offset} hours")
+        ecmwf_PDY=${ecmwf_date:0:8}
+        # ecmwf_cyc=${ecmwf_date:8:2}
+        grid2=${COMINecmwf}/ecmwf.${ecmwf_PDY}/gempak/ecmwf_glob_${ecmwf_date}
 
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${ecmwffhr}
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? ECMWF PMSL (12Z YEST CYAN)!0
-l
-run
+        for fhr in $(seq -s ' ' $(( offset%24 )) 24 120 ); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            ecmwffhr=F$(printf "%02g" $((fhr + offset)))
 
-EOF
-export err=$?;err_chk
-        done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 1200 UTC NAM AND NGM
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2="F-NAMHPC | ${PDY2}/${cyc}00"
-        # grid2ngm="F-NGMHPC | ${PDY2}/${cyc}00"
-        for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-        do
-            namfhr=F${gfsfhr}
-        #   ngmfhr=F${gfsfhr}
-            gfsfhr=F${gfsfhr}
-		
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL= mepowo.gsf
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
 GAREA   = ${garea}
 PROJ    = ${proj}
-LATLON  = ${latlon} 
+LATLON  = ${latlon}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 3/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
-HILO    = 3/H#;L#//5/5;5/y
-TITLE   = 3/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} ${MDL}/NAM/NGM 500 HGT!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${namfhr}
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
 GDPFUN  = sm5s(hght)
 LINE    = 5/1/3/2
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-2/~ ? NAM @ HGT (12Z CYAN)!0
-l
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 3/1/3/2
-HILO    = 3/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 3/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} ${MDL}/NAM/NGM PMSL!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${namfhr}
-LINE    = 5/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-2/~ ? NAM PMSL (12Z CYAN)!0
-l
-run
-
-EOF
-export err=$?;err_chk
-        done
-    done
-fi
-
-if [ ${cyc} -eq 00 ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in US NP 
-    do
-        if [ ${gareas} = US ] ; then
-            garea="bwus" 
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = NP ] ; then
-            garea="5;-177;45;-72"
-            proj="STR/90.0;-155.0;0.0"
-            latlon="1/1/1/1/10"
-        fi
-        for runtime in 18 12 00y 002d
-        do
-            if [ ${runtime} = "18" ] ; then
-                cyc2="18"
-                desc="Y"
-# BV            export HPCGFS=${MODEL}/gfs.${PDYm1}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1800"
-                add="06"
-                testgfsfhr="120"
-            elif [ ${runtime} = "12" ] ; then
-                cyc2="12"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1200"
-                add="12"
-                testgfsfhr="114"
-            elif [ ${runtime} = "00y" ] ; then
-                cyc2="00"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/0000"
-                add="24"
-                testgfsfhr="102"
-            elif [ ${runtime} = "002d" ] ; then
-                cyc2="00"
-                desc="Y2"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm2}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m2}/0000"
-                add="48"
-                testgfsfhr="96"
-            fi
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z ${desc} CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z ${desc} CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr=F${gfsfhr}
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj} 
-LATLON  = ${latlon}
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-FINT    = 
-FLINE   = 
-REFVEC  =                                                                         
-WIND    = 0 
-
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500                                                                    
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
 SCALE   = -1
-CTYPE   = c
+${type_param}   = c
 CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-EOF
-export err=$?;err_chk
-            done
-        done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        export HPCUKMET=${COMINukmet}.${PDYm1}/gempak
-        grid2="F-UKMETHPC | ${PDY2m1}/1200"
-        # for gfsfhr in 00 12 24 36 48 60 84 108
-        for gfsfhr in 00 12 24 84 108
-        do
-            ukmetfhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
-	    
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HEIGHTS (00Z YELLOW)|~${gareas} 00Z VS UK Y 12Z 500 HGT!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${ukmetfhr}
-GDPFUN  = sm5s(hght)
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? UKMET @ HEIGHTS (12Z YEST CYAN)!0
-l
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 5/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} 00Z VS UK Y 12Z PMSL!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${ukmetfhr}
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? UKMET PMSL (12Z YEST CYAN)!0
-l
-run
-
-EOF
-export err=$?;err_chk
-        done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC ECMWF FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        # JY grid2="$COMROOT/nawips/prod/ecmwf.${PDYm1}/ecmwf_glob_${PDYm1}12"
-        grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12"
-        for gfsfhr in 12 36 60 84 108
-        do
-            ecmwffhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
-	    
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
+TEXT    = s/21//hw
+WIND    = ${wind}
+REFVEC  =
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (00Z YELLOW)|~${gareas} 00Z VS EC Y 12Z 500 HGT!0
+TITLE   = 5/-1/~ ? ${MDL} @ HGT (${cyc}Z YELLOW)|~${gareas} ${cyc}Z VS EC Y 12Z 500 HGT!0
 l
 run
 
@@ -729,14 +371,12 @@ GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} 00Z VS EC Y 12Z PMSL!0
+TITLE   = 5/-1/~ ? ${MDL} PMSL (${cyc}Z YELLOW)|~${gareas} ${cyc}Z VS EC Y 12Z PMSL!0
 l
 run
 
@@ -751,18 +391,18 @@ l
 run
 
 EOF
-export err=$?;err_chk
+
+            export err=$?;err_chk
         done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 0000 UTC NAM AND NGM
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2="F-NAMHPC | ${PDY2}/${cyc}00"
-        for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-        do
-            namfhr=F${gfsfhr}
-            gfsfhr=F${gfsfhr}
-		
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+
+        # COMPARE THE GFS MODEL TO THE NAM and NGM
+        grid2="F-NAMHPC | ${PDY:2}/${cyc}00"
+        for fhr in $(seq -s ' ' 0 6 84); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            namfhr=F$(printf "%02g" "${fhr}")
+
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL= mepowo.gsf
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
@@ -773,25 +413,25 @@ LATLON  = ${latlon}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
 GDPFUN  = sm5s(hght)
-LINE    = 3/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
+LINE    = 3/1/3/2
+SCALE   = -1
+TYPE    = c
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
+TEXT    = s/21//hw
+WIND    =
+REFVEC  =
 HILO    = 3/H#;L#//5/5;5/y
-TITLE   = 3/-1/~ ? ${MDL} @ HGT (00Z YELLOW)|~${gareas} ${MDL}/NAM/NGM 500 HGT!0
+TITLE   = 3/-1/~ ? ${MDL} @ HGT (${cyc}Z YELLOW)|~${gareas} ${MDL}/NAM/NGM 500 HGT!0
 l
 run
 
@@ -801,7 +441,7 @@ GDATTIM = ${namfhr}
 GDPFUN  = sm5s(hght)
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-2/~ ? NAM @ HGT (00Z CYAN)!0
+TITLE   = 5/-2/~ ? NAM @ HGT (${cyc}Z CYAN)!0
 l
 run
 
@@ -810,14 +450,12 @@ GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 3/1/3/2
 HILO    = 3/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 3/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} ${MDL}/NAM/NGM PMSL!0
+TITLE   = 3/-1/~ ? ${MDL} PMSL (${cyc}Z YELLOW)|~${gareas} ${MDL}/NAM/NGM PMSL!0
 l
 run
 
@@ -827,295 +465,40 @@ GDPFUN  = sm5s(pmsl)
 GDATTIM = ${namfhr}
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-2/~ ? NAM PMSL (CYAN)!0
+TITLE   = 5/-2/~ ? NAM PMSL (${cyc}Z CYAN)!0
 l
 run
 
 EOF
-export err=$?;err_chk
-        done
-    done
-fi
-
-if [ ${cyc} -eq 18 ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in US NP
-    do
-        if [ ${gareas} = US ] ; then
-            garea="bwus"
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = NP ] ; then
-            garea="5;-177;45;-72"
-            proj="STR/90.0;-155.0;0.0"
-            latlon="1/1/1/1/10"
-        fi
-        for runtime in 12 06 00 18y
-        do
-            if [ ${runtime} = "12" ] ; then
-                cyc2="12"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/1200"
-                add="06"
-                testgfsfhr="120"
-            elif [ ${runtime} = "06" ] ; then
-                cyc2="06"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/0600"
-                add="12"
-                testgfsfhr="114"
-            elif [ ${runtime} = "00" ] ; then
-                cyc2="00"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/0000"
-                add="18"
-                testgfsfhr="108"
-            elif [ ${runtime} = "18y" ] ; then
-                cyc2="18"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1800"
-                add="24"
-                testgfsfhr="102"
-            fi   
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z ${desc} CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z ${desc} CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr="F${gfsfhr}"
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-SKIP    = 0     
-PANEL   = 0
-CONTUR  = 1
-CLRBAR  =
-FINT    =
-FLINE   =
-REFVEC  =
-WIND    = 0
-
-GDFILE  = ${grid}!${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500   
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
-SCALE   = -1
-TYPE    = c
-CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-ex
-EOF
-export err=$?;err_chk
-            done
-        done
-    done
-fi
-
-if [ ${cyc} -eq 06 ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in US NP
-    do
-        if [ ${gareas} = US ] ; then
-            garea="bwus"
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = NP ] ; then
-            garea="5;-177;45;-72"
-            proj="STR/90.0;-155.0;0.0"
-            latlon="1/1/1/1/10"
-        fi
-        for runtime in 00 18 12 06
-        do
-            if [ ${runtime} -eq 00 ] ; then
-                cyc2="00"
-                desc="T"
-                grid2="F-${MDL} | ${PDY2}/0000"
-                add="06"
-                testgfsfhr="120"
-            elif [ ${runtime} -eq 18 ] ; then
-                cyc2="18"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
 
-                grid2="F-GFSHPC | ${PDY2m1}/1800"
-                add="12"
-                testgfsfhr="114"
-            elif [ ${runtime} -eq 12 ] ; then
-                cyc2="12"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1200"
-                add="18"
-                testgfsfhr="108"
-            elif [ ${runtime} -eq 06 ] ; then
-                cyc2="06"
-                desc="Y"
-                export HPCGFS=${COMINgempak}/${NET}/${envir}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/0600"
-                add="24"
-                testgfsfhr="102"
-            fi   
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z ${desc} CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z ${desc} CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z VS ${desc} ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr="F${gfsfhr}"
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-\$MAPFIL= mepowo.gsf
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-SKIP    = 0     
-PANEL   = 0
-CONTUR  = 1
-CLRBAR  =
-FINT    =
-FLINE   =
-REFVEC  =
-WIND    = 0
-
-GDFILE  = ${grid}!${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500   
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
-SCALE   = -1
-TYPE    = c
-CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-ex
-EOF
-export err=$?;err_chk
-            done
+            export err=$?;err_chk
         done
-    done
-fi
+    fi
+done
 
-####################################################
+#####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-    if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-      DBN_ALERT_TYPE=GFS_METAFILE
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-       ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} = "GFS_METAFILE_LAST" ]] ; then
+        DBN_ALERT_TYPE=GFS_METAFILE
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
     fi
-    if [ $fhr -eq 126 ] ; then
-     ${DBNROOT}/bin/dbn_alert MODEL GFS_METAFILE_LAST $job \
-       ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
+    if (( fhr == 126 )) ; then
+        "${DBNROOT}/bin/dbn_alert" MODEL GFS_METAFILE_LAST "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
     fi
-   fi
 fi
 
-
 exit
diff --git a/gempak/ush/gfs_meta_crb.sh b/gempak/ush/gfs_meta_crb.sh
index 82fa7795e8..83f08e3d1a 100755
--- a/gempak/ush/gfs_meta_crb.sh
+++ b/gempak/ush/gfs_meta_crb.sh
@@ -1,50 +1,40 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_crb_new
 #
-# Log :
-# J.Carr/HPC         03/13/2001   New script for the Caribbean desk.
-# J. Carr/HPC            5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC            6/2001   Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC            7/2001   Submitted.
-# J. Carr/PMB        11/15/2004   Added a ? to all title/TITLE lines. Changed contur parameter to 2.
-#                                 Changed 12-hr increments to 6-hr with regards to 12-hr and 24-hr pcpn.
-#
 # Set Up Local Variables
 #
-set -x
-#
-export PS4='crb:$SECONDS + '
-mkdir -p -m 775 $DATA/crb
-cd $DATA/crb
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/crb"
+cd "${DATA}/crb" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 #
 mdl=gfs
 MDL=GFS
 metatype="crb"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo ${PDY} | cut -c3-)
+
 #
-# DEFINE YESTERDAY
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDY2m1=$(echo ${PDYm1} | cut -c 3-)
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-#if [ ${cyc} -eq 00 ] ; then
-#    fend=F126
-#elif [ ${cyc} -eq 12 ] ; then
-#    fend=F126
-#else
-#    fend=F126
-#    fend=F84
-#fi
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
+
+# DEFINE YESTERDAY
+PDYm1=$(date --utc +%Y%m%d -d "${PDY} 00 - 24 hours")
 
 fend=F126
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-${MDL} | ${PDY2}/${cyc}00
-GDATTIM	= F00-${fend}-06 
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-${MDL} | ${PDY:2}/${cyc}00
+GDATTIM	= F00-${fend}-06
 DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
@@ -70,13 +60,13 @@ HILO    = !!26;2/H#;L#/1020-1070;900-1012//30;30/y
 HLSYM   = !!2;1.5//21//hw
 CLRBAR  = 1
 WIND    =                        !                       !bk9/0.7/2/112
-REFVEC  = 
+REFVEC  =
 TITLE	= 5/-2/~ ? ${MDL} MSLP, 1000-500mb THICK & 850mb WIND|~MSLP, 1000-500 THKN!
 ru
 
 glevel  = 9950
 gvcord  = sgma
-scale   = 7                         !0 
+scale   = 7                         !0
 gdpfun  = sm5s(sdiv(mixr@0%none;wnd)!kntv(wnd)
 type    = f                         !b
 cint    = 0                         !
@@ -90,22 +80,22 @@ wind    = am0!bk9/0.8/2/112
 refvec  =
 title   = 1/-2/~ ? ${MDL} BL MOIST CONV & WIND|~BL MOISTURE CONV!0
 r
- 
+
 glevel  = 0         !9950
 gvcord  = none      !sgma
 scale   = 0
 skip    = 0/1
 gdpfun  = sm5s(thte)!kntv(wnd)
 type    = c/f       !b
-cint    = 4/200/336 
-line 	= 5/1/1 
+cint    = 4/200/336
+line 	= 5/1/1
 fint    = 336;340;344;348;352;356;360;364;368;372;376
 fline   = 0 ; 21; 22; 23; 24; 25; 26; 27; 28; 29; 30; 14
-hilo    = 
-hlsym   = 
+hilo    =
+hlsym   =
 clrbar  = 1/V/LL!0
 wind    = bk0        !bk9/0.9/2/112
-refvec  = 
+refvec  =
 title   = 1/-2/~ ? ${MDL} BL THTE & WIND (KTS)|~BL THTE & WIND
 r
 
@@ -115,9 +105,9 @@ SKIP    = 0/1;2
 GDPFUN	= vor(wnd)              !vor(wnd)!kntv(wnd)
 CINT	= 2/-99/-2              !2/2/99
 LINE	= 29/5/1/2              !7/5/1/2
-HILO	= 2;6/X;N/-99--4;4-99   !                   
-SCALE	= 5                     !5 
-WIND    = !!bk6/.8/2/112!0 
+HILO	= 2;6/X;N/-99--4;4-99   !
+SCALE	= 5                     !5
+WIND    = !!bk6/.8/2/112!0
 TITLE	= 1//~ ? ${MDL} @ WIND AND REL VORT|~@ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE	= 0;14-21
@@ -134,23 +124,23 @@ CINT    = 5/20
 LINE    = 26//1
 FINT    = 5/20
 FLINE   = 0;24;30;29;23;22;14;15;16;17;20;5
-HILO    = 
+HILO    =
 HLSYM   =
 CLRBAR  = 1
 WIND    = bk0!ak7/.3/1/221/.4!ak6/.3/1/221/.4
 REFVEC  = 0
 TITLE   = 1/-2/~ ? ${MDL} @ WIND SHEAR (KNTS)|~850MB-300MB WIND SHEAR!0
 filter  = no
- 
+
 
 GLEVEL	= 700
 GVCORD  = pres
 GDPFUN	= vor(wnd)              !vor(wnd)!kntv(wnd)
 CINT	= 2/-99/-2              !2/2/99
 LINE	= 29/5/1/2              !7/5/1/2
-HILO	= 2;6/X;N/-99--4;4-99   !                   
-SCALE	= 5                     !5 
-WIND    = !!bk6/.8/2/112!0 
+HILO	= 2;6/X;N/-99--4;4-99   !
+SCALE	= 5                     !5
+WIND    = !!bk6/.8/2/112!0
 TITLE	= 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~@ WIND AND REL VORT!0
 FINT    = 6;8;10;12;14;16;18;20
 FLINE	= 0;14-21
@@ -167,8 +157,8 @@ LINE    = 7/5/1/2        !29/5/1/2!7/5/1/2   !29/5/1/2 !20/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!        !2;6/X;N/10-99;10-99!       !
-HLSYM   = 
-WIND    = bk0            !bk0     !bk0       !bk0      !bk0       !bk9/0.7/2/112!0 
+HLSYM   =
+WIND    = bk0            !bk0     !bk0       !bk0      !bk0       !bk9/0.7/2/112!0
 TITLE   = 1/-2/~ ? ${MDL} @ HEIGHT AND VORTICITY|~@ HGT AND VORTICITY!0
 ru
 
@@ -209,7 +199,7 @@ FLINE	= 0;21-30;14-20;5
 HILO	= 31;0/x#/10-400///y
 HLSYM	= 1.5
 CLRBAR	= 1/V/LL
-WIND	= 
+WIND	=
 REFVEC	=
 TITLE	= 1/-2/~ ? ${MDL} 12-HR TOTAL PCPN|~12-HR TOTAL PCPN
 r
@@ -228,7 +218,7 @@ type    = c   !c/f !b
 cint    = 6/6/18!6/24
 line    = 22///2!32//2/2
 fint    = !13;25;38;50
-fline   = !0;23;22;21;2       
+fline   = !0;23;22;21;2
 hilo    = 0!0
 HLSYM   = 0!0
 clrbar  = 0!1
@@ -246,11 +236,11 @@ CINT    = 10;20;80;90 !30;40;50;60;70
 LINE    = 32//2       !23//2
 FINT    = 10;30;70;90
 FLINE   = 18;8;0;22;23
-HILO    = 
+HILO    =
 HLSYM   =
 CLRBAR  = 1
-WIND    = 
-REFVEC  = 
+WIND    =
+REFVEC  =
 TITLE	= 1/-2/~ ? ${MDL} @ LYR RH|~MEAN RH!0
 ru
 
@@ -259,19 +249,29 @@ EOF
 export err=$?;err_chk
 
 
-if [ ${cyc} -eq 00 ] ; then
-    export HPCECMWF=${COMINecmwf}.${PDY}/gempak
-    export HPCUKMET=${COMINukmet}.${PDYm1}/gempak
-    grid1="F-${MDL} | ${PDY2}/${cyc}00"
-    grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12"
-    grid3="F-UKMETHPC | ${PDY2m1}/1200"
-    for gfsfhr in 12 36 60 84 108
-    do
-        ecmwffhr="F$(expr ${gfsfhr} + 12)"
-        gfsfhr="F${gfsfhr}"
+if [[ ${cyc} == 00 ]] ; then
+    export HPCECMWF=ecmwf.${PDY}
+    HPCECMWF_m1=ecmwf.${PDYm1}
+    export HPCUKMET=ukmet.${PDYm1}
+    if [[ ! -L "${HPCECMWF}" ]]; then
+        ${NLN} "${COMINecmwf}ecmwf.${PDY}/gempak" "${HPCECMWF}"
+    fi
+    if [[ ! -L "${HPCECMWF_m1}" ]]; then
+        ${NLN} "${COMINecmwf}ecmwf.${PDYm1}/gempak" "${HPCECMWF_m1}"
+    fi
+    if [[ ! -L "${HPCUKMET}" ]]; then
+        ${NLN} "${COMINukmet}/ukmet.${PDYm1}/gempak" "${HPCUKMET}"
+    fi
+
+    grid1="F-${MDL} | ${PDY:2}/${cyc}00"
+    grid2="${HPCECMWF_m1}/ecmwf_glob_${PDYm1}12"
+    grid3="F-UKMETHPC | ${PDYm1:2}/1200"
+    for fhr in $(seq -s ' ' 12 24 108); do
+        gfsfhr=F$(printf "%02g" "${fhr}")
+        ecmwffhr=F$(printf "%02g" $((fhr + 12)))
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF10
+        export pgm=gdplot2_nc;. prep_step
+        "${GEMEXE}/gdplot2_nc" << EOF10
 GDFILE  = ${grid1} !${grid2}
 GDATTIM = ${gfsfhr}!${ecmwffhr}
 DEVICE  = ${device}
@@ -280,8 +280,8 @@ TEXT    = 1/21//hw
 MAP     = 6/1/1/yes
 CLEAR   = yes
 CLRBAR  = 1
-PROJ    = mer//3;3;0;1 
-GAREA   = -25;-130;40;-15 
+PROJ    = mer//3;3;0;1
+GAREA   = -25;-130;40;-15
 LATLON  = 18//1/1/10
 
 GLEVEL  = 500
@@ -327,16 +327,15 @@ r
 
 ex
 EOF10
-export err=$?;err_chk
+        export err=$?;err_chk
 
     done
-    for gfsfhr in 00 12 24 36 48 60 84 108 132
-    do
-        ukmetfhr="F$(expr ${gfsfhr} + 12)"
-        gfsfhr=F${gfsfhr}
+    for fhr in 0 12 24 36 48 60 84 108 132; do
+        gfsfhr=F$(printf "%02g" "${fhr}")
+        ukmetfhr=F$(printf "%02g" $((fhr + 12)))
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF25
+        export pgm=gdplot2_nc;. prep_step
+        "${GEMEXE}/gdplot2_nc" << EOF25
 DEVICE  = ${device}
 PANEL   = 0
 TEXT    = 1/21//hw
@@ -390,7 +389,7 @@ r
 
 ex
 EOF25
-export err=$?;err_chk
+        export err=$?;err_chk
 
     done
 fi
@@ -400,20 +399,20 @@ fi
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    fi
 fi
 
 exit
diff --git a/gempak/ush/gfs_meta_hi.sh b/gempak/ush/gfs_meta_hi.sh
index 2b47474e12..c1e7f1bcf8 100755
--- a/gempak/ush/gfs_meta_hi.sh
+++ b/gempak/ush/gfs_meta_hi.sh
@@ -1,43 +1,37 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_hi.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# D.W.Plummer/NCEP   4/97   Changed SKIP for grid2
-# B. Gordon          4/00   Converted for production on the IBM-SP
-#                           and changed gdplot_nc -> gdplot2_nc
-# D. Michaud         4/16   Added logic to display different title
-#                           for parallel runs
-# B. Gordon          7/02   Converted to run off the GFS due to demise
-#                           of the MRF.
-# J. Carr           11/04   Added a ? to all title/TITLE lines. Changed contur parameter to a 2.
-#                           Changed the GDATTIM line to end at F240 every 6 hrs instead of out to
-#                           F384 every 12 hrs. This is to account for 06 and 18 UTC runs.
-# M. Klein           4/07   Fix bug in PW display.
-#
-set -xa
-mkdir -p -m 775 $DATA/mrfhi
-cd $DATA/mrfhi
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/mrfhi"
+cd "${DATA}/mrfhi" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 device="nc | mrfhi.meta"
 
-PDY2=$(echo $PDY | cut -c3-)
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
-if [ "$envir" = "prod" ] ; then
+if [[ "${envir}" = "prod" ]] ; then
    export m_title="GFS"
 else
    export m_title="GFSP"
 fi
 
 export pgm=gdplot2_nc;. prep_step
-startmsg
 
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-GFS | ${PDY2}/${cyc}00
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-GFS | ${PDY:2}/${cyc}00
 GDATTIM	= F000-F192-06; F214-F240-12
-DEVICE	= $device
+DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
 CONTUR	= 2
@@ -45,61 +39,61 @@ MAP	= 1
 CLEAR	= yes
 CLRBAR  = 1
 
-restore ${USHgempak}/restore/garea_hi.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_hi.nts
 
-restore ${USHgempak}/restore/pmsl_thkn.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title MSL PRESSURE, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
+TITLE	= 5/-2/~ ? ${m_title} MSL PRESSURE, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
 l
 ru
 
 
-restore ${USHgempak}/restore/850mb_hght_tmpc.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGHTS, TEMPERATURE AND WIND (KTS)|~@ HGHT, TMP, WIND!0!0!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGHTS, TEMPERATURE AND WIND (KTS)|~@ HGHT, TMP, WIND!0!0!0
 l
 ru
 
 
-restore ${USHgempak}/restore/700mb_hght_relh_omeg.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGHTS, REL HUMIDITY AND OMEGA|~@ HGHT, RH AND OMEGA!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGHTS, REL HUMIDITY AND OMEGA|~@ HGHT, RH AND OMEGA!0
 l
 ru
 
 
-restore ${USHgempak}/restore/500mb_hght_absv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HEIGHTS AND VORTICITY|~@ HGHT AND VORTICITY!0
+TITLE	= 5/-2/~ ? ${m_title} @ HEIGHTS AND VORTICITY|~@ HGHT AND VORTICITY!0
 l
 ru
 
 
-restore ${USHgempak}/restore/200mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/200mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
 l
 ru
 
 
-restore ${USHgempak}/restore/250mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
 l
 ru
 
 
-restore ${USHgempak}/restore/300mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/300mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0
 l
 ru
 
@@ -109,7 +103,7 @@ GVCORD  = sgma
 GDPFUN  = sm5s(relh)
 CINT    = 10
 LINE    = 21/1/2
-TITLE	= 5/-2/~ ? $m_title MEAN RELATIVE HUMIDITY|~1000-440 MB MEAN RH!0
+TITLE	= 5/-2/~ ? ${m_title} MEAN RELATIVE HUMIDITY|~1000-440 MB MEAN RH!0
 SCALE   = 0
 FINT    = 10;70;90
 FLINE   = 20;0;22;23
@@ -122,7 +116,7 @@ GVCORD  = none
 GDPFUN  = sm5s(quo(pwtr,25.4))     !sm5s(quo(pwtr,25.4))
 CINT    = 0.25/0.25/0.5            !0.25/0.75/6.0
 LINE    = 22///2                   !32//2/2
-TITLE	= 5/-2/~ ? $m_title PRECIPITABLE WATER (in)|~PRECIPITABLE WATER!0
+TITLE	= 5/-2/~ ? ${m_title} PRECIPITABLE WATER (in)|~PRECIPITABLE WATER!0
 SCALE   = 0
 SKIP    = 0
 FINT    = 0.5;1.0;1.5;2.0
@@ -137,7 +131,7 @@ GVCORD  = pres
 GDPFUN  = sm5s(tmpc)   !kntv(wnd)
 CINT    = 5
 LINE    = 2/1/3
-TITLE	= 5/-2/~ ? $m_title @ TEMPERATURE|~1000 MB TEMP!0
+TITLE	= 5/-2/~ ? ${m_title} @ TEMPERATURE|~1000 MB TEMP!0
 SCALE   = 0
 SKIP    = 0
 FINT    = 0;30
@@ -149,12 +143,12 @@ CLRBAR  = 1
 r
 
 
-restore ${USHgempak}/restore/precip.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
 GDATTIM = F12-F192-06; F214-F384-12
 GDPFUN  = (quo(p12m,25.4))
-TITLE   = 5/-2/~ ? $m_title 12-HOUR TOTAL PRECIPITATION (IN)|~12-HOURLY TOTAL PCPN
+TITLE   = 5/-2/~ ? ${m_title} 12-HOUR TOTAL PRECIPITATION (IN)|~12-HOURLY TOTAL PCPN
 hilo    = 31;0/x#2/.01-20//50;50/y!17/H#;L#/1020-1070;900-1012
 hlsym   = 1.5!1;1//22;22/2;2/hw
 l
@@ -162,13 +156,13 @@ ru
 
 GDATTIM = F24-F192-06; F214-F384-12
 GDPFUN  = (quo(p24m,25.4))
-TITLE   = 5/-2/~ ? $m_title 24-HOUR TOTAL PRECIPITATION (IN)|~24-HOURLY TOTAL PCPN
+TITLE   = 5/-2/~ ? ${m_title} 24-HOUR TOTAL PRECIPITATION (IN)|~24-HOURLY TOTAL PCPN
 l
 ru
 
 GDATTIM = F180
 GDPFUN  = (quo(p180m,25.4))
-TITLE   = 5/-2/~ ? $m_title 180-HOUR TOTAL PRECIPITATION (IN)|~180-HOURLY TOTAL PCPN
+TITLE   = 5/-2/~ ? ${m_title} 180-HOUR TOTAL PRECIPITATION (IN)|~180-HOURLY TOTAL PCPN
 l
 ru
 
@@ -181,20 +175,20 @@ export err=$?; err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l mrfhi.meta
-export err=$?;export pgm="GEMPAK CHECK FILE"; err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-  mv mrfhi.meta ${COMOUT}/gfs_${PDY}_${cyc}_hi
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_hi
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+if (( err != 0 )) || [[ ! -s mrfhi.meta ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file mrfhi.meta"
+    exit $(( err + 100 ))
+fi
+
+mv mrfhi.meta "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_hi"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+    "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_hi"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/gfs_${PDY}_${cyc}_hi
-      fi
-  fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_hi"
+    fi
 fi
 
 #
diff --git a/gempak/ush/gfs_meta_hur.sh b/gempak/ush/gfs_meta_hur.sh
index aed25d6d78..156a32b5b8 100755
--- a/gempak/ush/gfs_meta_hur.sh
+++ b/gempak/ush/gfs_meta_hur.sh
@@ -1,87 +1,56 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_hur_new
 #
-# Log :
-# D.W. Plummer/NCEP   2/97   Add log header
-# J.W. Carr/HPC    4/15/97   changed the skip parameter
-# J.W. Carr/HPC    4/06/98   Converted from gdplot to gdplot2
-# J.L. Partain/MPC 5/25/98   Chg VOR to AVOR @ 500mb,chg 200 to 250mb to match ETA,NGM
-# J.W. Carr/HPC    8/05/98   Changed map to medium resolution
-# J.W. Carr/HPC    2/02/99   Changed skip to 0
-# J.W. Carr/HPC    4/12/99   Added 84-hr time step.
-# J. Carr/HPC         6/99   Added a filter to map.
-# J. Carr/HPC       2/2001   Edited to run on the IBM.
-# J. Carr/HPC       5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC       6/2001   Incorporated the crb metafile into this one.
-# J. Carr/HPC       6/2001   Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC       7/2001   Submitted.
-# J. Carr/PMB      11/2004   Added a ? to all title lines. Changed contur to a 2 from a 1.
-#
 # Set up Local Variables
 #
-set -x
-#
-export PS4='hur:$SECONDS + '
-mkdir  -p -m 775 $DATA/hur
-cd $DATA/hur
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir  -p -m 775 "${DATA}/hur"
+cd "${DATA}/hur" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 mdl=gfs
 MDL=GFS
 metatype="hur"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo ${PDY} | cut -c3-)
+
 #
-# DEFINE YESTERDAY
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDY2m1=$(echo ${PDYm1} | cut -c 3-)
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-if [ ${cyc} -eq 00 ] ; then
-    gdat="F000-F126-06"
-    gdatpcpn06="F006-F126-06"
-    gdatpcpn12="F012-F126-06"
-    gdatpcpn24="F024-F126-06"
-    gdatpcpn48="F048-F126-06"
-    gdatpcpn60="F060-F126-06"
-    gdatpcpn72="F072-F126-06"
-    gdatpcpn84="F084-F126-06"
-    gdatpcpn96="F096-F126-06"
-    gdatpcpn120="F120-F126-06"
-    gdatpcpn126="F126"
-    run="r"
-elif [ ${cyc} -eq 12 ] ; then
-    gdat="F000-F126-06"
-    gdatpcpn06="F006-F126-06"
-    gdatpcpn12="F012-F126-06"
-    gdatpcpn24="F024-F126-06"
-    gdatpcpn48="F048-F126-06"
-    gdatpcpn60="F060-F126-06"
-    gdatpcpn72="F072-F126-06"
-    gdatpcpn84="F084-F126-06"
-    gdatpcpn96="F096-F126-06"
-    gdatpcpn120="F120-F126-06"
-    gdatpcpn126="F126"
-    run="r"
-else
-    gdat="F000-F126-06"
-    gdatpcpn06="F006-F126-06"
-    gdatpcpn12="F012-F126-06"
-    gdatpcpn24="F024-F126-06"
-    gdatpcpn48="F048-F126-06"
-    gdatpcpn60="F060-F126-06"
-    gdatpcpn72="F072-F126-06"
-    gdatpcpn84="F084-F126-06"
-    gdatpcpn96="F096-F126-06"
-    gdatpcpn120="F120-F126-06"
-    gdatpcpn126="F126"
-    run="r"
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
 fi
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-gdfile  = F-${MDL} | ${PDY2}/${cyc}00
+#
+# DEFINE YESTERDAY
+PDYm1=$(date --utc +%Y%m%d -d "${PDY} 00 - 24 hours")
+#
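+# All cycles currently use the same forecast time ranges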
+case ${cyc} in
+    00)
+        gdat="F000-F126-06"
+        gdatpcpn12="F012-F126-06"
+        gdatpcpn24="F024-F126-06"
+        ;;
+    12)
+        gdat="F000-F126-06"
+        gdatpcpn12="F012-F126-06"
+        gdatpcpn24="F024-F126-06"
+        ;;
+    *)
+        gdat="F000-F126-06"
+        gdatpcpn12="F012-F126-06"
+        gdatpcpn24="F024-F126-06"
+        ;;
+esac
+
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOF
+gdfile  = F-${MDL} | ${PDY:2}/${cyc}00
 gdattim = ${gdat}
 GAREA   = -6;-111;52;-14
 PROJ    = MER/0.0;-49.5;0.0
@@ -89,7 +58,7 @@ MAP     = 1/1/1/yes
 LATLON  = 1//1/1/10
 CONTUR	= 2
 clear   = y
-device  = ${device} 
+device  = ${device}
 TEXT	= 1/22/2/hw
 PANEL	= 0
 filter  = yes
@@ -205,10 +174,10 @@ GDPFUN	= vor(wnd)           !vor(wnd) !sm5s(pmsl)!kntv(wnd)
 TYPE	= c/f                !c        !c         !b
 CINT	= 2/-99/-2           !2/2/99   !2//1008
 LINE	= 29/5/1/2           !7/5/1/2  !6/1/1
-HILO	= 2;6/X;N/-99--4;4-99!         !6/L#/880-1004///1         
+HILO	= 2;6/X;N/-99--4;4-99!         !6/L#/880-1004///1
 HLSYM	= 1;1//22;22/3;3/hw
 SCALE	= 5                  !5        !0
-WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112 
+WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112
 TITLE	= 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~@ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE	= 0;14-21
@@ -222,7 +191,7 @@ LINE	= 29/5/1/2           !7/5/1/2  !6//1
 HILO	= 2;6/X;N/-99--4;4-99!         !6/L#/880-1004///1
 HLSYM	= 1;1//22;22/3;3/hw
 SCALE	= 5                  !5        !0
-WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112 
+WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112
 TITLE	= 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~@ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE	= 0;14-21
@@ -238,12 +207,12 @@ LINE	= 29/5/1/2           !7/5/1/2   !20/1/2/1
 HILO	= 2;6/X;N/-99--4;4-99!          !
 HLSYM	= 1;1//22;22/3;3/hw  !
 SCALE	= 5                  !5         !-1
-WIND    = bk0                !bk0       !bk0       !bk9/.8/1.4/112 
+WIND    = bk0                !bk0       !bk0       !bk9/.8/1.4/112
 TITLE	= 1/-2/~ ? ${MDL} @ WIND AND ABS VORT|~@ WIND AND ABS VORT!0
 FINT    = 16;20;24;28;32;36;40;44
 FLINE	= 0;23-15
 TYPE	= c/f                !c         !c         !b
-r 
+r
 
 GLEVEL  = 250
 GVCORD  = PRES
@@ -260,14 +229,14 @@ CINT    = 5/20
 LINE    = 26//1
 FINT    = 5/25
 FLINE   = 0;24;30;29;23;22;14;15;16;17;20;5
-HILO    = 
+HILO    =
 HLSYM   =
 CLRBAR  = 1
 WIND    = ak0!ak7/.1/1/221/.2!ak6/.1/1/221/.2
-REFVEC  = 
+REFVEC  =
 TITLE   = 1/-2/~ ? ${MDL} @  WIND SHEAR (850=Purple, 300=Cyan) |~850-300MB WIND SHEAR!0
 filter  = no
-r 
+r
 
 glevel   = 250
 gvcord   = pres
@@ -295,13 +264,13 @@ type    = b                                                       !c
 cint    = 0!4
 line    = 0!20//3
 SKIP    = 0/2;2
-fint    = 
-fline   = 
+fint    =
+fline   =
 hilo    = 0!26;2/H#;L#/1020-1070;900-1012//30;30/y
 hlsym   = 0!2;1.5//21//hw
 clrbar  = 0
 wind    = bk10/0.9/1.4/112!bk0
-refvec  = 
+refvec  =
 title   = 1/-2/~ ? ${MDL} 850-400mb MLW and MSLP|~850-400mb MLW & MSLP!0
 r
 
@@ -333,22 +302,28 @@ exit
 EOF
 export err=$?;err_chk
 
-if [ ${cyc} -eq 00 ] ; then
-    # BV export MODEL=/com/nawips/prod
-    # JY export HPCECMWF=${MODEL}/ecmwf.${PDY}
-    # JY export HPCUKMET=${MODEL}/ukmet.${PDY}
-    export HPCECMWF=${COMINecmwf}.${PDY}/gempak
-    export HPCUKMET=${COMINukmet}.${PDY}/gempak
-    grid1="F-${MDL} | ${PDY2}/${cyc}00"
-    grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12"
-    grid3="F-UKMETHPC | ${PDY2}/${cyc}00"
-    for gfsfhr in 12 36 60 84 108
-    do
-        ecmwffhr="F$(expr ${gfsfhr} + 12)"
-        gfsfhr="F${gfsfhr}"
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+if [[ ${cyc} == 00 ]] ; then
+    export HPCECMWF=ecmwf.${PDY}
+    HPCECMWF_m1=ecmwf.${PDYm1}
+    export HPCUKMET=ukmet.${PDYm1}
+    if [[ ! -L "${HPCECMWF}" ]]; then
+        ${NLN} "${COMINecmwf}ecmwf.${PDY}/gempak" "${HPCECMWF}"
+    fi
+    if [[ ! -L "${HPCECMWF_m1}" ]]; then
+        Ln -sf "${COMINecmwf}ecmwf.${PDYm1}/gempak" "${HPCECMWF_m1}"
+    fi
+    if [[ ! -L "${HPCUKMET}" ]]; then
+        ${NLN} "${COMINukmet}/ukmet.${PDYm1}/gempak" "${HPCUKMET}"
+    fi
+    grid1="F-${MDL} | ${PDY:2}/${cyc}00"
+    grid2="${HPCECMWF_m1}/ecmwf_glob_${PDYm1}12"
+    grid3="F-UKMETHPC | ${PDY:2}/${cyc}00"
+    for fhr in $(seq -s ' ' 12 24 108); do
+        gfsfhr=F$(printf "%02g" "${fhr}")
+        ecmwffhr=F$(printf "%02g" $((fhr + 12)))
+
+        export pgm=gdplot2_nc;. prep_step
+        "${GEMEXE}/gdplot2_nc" << EOF
 GDFILE  = ${grid1} !${grid2}
 GDATTIM = ${gfsfhr}!${ecmwffhr}
 DEVICE  = ${device}
@@ -405,16 +380,15 @@ r
 
 ex
 EOF
-export err=$?;err_chk
+        export err=$?;err_chk
 
     done
-    for gfsfhr in 12 24 36 48 60 72 96 120
-    do
-        ukmetfhr=F${gfsfhr}
-        gfsfhr=F${gfsfhr}
+    for fhr in 12 24 36 48 60 72 96 120; do
+        gfsfhr=F$(printf "%02g" "${fhr}")
+        ukmetfhr=F$(printf "%02g" $((fhr)))
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+        export pgm=gdplot2_nc;. prep_step
+        "${GEMEXE}/gdplot2_nc" << EOF
 DEVICE  = ${device}
 PANEL   = 0
 TEXT    = 1/21//hw
@@ -468,7 +442,7 @@ r
 
 ex
 EOF
-export err=$?;err_chk
+        export err=$?;err_chk
 
     done
 fi
@@ -477,19 +451,19 @@ fi
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    fi
 fi
 exit
diff --git a/gempak/ush/gfs_meta_mar_atl.sh b/gempak/ush/gfs_meta_mar_atl.sh
index c8db3e59d4..fdc7c027ba 100755
--- a/gempak/ush/gfs_meta_mar_atl.sh
+++ b/gempak/ush/gfs_meta_mar_atl.sh
@@ -1,31 +1,36 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_mar_atl.sh
 #
-# Log :
-# J. Carr/PMB    12/08/2004    Pushed into production.
-#
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/MAR_ATL"
+cd "${DATA}/MAR_ATL" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='MAR_ATL:$SECONDS + '
-mkdir -p -m 775 $DATA/MAR_ATL
-cd $DATA/MAR_ATL
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_atl"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
+"${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL=mepowo.gsf+mehsuo.ncp+mereuo.ncp+mefbao.ncp
-gdfile     = F-${MDL} | ${PDY2}/${cyc}00
+gdfile     = F-${MDL} | ${PDY:2}/${cyc}00
 gdattim    = f00-f180-6
 GAREA      = 16;-100;71;5
 PROJ       = mer//3;3;0;1
@@ -34,7 +39,7 @@ LATLON	   = 18/2///10
 CONTUR	   = 0
 clear      = y
 
-device     = $device 
+device     = ${device}
 
 GLEVEL	= 850:1000                  !0
 GVCORD	= pres                      !none
@@ -69,8 +74,8 @@ fline      = 29;30;24;0 !
 hilo       = 0!0!0!20/H#;L#/1020-1070;900-1012
 hlsym      = 0!0!0!1;1//22;22/3;3/hw
 clrbar     = 1/V/LL!0
-wind       = bk9/0.8/1/112! 
-refvec     = 
+wind       = bk9/0.8/1/112!
+refvec     =
 title      = 1/-2/~ ? |~ PMSL, BL TEMP, WIND!1//${MDL} MSL PRES,BL TEMP,WIND (KTS)!0
 text       = 1.2/22/2/hw
 clear      = y
@@ -113,9 +118,9 @@ LINE    = 7/5/1/2            !20/1/2/1
 FINT    = 15;21;27;33;39;45;51;57
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!          !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
-WIND    =  
+WIND    =
 REFVEC  =
 TITLE   = 5//~ ? ${MDL} @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
 TEXT    = 1/21//hw
@@ -146,22 +151,22 @@ CLEAR	= yes
 li
 r
 
-glevel     = 300!300!300 
-gvcord     = pres!pres!pres 
+glevel     = 300!300!300
+gvcord     = pres!pres!pres
 panel      = 0
 skip       = 1!1!1/3/3!1
 scale      = 0!0!5!5!-1
 GDPFUN      = mag(kntv(wnd))//jet!jet!div(wnd)//dvg!dvg!sm5s(hght)
 TYPE      = c!c/f!c/f!c!c
 cint       = 30;50!70;90;110;130;150;170;190!-11;-9;-7;-5;-3!2/3/18!12/720
-line       = 26!32//2!19/-2//2!20!1//2 
-fint       = !70;90;110;130;150;170;190!3;5;7;9;11;13!  
+line       = 26!32//2!19/-2//2!20!1//2
+fint       = !70;90;110;130;150;170;190!3;5;7;9;11;13!
 fline      = !0;24;25;29;7;15;14;2!0;23;22;21;17;16;2!
 hilo       = 0!0!0!0!1/H;L/3
 hlsym      = 0!0!0!0!1.5;1.5//22;22/2;2/hw
 clrbar     = 0!0!1/V/LL!0
-wind       = !!am16/0.3//211/0.4! 
-refvec     = 10 
+wind       = !!am16/0.3//211/0.4!
+refvec     = 10
 title      = 1/-2/~ ?|~ @ SPEED & DIVERG!1//${MDL} @ HGHTS, ISOTACHS, & DIVERGENCE!0
 text       = 1.2/22/2/hw
 clear      = y
@@ -262,14 +267,15 @@ export err=$?;err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_atl
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_atl
-   fi
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_atl"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+   "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+      "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_atl"
 fi
 
 
diff --git a/gempak/ush/gfs_meta_mar_comp.sh b/gempak/ush/gfs_meta_mar_comp.sh
index a55fa3c642..d25fc0dc9a 100755
--- a/gempak/ush/gfs_meta_mar_comp.sh
+++ b/gempak/ush/gfs_meta_mar_comp.sh
@@ -1,158 +1,162 @@
-#! /bin/sh
-# Metafile Script : gfs_meta_mar_comp.sh
+#! /usr/bin/env bash
 #
-# This is a script which creates a metafile that runs a comparison of 500 MB 
-# heights and PMSL between the older GFS model run and the newer one. The 
-# metafile also generates a comparison between the UKMET older run and the newer
-# GFS model run.
+# Metafile Script : gfs_meta_mar_comp.sh
 #
-# Log :
-# J. Carr/PMB    12/07/2004    Pushed into production.
-
 # Set up Local Variables
 #
-set -x
-#
-export PS4='MAR_COMP_F${fend}:$SECONDS + '
-rm -Rf $DATA/GEMPAK_META_MAR
-mkdir -p -m 775 $DATA/GEMPAK_META_MAR $DATA/MAR_COMP
 
-cd $DATA/MAR_COMP
-cp $FIXgempak/datatype.tbl datatype.tbl
+source "${HOMEgfs}/ush/preamble.sh"
+
+rm -Rf "${DATA}/GEMPAK_META_MAR"
+mkdir -p -m 775 "${DATA}/GEMPAK_META_MAR" "${DATA}/MAR_COMP"
+
+cd "${DATA}/MAR_COMP" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
-export COMPONENT=${COMPONENT:-atmos}
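+# Collect gempak output from each of today's cycles (up through this one) into
+# a single local directory so all runs can be read from one COMIN path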
+export COMIN="gfs.multi"
+mkdir -p "${COMIN}"
+for cycle in $(seq -f "%02g" -s ' ' 0 "${STEP_GFS}" "${cyc}"); do
+    YMD=${PDY} HH=${cycle} GRID="1p00" declare_from_tmpl gempak_dir:COM_ATMOS_GEMPAK_TMPL
+    for file_in in "${gempak_dir}/gfs_1p00_${PDY}${cycle}f"*; do
+        file_out="${COMIN}/$(basename "${file_in}")"
+        if [[ ! -L "${file_out}" ]]; then
+            ${NLN} "${file_in}" "${file_out}"
+        fi
+    done
+done
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export HPCNAM="nam.${PDY}"
+if [[ ! -L ${HPCNAM} ]]; then
+    ${NLN} "${COMINnam}/nam.${PDY}/gempak" "${HPCNAM}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_comp"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-#
-# BV export MODEL=/com/nawips/prod
-#XXW export HPCGFS=${MODEL}/${mdl}.$PDY
-# BV export HPCGFS=${COMROOT}/nawips/${envir}/${mdl}.$PDY
-export HPCGFS=${COMINgempak}/${mdl}.${PDY}/${cyc}/${COMPONENT}/gempak
-export COMIN00=${COMINgempak}/${mdl}.${PDY}/00/${COMPONENT}/gempak
-export COMIN06=${COMINgempak}/${mdl}.${PDY}/06/${COMPONENT}/gempak
-export COMIN12=${COMINgempak}/${mdl}.${PDY}/12/${COMPONENT}/gempak
-export COMIN18=${COMINgempak}/${mdl}.${PDY}/18/${COMPONENT}/gempak
-if [ ${cyc} -eq 00 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_MAR
-elif [ ${cyc} -eq 06 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_MAR
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_MAR
-elif [ ${cyc} -eq 12 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_MAR
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_MAR
-   cp $COMIN12/gfs_${PDY}12f* $DATA/GEMPAK_META_MAR
-elif [ ${cyc} -eq 18 ] ; then
-   cp $COMIN00/gfs_${PDY}00f* $DATA/GEMPAK_META_MAR
-   cp $COMIN06/gfs_${PDY}06f* $DATA/GEMPAK_META_MAR
-   cp $COMIN12/gfs_${PDY}12f* $DATA/GEMPAK_META_MAR
-   cp $COMIN18/gfs_${PDY}18f* $DATA/GEMPAK_META_MAR
-fi
-export COMIN=$DATA/GEMPAK_META_MAR
-
-# export HPCNAM=${COMINnam}.$PDY
-export HPCNAM=${COMINnam}.$PDY/gempak
 
-# export HPCNGM=${MODEL}/ngm.$PDY
-#
-# DEFINE YESTERDAY
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDY2m1=$(echo $PDYm1 | cut -c 3-)
-#
-# DEFINE 2 DAYS AGO
-PDYm2=$($NDATE -48 ${PDY}${cyc} | cut -c -8)
-PDY2m2=$(echo $PDYm2 | cut -c 3-)
-#
-# DEFINE 3 DAYS AGO
-PDYm3=$($NDATE -72 ${PDY}${cyc} | cut -c -8)
-PDY2m3=$(echo $PDYm3 | cut -c 3-)
-#
-# THE 1200 UTC CYCLE
-#
-if [ ${cyc} -eq 12 ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in NAtl NPac 
-    do
-        if [ ${gareas} = "NAtl" ] ; then
+grid="F-${MDL} | ${PDY:2}/${cyc}00"
+for garea in NAtl NPac; do
+    case ${garea} in
+        NAtl)
             garea="natl"
             proj=" "
             latlon="18/2/1/1/10"
-        elif [ ${gareas} = "NPac" ] ; then
+            ;;
+        NPac)
             garea="mpac"
             proj=" "
             latlon="18/2/1/1/10"
-        fi  
-        for runtime in 06 00
-        do
-            if [ ${runtime} = "06" ] ; then
-                cyc2="06"
-                grid2="F-${MDL} | ${PDY2}/0600"
-                add="06"
-                testgfsfhr="78"
-            elif [ ${runtime} = "00" ] ; then
-                cyc2="00"
-                grid2="F-${MDL} | ${PDY2}/0000"
-                add="12"
-                testgfsfhr="114"
+            ;;
+        *)
+            echo "FATAL ERROR: Unknown domain"
+            exit 100
+    esac
+
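+    # Compare this run against the GFS runs initialized 6 and 12 hours earlier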
+    offsets=(6 12)
+    for offset in "${offsets[@]}"; do
+        init_time=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${offset} hours")
+        init_PDY=${init_time:0:8}
+        init_cyc=${init_time:8:2}
+
+        if (( init_time <= SDATE )); then
+            echo "Skipping generation for ${init_time} because it is before the experiment began"
+            if (( offset == "${offsets[0]}" )); then
+                echo "First forecast time, no metafile produced"
+                exit 0
             fi
+            continue
+        fi
+
+        # Create symlink in DATA to sidestep gempak path limits
+        HPCGFS="${RUN}.${init_time}"
+        if [[ ! -L ${HPCGFS} ]]; then
+            YMD="${init_PDY}" HH="${init_cyc}" GRID="1p00" declare_from_tmpl source_dir:COM_ATMOS_GEMPAK_TMPL
+            ${NLN} "${source_dir}" "${HPCGFS}"
+        fi
+
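+        # GEMPAK plot settings differ slightly between the 00Z/12Z and 06Z/18Z runs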
+        case ${cyc} in
+            00 | 12)
+                contours=1
+                type_param="CTYPE"
+                ex=""
+                ;;
+            06 | 18)
+                contours=2
+                type_param="TYPE"
+                ex="ex"
+                ;;
+            *)
+                echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}"
+        esac
+
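+        # testgfsfhr is the last forecast hour still available from the earlier
+        # cycle; beyond it the plots fall back to showing the current run alone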
+        case ${cyc}_${init_cyc} in
+            00_*)   testgfsfhr=114;;
+            06_00)  testgfsfhr=84;;
+            06_18)  testgfsfhr=72;;
+            12_00)  testgfsfhr=114;;
+            12_06)  testgfsfhr=78;;
+            18_06)  testgfsfhr=72;;
+            18_12)  testgfsfhr=84;;
+            *)
+                echo "FATAL ERROR: Undefined pairing of cycles"
+                exit 200
+                ;;
+        esac
+
+        for fhr in $(seq -s ' ' 0 6 126); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            gfsoldfhr=F$(printf "%02g" $((fhr + offset)))
+            grid2="F-GFSHPC | ${init_time:2}/${init_cyc}00"
             gdpfun1="sm5s(hght)!sm5s(hght)"
             gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
             line="5/1/3/2/2!6/1/3/2/2"
             hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
             hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr=F${gfsfhr}
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${garea} ${cyc}Z vs ${init_cyc}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${init_cyc}Z CYAN)"
+            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${garea} ${cyc}Z vs ${init_cyc}Z PMSL!6/-3/~ ? ${MDL} PMSL (${init_cyc}Z CYAN)"
+            if (( fhr > testgfsfhr )); then
+                grid2=" "
+                gfsoldfhr=" "
+                gdpfun1="sm5s(hght)"
+                gdpfun2="sm5s(pmsl)"
+                line="5/1/3/2/2"
+                hilo1="5/H#;L#//5/5;5/y"
+                hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y"
+                title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${garea} ${cyc}Z vs ${init_cyc}Z 500 HGT"
+                title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${garea} ${cyc}Z vs ${init_cyc}Z PMSL"
+            fi
+
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
 GAREA   = ${garea}
-PROJ    = ${proj} 
+PROJ    = ${proj}
 LATLON  = ${latlon}
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-FINT    = 
-FLINE   = 
-REFVEC  =                                                                         
-WIND    = 0 
+CONTUR  = ${contours}
+CLRBAR  =
+FINT    =
+FLINE   =
+REFVEC  =
+WIND    = 0
 
 GDFILE  = ${grid}  !${grid2}
 GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500                                                                    
+GLEVEL  = 500
 GVCORD  = PRES
 GDPFUN  = ${gdpfun1}
 LINE    = ${line}
 SCALE   = -1
-CTYPE   = c
+${type_param}   = c
 CINT    = 6
 HLSYM   = 1.2;1.2//21//hw
 TEXT    = 1/21//hw
@@ -175,100 +179,59 @@ HILO    = ${hilo2}
 TITLE   = ${title2}
 run
 
+${ex}
 EOF
-export err=$?;err_chk
-            done
+            export err=$?;err_chk
         done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        export HPCUKMET=${COMINukmet}.${PDY}/gempak
-        grid2="F-UKMETHPC | ${PDY2}/0000"
-        # for gfsfhr in 00 12 24 36 48 60 84 108
-        for gfsfhr in 00 12 24 84 108
-        do
-            ukmetfhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
-	    
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-CTYPE   = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    = 0              
-REFVEC  =                                                                         
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} 12Z vs UKM 00Z 500 HGT!0
-l
-run
+    done
 
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${ukmetfhr}
-GDPFUN  = sm5s(hght)
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? UKMET @ HGT (00Z CYAN)!0
-l
-ru
+    if (( 10#${cyc} % 12 == 0 )); then
+
+        #
+        # There are some differences between 00z and 12z
+        # The YEST string makes sense (though it is used inconsistently)
+        # I'm not sure why the others differ. - WCK
+        #
+        case ${cyc} in
+            00)
+                type_param="TYPE"
+                hlsym="1.2;1.2//21//hw"
+                wind=""
+                yest=" YEST"
+                run_cmd="run"
+                extra_cmd="\nHLSYM   = 1.2;1.2//21//hw\nTEXT    = s/21//hw"
+                ;;
+            12)
+                type_param="CTYPE"
+                hlsym="1;1//21//hw"
+                wind="0"
+                yest=""
+                run_cmd="ru"
+                extra_cmd=""
+                ;;
+            *)
+                echo "FATAL ERROR: Invalid cycle {cyc} in ${BASH_SOURCE[0]}"
+                exit 100
+                ;;
+        esac
+
+        # COMPARE THE GFS MODEL TO THE UKMET MODEL FROM 12 HOURS PRIOR
+        ukmet_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - 12 hours")
+        ukmet_PDY=${ukmet_date:0:8}
+        ukmet_cyc=${ukmet_date:8:2}
+
+        export HPCUKMET="ukmet.${ukmet_PDY}"
+        if [[ ! -L "${HPCUKMET}" ]]; then
+            ${NLN} "${COMINukmet}/ukmet.${ukmet_PDY}/gempak" "${HPCUKMET}"
+        fi
+        grid2="F-UKMETHPC | ${ukmet_PDY:2}/${ukmet_date}"
 
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 5/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} 12Z vs UKM 00Z PMSL!0
-l
-ru
+        for fhr in 00 12 24 84 108; do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            ukmetfhr=F$(printf "%02g" $((fhr + 12)))
 
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${ukmetfhr}
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? UKMET PMSL (00Z CYAN)!0
-l
-ru
-
-EOF
-export err=$?;err_chk
-        done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 1200 UTC ECMWF FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2=${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12 
-        for gfsfhr in 00 24 48 72 96 120
-        do
-            ecmwffhr=F$(expr ${gfsfhr} + 24)
-	    gfsfhr=F${gfsfhr}
-		
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
@@ -278,271 +241,85 @@ LATLON  = ${latlon}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
 GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-CTYPE   = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    = 0              
-REFVEC  =                                                                         
+LINE    = 5/1/3/2
+SCALE   = -1
+${type_param}   = c
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
+TEXT    = s/21//hw
+WIND    = 0
+REFVEC  =
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} 12Z vs ECM yest 12Z 500 HGT!0
+TITLE   = 5/-1/~ ? ${MDL} @ HGT (${cyc}Z YELLOW)|~${garea} ${cyc}Z vs UKM ${ukmet_cyc}Z 500 HGT!0
 l
 run
 
 CLEAR   = no
 GDFILE  = ${grid2}
-GDATTIM = ${ecmwffhr}
+GDATTIM = ${ukmetfhr}
 GDPFUN  = sm5s(hght)
 LINE    = 6/1/3/2
 HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? ECMWF @ HGT (12Z YEST CYAN)!0
+TITLE   = 6/-2/~ ? UKMET @ HGT (${ukmet_cyc}Z${yest} CYAN)!0
 l
-run
+${run_cmd}
 
 CLEAR   = yes
 GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} 12Z vs ECM yest 12Z PMSL!0
+TITLE   = 5/-1/~ ? ${MDL} PMSL (${cyc}Z YELLOW)|~${garea} ${cyc}Z vs UKM ${ukmet_cyc}Z PMSL!0
 l
-run
+${run_cmd}
 
 CLEAR   = no
 GDFILE  = ${grid2}
 GDPFUN  = sm5s(pmsl)
-GDATTIM = ${ecmwffhr}
+GDATTIM = ${ukmetfhr}
 LINE    = 6/1/3/2
 HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? ECMWF PMSL (12Z YEST CYAN)!0
+TITLE   = 6/-2/~ ? UKMET PMSL (${ukmet_cyc}Z CYAN)!0
 l
-run
+${run_cmd}
 
 EOF
-export err=$?;err_chk
+            export err=$?;err_chk
         done
-        # COMPARE THE 1200 UTC GFS MODEL TO THE 1200 UTC NAM AND NGM
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2="F-NAMHPC | ${PDY2}/${cyc}00"
-        # grid2ngm="F-NGMHPC | ${PDY2}/${cyc}00"
-        for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-        do
-            namfhr=F${gfsfhr}
-        #   ngmfhr=F${gfsfhr}
-            gfsfhr=F${gfsfhr}
-		
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon} 
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1;1//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
-HILO    = 3/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${gareas} ${MDL}/NAM/NGM 500 HGT!0
-l
-run
 
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${namfhr}
-GDPFUN  = sm5s(hght)
-LINE    = 6/1/3/2
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? NAM @ HGT (12Z CYAN)!0
-l
-run
+        # COMPARE THE GFS MODEL TO THE 12 UTC ECMWF FROM YESTERDAY
+        offset=$(( (10#${cyc}+12)%24 + 12 ))
+        ecmwf_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${offset} hours")
+        ecmwf_PDY=${ecmwf_date:0:8}
+        # ecmwf_cyc=${ecmwf_date:8:2}
 
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 5/1/3/2
-HILO    = 3/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${gareas} ${MDL}/NAM/NGM PMSL!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${namfhr}
-LINE    = 6/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? NAM PMSL (12Z CYAN)!0
-l
-run
-
-EOF
-export err=$?;err_chk
-        done
-    done
-fi
 
-if [ ${cyc} = "00" ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in NAtl NPac
-    do
-        if [ ${gareas} = "NAtl" ] ; then
-            garea="natl" 
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = "NPac" ] ; then
-            garea="mpac"
-            proj=" "
-            latlon="18/2/1/1/10"
+        HPCECMWF=ecmwf.${PDY}
+        if [[ ! -L "${HPCECMWF}" ]]; then
+            ${NLN} "${COMINecmwf}/ecmwf.${ecmwf_PDY}/gempak" "${HPCECMWF}"
         fi
-        for runtime in 18 12
-        do
-            if [ ${runtime} = "18" ] ; then
-                cyc2="18"
-		#XXW export HPCGFS=${MODEL}/${mdl}.${PDYm1}
-		# BV export HPCGFS=$COMROOT/nawips/${envir}/${mdl}.${PDYm1}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1800"
-                add="06"
-                testgfsfhr="114"
-            elif [ ${runtime} = "12" ] ; then
-                cyc2="12"
-		#XXW export HPCGFS=${MODEL}/${mdl}.${PDYm1}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-
-                grid2="F-GFSHPC | ${PDY2m1}/1200"
-                add="12"
-                testgfsfhr="114"
-            fi
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 126
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr=F${gfsfhr}
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj} 
-LATLON  = ${latlon}
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-FINT    = 
-FLINE   = 
-REFVEC  =                                                                         
-WIND    = 0 
+        grid2="${HPCECMWF}/ecmwf_glob_${ecmwf_date}"
 
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500                                                                    
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
-SCALE   = -1
-CTYPE   = c
-CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-EOF
-export err=$?;err_chk
+        for fhr in $(seq -s ' ' $(( offset%24 )) 24 120 ); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            ecmwffhr=F$(printf "%02g" $((fhr + 24)))
 
-            done
-        done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-	export HPCUKMET=${COMINukmet}.${PDYm1}/gempak
-        grid2="F-UKMETHPC | ${PDY2m1}/1200"
-        # for gfsfhr in 00 12 24 36 48 60 84 108
-        for gfsfhr in 00 12 24 84 108
-        do 
-            ukmetfhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
-export pgm=gdplot2_nc;. prep_step; startmsg    
-$GEMEXE/gdplot2_nc << EOF
+            export pgm=gdplot2_nc;. prep_step
+            "${GEMEXE}/gdplot2_nc" << EOF
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
@@ -551,103 +328,26 @@ PROJ    = ${proj}
 LATLON  = ${latlon}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
-SKIP    = 0            
-PANEL   = 0
-CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
-GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
-HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HEIGHTS (00Z YELLOW)|~${gareas} 00Z vs UKM 12Z 500 HGT!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDATTIM = ${ukmetfhr}
-GDPFUN  = sm5s(hght)
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? UKMET @ HEIGHTS (12Z YEST CYAN)!0
-l
-run
 
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-LINE    = 5/1/3/2
-HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} 00Z vs UKM 12Z PMSL!0
-l
-run
-
-CLEAR   = no
-GDFILE  = ${grid2}
-GDPFUN  = sm5s(pmsl)
-GDATTIM = ${ukmetfhr}
-LINE    = 6/1/3/2
-HILO    = 6/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? UKMET PMSL (12Z YEST CYAN)!0
-l
-run
-
-EOF
-export err=$?;err_chk
-        done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC ECMWF FROM YESTERDAY
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12"
-        for gfsfhr in 12 36 60 84 108
-        do
-            ecmwffhr=F$(expr ${gfsfhr} + 12)
-            gfsfhr=F${gfsfhr}
-	    
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-GDFILE  = ${grid}
-GDATTIM = ${gfsfhr}
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
 GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
+LINE    = 5/1/3/2
+SCALE   = -1
+${type_param}   = c
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
+TEXT    = s/21//hw
+WIND    = ${wind}
+REFVEC  =
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (00Z YELLOW)|~${gareas} 00Z vs ECM 12Z 500 HGT!0
+TITLE   = 5/-1/~ ? ${MDL} @ HGT (12Z YELLOW)|~${garea} 12Z vs ECM yest 12Z 500 HGT!0
 l
 run
 
@@ -666,14 +366,12 @@ GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 5/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} 00Z vs ECM 12Z PMSL!0
+TITLE   = 5/-1/~ ? ${MDL} PMSL (12Z YELLOW)|~${garea} 12Z vs ECM yest 12Z PMSL!0
 l
 run
 
@@ -688,18 +386,16 @@ l
 run
 
 EOF
-export err=$?;err_chk
+            export err=$?;err_chk
         done
-        # COMPARE THE 0000 UTC GFS MODEL TO THE 0000 UTC NAM AND NGM
-        grid="F-${MDL} | ${PDY2}/${cyc}00"
-        grid2="F-NAMHPC | ${PDY2}/${cyc}00"
-        for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-        do
-            namfhr=F${gfsfhr}
-            gfsfhr=F${gfsfhr}
-		
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
+
+        # COMPARE THE GFS MODEL TO THE NAM and NGM
+        grid2="F-NAMHPC | ${PDY:2}/${cyc}00"
+        for fhr in $(seq -s ' ' 0 6 84); do
+            gfsfhr=F$(printf "%02g" "${fhr}")
+            namfhr=F$(printf "%02g" "${fhr}")
+
+            "${GEMEXE}/gdplot2_nc" << EOF
 DEVICE  = ${device}
 MAP     = 1/1/1/yes
 CLEAR   = yes
@@ -709,25 +405,25 @@ LATLON  = ${latlon}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 
-SKIP    = 0            
+SKIP    = 0
 PANEL   = 0
 CONTUR  = 2
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES 
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
 GDPFUN  = sm5s(hght)
-LINE    = 5/1/3/2         
-SCALE   = -1           
-TYPE    = c            
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   = 1.2;1.2//21//hw 
-TEXT    = s/21//hw                                                                       
-WIND    =               
-REFVEC  =                                                                         
+LINE    = 5/1/3/2
+SCALE   = -1
+TYPE    = c
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   = ${hlsym}
+TEXT    = s/21//hw
+WIND    =
+REFVEC  =
 HILO    = 3/H#;L#//5/5;5/y
-TITLE   = 5/-1/~ ? ${MDL} @ HGT (00Z YELLOW)|~${gareas} ${MDL}/NAM/NGM 500 HGT!0
+TITLE   = 5/-1/~ ? ${MDL} @ HGT (${cyc}Z YELLOW)|~${garea} ${MDL}/NAM/NGM 500 HGT!0
 l
 run
 
@@ -737,7 +433,7 @@ GDATTIM = ${namfhr}
 GDPFUN  = sm5s(hght)
 LINE    = 6/1/3/2
 HILO    = 5/H#;L#//5/5;5/y
-TITLE   = 6/-2/~ ? NAM @ HGT (00Z CYAN)!0
+TITLE   = 6/-2/~ ? NAM @ HGT (${cyc}Z CYAN)!0
 l
 run
 
@@ -746,14 +442,12 @@ GLEVEL  = 0
 GVCORD  = none
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)
-CINT    = 4                                           
-HLSYM   = 1.2;1.2//21//hw                                                           
-TEXT    = s/21//hw                                                                
+CINT    = 4${extra_cmd}
 GDFILE  = ${grid}
 GDATTIM = ${gfsfhr}
 LINE    = 5/1/3/2
 HILO    = 3/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 5/-1/~ ? ${MDL} PMSL (00Z YELLOW) |~${gareas} ${MDL}/NAM/NGM PMSL!0
+TITLE   = 5/-1/~ ? ${MDL} PMSL (${cyc}Z YELLOW)|~${garea} ${MDL}/NAM/NGM PMSL!0
 l
 run
 
@@ -763,252 +457,30 @@ GDPFUN  = sm5s(pmsl)
 GDATTIM = ${namfhr}
 LINE    = 6/1/3/2
 HILO    = 5/H#;L#/1018-1060;900-1012/5/10;10/y
-TITLE   = 6/-2/~ ? NAM PMSL (CYAN)!0
+TITLE   = 6/-2/~ ? NAM PMSL (${cyc}Z CYAN)!0
 l
 run
 
 EOF
-export err=$?;err_chk
-
+            export err=$?;err_chk
         done
-    done
-fi
+    fi
+done
 
-if [ ${cyc} -eq "18" ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in NAtl NPac
-    do
-        if [ ${gareas} = "NAtl" ] ; then
-            garea="natl"
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = "NPac" ] ; then
-            garea="mpac"
-            proj=" "
-            latlon="18/2/1/1/10"
-        fi
-        for runtime in 12 06
-        do
-            if [ ${runtime} = "12" ] ; then
-                cyc2="12"
-                grid2="F-${MDL} | ${PDY2}/1200"
-                add="06"
-                testgfsfhr="84"
-            elif [ ${runtime} = "06" ] ; then
-                cyc2="06"
-                grid2="F-${MDL} | ${PDY2}/0600"
-                add="12"
-                testgfsfhr="72"
-            fi   
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr="F${gfsfhr}"
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-export pgm=gdplot2_nc;. prep_step; startmsg
-
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-SKIP    = 0     
-PANEL   = 0
-CONTUR  = 1
-CLRBAR  =
-FINT    =
-FLINE   =
-REFVEC  =
-WIND    = 0
-
-GDFILE  = ${grid}!${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500   
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
-SCALE   = -1
-TYPE    = c
-CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-ex
-EOF
-export err=$?;err_chk
-            done
-        done
-    done
-fi
-
-if [ ${cyc} -eq "06" ] ; then
-    grid="F-${MDL} | ${PDY2}/${cyc}00"
-    for gareas in NAtl NPac
-    do
-        if [ ${gareas} = "NAtl" ] ; then
-            garea="natl"
-            proj=" "
-            latlon="0"
-        elif [ ${gareas} = "NPac" ] ; then
-            garea="mpac"
-            proj=" "
-            latlon="18/2/1/1/10"
-        fi
-        for runtime in 00 18
-        do
-            if [ ${runtime} = "00" ] ; then
-                cyc2="00"
-                grid2="F-${MDL} | ${PDY2}/0000"
-                add="06"
-                testgfsfhr="84"
-            elif [ ${runtime} = "18" ] ; then
-                cyc2="18"
-		#XXW export HPCGFS=${MODEL}/${mdl}.${PDYm1}
-                export HPCGFS=${COMINgempak}/${mdl}.${PDYm1}/${cyc2}/${COMPONENT}/gempak
-                grid2="F-GFSHPC | ${PDY2m1}/1800"
-                add="12"
-                testgfsfhr="72"
-            fi   
-            gdpfun1="sm5s(hght)!sm5s(hght)"
-            gdpfun2="sm5s(pmsl)!sm5s(pmsl)"
-            line="5/1/3/2/2!6/1/3/2/2"
-            hilo1="5/H#;L#//5/5;5/y!6/H#;L#//5/5;5/y"
-            hilo2="5/H#;L#/1018-1060;900-1012/5/10;10/y!6/H#;L#/1018-1060;900-1012/5/10;10/y"
-            hilo3="5/H#;L#//5/5;5/y"
-            hilo4="5/H#;L#/1018-1060;900-1012/5/10;10/y"
-            title1="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT!6/-3/~ ? ${MDL} @ HGT (${cyc2}Z CYAN)"
-            title2="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL!6/-3/~ ? ${MDL} PMSL (${cyc2}Z CYAN)"
-            title3="5/-2/~ ? ^ ${MDL} @ HGT (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z 500 HGT"
-            title4="5/-2/~ ? ^ ${MDL} PMSL (${cyc}Z YELLOW)|^${gareas} ${cyc}Z vs ${cyc2}Z PMSL"
-            for gfsfhr in 00 06 12 18 24 30 36 42 48 54 60 66 72 78 84
-            do
-                gfsoldfhr=F$(expr ${gfsfhr} + ${add})
-                gfsfhr2=$(echo ${gfsfhr})
-                gfsfhr="F${gfsfhr}"
-                if [ ${gfsfhr2} -gt ${testgfsfhr} ] ; then
-                    grid="F-${MDL} | ${PDY2}/${cyc}00"
-                    grid2=" "
-                    gfsoldfhr=" "
-                    gdpfun1="sm5s(hght)"
-                    gdpfun2="sm5s(pmsl)"
-                    line="5/1/3/2/2"
-                    hilo1=$(echo ${hilo3})
-                    hilo2=$(echo ${hilo4})
-                    title1=$(echo ${title3})
-                    title2=$(echo ${title4})
-                fi
-export pgm=gdplot2_nc;. prep_step; startmsg
-
-$GEMEXE/gdplot2_nc << EOF
-DEVICE  = ${device}
-MAP     = 1/1/1/yes
-CLEAR   = yes
-GAREA   = ${garea}
-PROJ    = ${proj}
-LATLON  = ${latlon}
-SKIP    = 0     
-PANEL   = 0
-CONTUR  = 1
-CLRBAR  =
-FINT    =
-FLINE   =
-REFVEC  =
-WIND    = 0
-
-GDFILE  = ${grid}!${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-GLEVEL  = 500   
-GVCORD  = PRES
-GDPFUN  = ${gdpfun1}
-LINE    = ${line}
-SCALE   = -1
-TYPE    = c
-CINT    = 6
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-HILO    = ${hilo1}
-TITLE   = ${title1}
-run
-
-CLEAR   = yes
-GLEVEL  = 0
-GVCORD  = none
-SCALE   = 0
-GDPFUN  = ${gdpfun2}
-CINT    = 4
-HLSYM   = 1.2;1.2//21//hw
-TEXT    = 1/21//hw
-GDFILE  = ${grid}  !${grid2}
-GDATTIM = ${gfsfhr}!${gfsoldfhr}
-LINE    = ${line}
-HILO    = ${hilo2}
-TITLE   = ${title2}
-run
-
-ex
-EOF
-export err=$?;err_chk
-
-            done
-        done
-    done
-fi
-
-####################################################
+#####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l ${metaname}
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_comp
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_comp
-   fi
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_comp"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert MODEL" "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_comp"
 fi
 
 exit
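
The rewritten comparison script above drops the hand-enumerated forecast-hour lists in favor of seq loops with printf zero-padding. A minimal standalone sketch of that pattern follows; the offset value is illustrative here (in the script it is derived from the run being compared):

#! /usr/bin/env bash
# Generate GEMPAK-style forecast-hour labels without enumerating them by hand.
offset=6   # illustrative; the script derives this from the compared run
for fhr in $(seq -s ' ' $(( offset % 24 )) 24 120); do
    gfsfhr="F$(printf "%02g" "${fhr}")"           # e.g. F06, F30, ..., F102
    ecmwffhr="F$(printf "%02g" $(( fhr + 24 )))"  # matching ECMWF valid hour
    echo "compare GFS ${gfsfhr} against ECMWF ${ecmwffhr}"
done
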
diff --git a/gempak/ush/gfs_meta_mar_pac.sh b/gempak/ush/gfs_meta_mar_pac.sh
index b44f60a2f7..e9a3f8bdaf 100755
--- a/gempak/ush/gfs_meta_mar_pac.sh
+++ b/gempak/ush/gfs_meta_mar_pac.sh
@@ -1,31 +1,36 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_mar_pac.sh
 #
-# Log :
-# J. Carr/PMB    12/08/2004     Pushed into production
-
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/MAR_PAC"
+cd "${DATA}/MAR_PAC" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='MAR_PAC:$SECONDS + '
-mkdir -p -m 775 $DATA/MAR_PAC
-cd $DATA/MAR_PAC
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_pac"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
+"${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL=mepowo.gsf+mehsuo.ncp+mereuo.ncp+himouo.nws
-gdfile     = F-${MDL} | ${PDY2}/${cyc}00
+gdfile     = F-${MDL} | ${PDY:2}/${cyc}00
 gdattim    = f00-f180-6
 GAREA      = 4;120;69;-105
 PROJ       = mer//3;3;0;1
@@ -34,7 +39,7 @@ LATLON	   = 18/2///10
 CONTUR	   = 1
 clear      = y
 
-device     = $device 
+device     = ${device}
 
 GLEVEL	= 850:1000                  !0
 GVCORD	= pres                      !none
@@ -69,8 +74,8 @@ fline      = 29;30;24;0 !
 hilo       = 0!0!0!20/H#;L#/1020-1070;900-1012
 hlsym      = 0!0!0!1;1//22;22/3;3/hw
 clrbar     = 1/V/LL!0
-wind       = bk9/0.8/1/112! 
-refvec     = 
+wind       = bk9/0.8/1/112!
+refvec     =
 title      = 1/-2/~ ?|~ PMSL, BL TEMP, WIND!1//${MDL} MSL PRES,BL TEMP,WIND (KTS)!0
 text       = 1.2/22/2/hw
 clear      = y
@@ -113,9 +118,9 @@ LINE    = 7/5/1/2            !20/1/2/1
 FINT    = 15;21;27;33;39;45;51;57
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!          !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
-WIND    =  
+WIND    =
 REFVEC  =
 TITLE   = 5//~ ? ${MDL} @  HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
 TEXT    = 1/21//hw
@@ -146,22 +151,22 @@ CLEAR	= yes
 li
 r
 
-glevel     = 300!300!300 
-gvcord     = pres!pres!pres 
+glevel     = 300!300!300
+gvcord     = pres!pres!pres
 panel      = 0
 skip       = 1!1!1/3/3!1
 scale      = 0!0!5!5!-1
 GDPFUN      = mag(kntv(wnd))//jet!jet!div(wnd)//dvg!dvg!sm5s(hght)
 TYPE      = c!c/f!c/f!c!c
 cint       = 30;50!70;90;110;130;150;170;190!-11;-9;-7;-5;-3!2/3/18!12/720
-line       = 26!32//2!19/-2//2!20!1//2 
-fint       = !70;90;110;130;150;170;190!3;5;7;9;11;13!  
+line       = 26!32//2!19/-2//2!20!1//2
+fint       = !70;90;110;130;150;170;190!3;5;7;9;11;13!
 fline      = !0;24;25;29;7;15;14;2!0;23;22;21;17;16;2!
 hilo       = 0!0!0!0!1/H;L/3
 hlsym      = 0!0!0!0!1.5;1.5//22;22/2;2/hw
 clrbar     = 0!0!1/V/LL!0
-wind       = !!am16/0.3//211/0.4! 
-refvec     = 10 
+wind       = !!am16/0.3//211/0.4!
+refvec     = 10
 title      = 1/-2/~ ?|~ @ SPEED & DIVERG!1//${MDL} @ HGHTS, ISOTACHS, & DIVERGENCE!0
 text       = 1.2/22/2/hw
 clear      = y
@@ -255,19 +260,21 @@ exit
 EOFplt
 
 export err=$?;err_chk
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_pac
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_pac
-   fi
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_pac"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_pac"
 fi
 
 exit
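
Each converted script now links its GEMPAK input directory into ${DATA} under a short alias to stay within GEMPAK's path-length limits. A hedged sketch of that idiom with placeholder paths (none of the values below come from the jobs themselves):

#! /usr/bin/env bash
# Placeholder values; in the jobs these come from the environment and preamble.
DATA=/tmp/gempak_work
COM_ATMOS_GEMPAK_1p00=/path/to/com/gfs/gempak/1p00   # hypothetical long COM path
RUN=gfs PDY=20240101 cyc=00
NLN="ln -nsf"

mkdir -p "${DATA}" && cd "${DATA}" || exit 2
COMIN="${RUN}.${PDY}${cyc}"       # short name GEMPAK can open, e.g. gfs.2024010100
if [[ ! -L "${COMIN}" ]]; then
    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
fi
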
diff --git a/gempak/ush/gfs_meta_mar_ql.sh b/gempak/ush/gfs_meta_mar_ql.sh
index f1abf3d395..0443a08a72 100755
--- a/gempak/ush/gfs_meta_mar_ql.sh
+++ b/gempak/ush/gfs_meta_mar_ql.sh
@@ -1,39 +1,43 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_mar_ql.sh
 #
-# Log :
-# J. Carr/PMB    12/07/2004   Pushed into production
-#
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/MAR_QL"
+cd "${DATA}/MAR_QL" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='MAR_QL_F${fend}:$SECONDS + '
-mkdir -p -m 775  $DATA/MAR_QL
-cd $DATA/MAR_QL
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_ql"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-# fend=180
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
+"${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL=mepowo.gsf+mehsuo.ncp+mereuo.ncp+mefbao.ncp
-gdfile	= F-${MDL} | ${PDY2}/${cyc}00
+gdfile	= F-${MDL} | ${PDY:2}/${cyc}00
 gdattim	= f00-f${fend}-6
 GAREA	= 15;-100;70;5
 PROJ	= mer//3;3;0;1
 MAP	= 31 + 6 + 3 + 5
 LATLON	= 18/2/1/1/10
 CONTUR	= 0
-device	= $device 
+device	= ${device}
 GLEVEL	= 9950!0
 GVCORD	= sgma!none
 PANEL	= 0
@@ -83,7 +87,7 @@ ru
 
 GLEVEL  = 500
 GVCORD  = PRES
-SKIP    = 0                  
+SKIP    = 0
 SCALE   = 5                  !-1
 GDPFUN   = (avor(wnd))        !hght
 TYPE   = c/f                !c
@@ -92,7 +96,7 @@ LINE    = 7/5/1/2            !20/1/2/1
 FINT    = 15;21;27;33;39;45;51;57
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!          !
-HLSYM   = 
+HLSYM   =
 WIND    = 0
 TITLE   = 5//~ ? GFS @ HEIGHTS AND VORTICITY|~WATL @ HGHT AND VORT!0
 li
@@ -148,7 +152,7 @@ ru
 
 GLEVEL  = 500
 GVCORD  = PRES
-SKIP    = 0                  
+SKIP    = 0
 SCALE   = 5                  !-1
 GDPFUN  = (avor(wnd))        !hght
 TYPE    = c/f                !c
@@ -157,27 +161,29 @@ LINE    = 7/5/1/2            !20/1/2/1
 FINT    = 15;21;27;33;39;45;51;57
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!          !
-HLSYM   = 
+HLSYM   =
 WIND    = 0
 TITLE   = 5//~ ? GFS @ HEIGHTS AND VORTICITY|~EPAC @ HGHT AND VORT!0
 li
 ru
 exit
 EOFplt
+export err=$?;err_chk
 
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_ql
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_ql
-   fi
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_ql"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_ql"
 fi
 
 exit
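
Because GEMPAK can exit 0 even when it fails to produce a grid, every script now checks both the saved return code and the size of the metafile before publishing it. A sketch of that shared pattern; ${metatype} stands in for the per-script output suffix, and the other variables come from the job environment:

# err holds the gdplot2_nc return code captured right after the heredoc.
if (( err != 0 )) || [[ ! -s "${metaname}" ]]; then
    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
    exit $(( err + 100 ))
fi

mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
if [[ "${SENDDBN}" == "YES" ]]; then
    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
fi
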
diff --git a/gempak/ush/gfs_meta_mar_skewt.sh b/gempak/ush/gfs_meta_mar_skewt.sh
index 040e09e932..dd1d1b92ef 100755
--- a/gempak/ush/gfs_meta_mar_skewt.sh
+++ b/gempak/ush/gfs_meta_mar_skewt.sh
@@ -1,31 +1,35 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_mar_skewt.sh
 #
-# Log :
-# J. Carr/PMB     12/08/2004    Pushed into production
-
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/MAR_SKEWT"
+cd "${DATA}/MAR_SKEWT" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='MAR_SKEWT:$SECONDS + '
-mkdir -p -m 775 $DATA/MAR_SKEWT
-cd $DATA/MAR_SKEWT
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_skewt"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
 
-for fhr in 000 006 012 018 024 030 036 042 048 054 060 066 072
-do
-    export pgm=gdprof;. prep_step; startmsg
+for fhr in $(seq -f "%03g" -s ' ' 0 6 72); do
+    export pgm=gdprof;. prep_step
 
-$GEMEXE/gdprof << EOFplt
+   "${GEMEXE}/gdprof" << EOFplt
 GDATTIM  = F${fhr}
 GVCORD   = PRES
 GDFILE   = F-${MDL}
@@ -38,12 +42,12 @@ SCALE    = 0
 XAXIS    = -40/50/10/1;1;1
 YAXIS    = 1050/100//1;1;1
 WIND     = bk1
-REFVEC   = 
+REFVEC   =
 WINPOS   = 1
 FILTER   = no
 PANEL    = 0
 TEXT     = 1.2/22/2/hw
-DEVICE   = $device
+DEVICE   = ${device}
 OUTPUT   = T
 THTALN   = 18/1/1
 THTELN   = 23/2/1
@@ -272,25 +276,26 @@ ru
 
 exit
 EOFplt
-export err=$?;err_chk
+    export err=$?;err_chk
 
 done
 
-$GEMEXE/gpend
+"${GEMEXE}/gpend"
 
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_skewt
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_skewt
-   fi
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_skewt"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_skewt"
 fi
 
 exit
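
The skewt script's per-forecast-hour gdprof loop uses seq's own formatting to build the three-digit hour labels. A minimal sketch of just that piece:

# F000, F006, ..., F072 for GDATTIM, produced directly by seq -f.
for fhr in $(seq -f "%03g" -s ' ' 0 6 72); do
    echo "GDATTIM  = F${fhr}"
done
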
diff --git a/gempak/ush/gfs_meta_mar_ver.sh b/gempak/ush/gfs_meta_mar_ver.sh
index 63ccba00ed..692da0d349 100755
--- a/gempak/ush/gfs_meta_mar_ver.sh
+++ b/gempak/ush/gfs_meta_mar_ver.sh
@@ -1,31 +1,36 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_mar_ver.sh
 #
-# Log :
-# J. Carr/PMB    12/08/2004    Pushed into production
-#
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/MAR_VER"
+cd "${DATA}/MAR_VER" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='MAR_VER:$SECONDS + '
-mkdir -p -m 775 $DATA/MAR_VER
-cd $DATA/MAR_VER
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
 metatype="mar_ver"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
+"${GEMEXE}/gdplot2_nc" << EOFplt
 \$MAPFIL=hipowo.gsf+mefbao.ncp
-gdfile	= F-${MDL} | ${PDY2}/${cyc}00
+gdfile	= F-${MDL} | ${PDY:2}/${cyc}00
 gdattim	= f00-f48-6
 GLEVEL  = 9950
 GVCORD  = sgma
@@ -42,7 +47,7 @@ FLINE   =
 HILO    =
 HLSYM   =
 CLRBAR  =
-WIND    = 
+WIND    =
 REFVEC  =
 TITLE   = 31/-2/~ ? ${MDL} Gridded BL Wind Direction (40m AGL)|~ WATL GRIDDED WIND DIR!0
 TEXT    = 0.8/21/1/hw
@@ -51,7 +56,7 @@ GAREA   = 27.2;-81.9;46.7;-61.4
 PROJ    = STR/90.0;-67.0;1.0
 MAP     = 31+6
 LATLON  = 18/1/1/1;1/5;5
-DEVICE  = $device
+DEVICE  = ${device}
 STNPLT  = 31/1.3/22/1.6/hw|25/19/1.3/1.6|buoys.tbl
 SATFIL  =
 RADFIL  =
@@ -86,19 +91,21 @@ exit
 EOFplt
 
 export err=$?;err_chk
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_ver
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}_${PDY}_${cyc}_mar_ver
-   fi
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_ver"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_mar_ver"
 fi
 
 exit
diff --git a/gempak/ush/gfs_meta_nhsh.sh b/gempak/ush/gfs_meta_nhsh.sh
index 3e0146270e..c4acf38602 100755
--- a/gempak/ush/gfs_meta_nhsh.sh
+++ b/gempak/ush/gfs_meta_nhsh.sh
@@ -1,39 +1,34 @@
-#!/bin/sh
-
+#! /usr/bin/env bash
 #
 # Metafile Script : mrf_meta_nhsh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# D.W.Plummer/NCEP   2/97   Added $MAPFIL=mepowo.gsf
-# D.W.Plummer/NCEP   4/97   Changed SKIP for grid2
-# B. Gordon          4/00   Converted for production on IBM-SP
-#                           and changed gdplot_nc -> gdplot2_nc
-# D. Michaud         4/16   Added logic to display different titles
-#                           for parallel runs
-# B. Gordon          7/02   Converted to run off the GFS due to demise
-#                           of the MRF.
-# J. Carr           11/04   Changed contur from 1 to a 2.
-#                           Added a ? to all title/TITLE lines.
-#
-set -xa
-mkdir -p -m 775 $DATA/mrfnhsh
-cd $DATA/mrfnhsh
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-PDY2=$(echo $PDY | cut -c3-)
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/mrfnhsh"
+cd "${DATA}/mrfnhsh" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
-if [ "$envir" = "para" ] ; then
+if [[ "${envir}" == "para" ]] ; then
    export m_title="GFSP"
 else
    export m_title="GFS"
 fi
 
-export pgm=gdplot2_nc; prep_step; startmsg
+export pgm=gdplot2_nc; prep_step
 
-$GEMEXE/gdplot2_nc << EOF
+"${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL=mepowo.gsf
-GDFILE	= F-GFS | ${PDY2}/${cyc}00
+GDFILE	= F-GFS | ${PDY:2}/${cyc}00
 GDATTIM	= F000-F384-12
 DEVICE	= nc | Nmeta_nh
 PANEL	= 0
@@ -43,11 +38,11 @@ MAP	= 1
 CLEAR	= yes
 CLRBAR  = 1
 
-restore ${USHgempak}/restore/garea_nh.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_nh.nts
 
-restore ${USHgempak}/restore/500mb_hght_absv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts
 CLRBAR  = 1
-TEXT    = 1/21//hw 
+TEXT    = 1/21//hw
 SKIP	= 0                  !0                  !1
 SCALE	= 5                  !5                  !-1
 GFUNC	= (avor(wnd))//v     !mul(v,-1)          !hght
@@ -59,47 +54,47 @@ HILO	= 2;6/X;N/10-99;10-99!2;6/X;N/10-99;10-99!
 TITLE	= 5//~ ? @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!
 TEXT	= 1/21//hw
 CLEAR	= yes
- 
-TITLE	= 5//~ ? $m_title @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
+
+TITLE	= 5//~ ? ${m_title} @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
 l
 ru
 
 
-restore ${USHgempak}/restore/garea_sh.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts
 
 DEVICE	= nc | Nmeta_sh
-TITLE	= 5//~ ? $m_title @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
+TITLE	= 5//~ ? ${m_title} @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0
 l
 ru
 
 
-restore ${USHgempak}/restore/garea_nh.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_nh.nts
 DEVICE	= nc | Nmeta_nh
 
-restore ${USHgempak}/restore/250mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
 GDPFUN  = knts((mag(wnd)))            !sm9s(hght)
-TITLE	= 5/-2/~ ? $m_title @ HEIGHTS, ISOTACHS AND WIND (KTS)|~ @ HGHT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~ @ HGHT AND WIND!0
 l
 ru
 
 
-restore ${USHgempak}/restore/garea_sh.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts
 DEVICE	= nc | Nmeta_sh
 ru
 
-restore ${USHgempak}/restore/precip.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
 GDATTIM = F12-F240-12
 GDPFUN   = (quo(mul(pr12,43200),25.4))
 GDPFUN   = (quo(p12m,25.4))
-TITLE   = 5//~ ? $m_title 12-HOUR TOTAL PRECIPITATION (IN)|~ 12-HOURLY TOTAL PCPN
+TITLE   = 5//~ ? ${m_title} 12-HOUR TOTAL PRECIPITATION (IN)|~ 12-HOURLY TOTAL PCPN
 l
 r
 
-restore ${USHgempak}/restore/garea_sh.nts
+restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts
 DEVICE	= nc | Nmeta_sh
 ru
 
@@ -112,27 +107,20 @@ export err=$?; err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l Nmeta_nh
-export err=$?;export pgm="GEMPAK CHECK FILE"; err_chk
-ls -l Nmeta_sh
-export err=$?;export pgm="GEMPAK CHECK FILE"; err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-  mv Nmeta_nh ${COMOUT}/gfs_${PDY}_${cyc}_nh
-  mv Nmeta_sh ${COMOUT}/gfs_${PDY}_${cyc}_sh
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_nh
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_sh
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-        DBN_ALERT_TYPE=GFS_METAFILE
-        $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-         $COMOUT/gfs_${PDY}_${cyc}_nh
-        $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-         $COMOUT/gfs_${PDY}_${cyc}_sh
-      fi
-  fi
-fi
-
-#
+for metaname in Nmeta_nh Nmeta_sh; do
+    if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+        echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+        exit $(( err + 100 ))
+    fi
+
+    mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_${metaname/Nmeta_}"
+    if [[ "${SENDDBN}" == "YES" ]] ; then
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_${metaname/Nmeta_}"
+        if [[ "${DBN_ALERT_TYPE}" == "GFS_METAFILE_LAST" ]] ; then
+            DBN_ALERT_TYPE=GFS_METAFILE
+            "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+                "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_${metaname/Nmeta_}"
+        fi
+    fi
+done
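
The nh/sh publishing loop derives the output suffix from the metafile name with bash pattern substitution; ${metaname/Nmeta_} removes the first occurrence of "Nmeta_". A quick sketch with illustrative date values:

PDY=20240101 cyc=00   # illustrative values
for metaname in Nmeta_nh Nmeta_sh; do
    # ${metaname/Nmeta_} -> nh / sh
    echo "gfs_${PDY}_${cyc}_${metaname/Nmeta_}"
done
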
diff --git a/gempak/ush/gfs_meta_opc_na_ver b/gempak/ush/gfs_meta_opc_na_ver
index 8d5f394b3d..3aaf93db68 100755
--- a/gempak/ush/gfs_meta_opc_na_ver
+++ b/gempak/ush/gfs_meta_opc_na_ver
@@ -1,247 +1,79 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_opc_na_ver
 #
-# Log :
-# J. Carr/HPC   12/08/2004     Submitted into production.
-#
 # Set up Local Variables
 #
-set -x
-#
-export PS4='OPC_NA_VER_F${fend}:$SECONDS + '
-mkdir -p -m 775 $DATA/OPC_NA_VER_F${fend}
-cd $DATA/OPC_NA_VER_F${fend}
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-export COMPONENT=${COMPONENT:-atmos}
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/OPC_NA_VER_F${fend}"
+cd "${DATA}/OPC_NA_VER_F${fend}" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
-metatype="ver"
 metaname="gfsver_mpc_na_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-#
-# DEFINE 1 CYCLE AGO
-dc1=$($NDATE -06 ${PDY}${cyc} | cut -c -10)
-date1=$(echo ${dc1} | cut -c -8)
-sdate1=$(echo ${dc1} | cut -c 3-8)
-cycle1=$(echo ${dc1} | cut -c 9,10)
-# DEFINE 2 CYCLES AGO
-dc2=$($NDATE -12 ${PDY}${cyc} | cut -c -10)
-date2=$(echo ${dc2} | cut -c -8)
-sdate2=$(echo ${dc2} | cut -c 3-8)
-cycle2=$(echo ${dc2} | cut -c 9,10)
-# DEFINE 3 CYCLES AGO
-dc3=$($NDATE -18 ${PDY}${cyc} | cut -c -10)
-date3=$(echo ${dc3} | cut -c -8)
-sdate3=$(echo ${dc3} | cut -c 3-8)
-cycle3=$(echo ${dc3} | cut -c 9,10)
-# DEFINE 4 CYCLES AGO
-dc4=$($NDATE -24 ${PDY}${cyc} | cut -c -10)
-date4=$(echo ${dc4} | cut -c -8)
-sdate4=$(echo ${dc4} | cut -c 3-8)
-cycle4=$(echo ${dc4} | cut -c 9,10)
-# DEFINE 5 CYCLES AGO
-dc5=$($NDATE -30 ${PDY}${cyc} | cut -c -10)
-date5=$(echo ${dc5} | cut -c -8)
-sdate5=$(echo ${dc5} | cut -c 3-8)
-cycle5=$(echo ${dc5} | cut -c 9,10)
-# DEFINE 6 CYCLES AGO
-dc6=$($NDATE -36 ${PDY}${cyc} | cut -c -10)
-date6=$(echo ${dc6} | cut -c -8)
-sdate6=$(echo ${dc6} | cut -c 3-8)
-cycle6=$(echo ${dc6} | cut -c 9,10)
-# DEFINE 7 CYCLES AGO
-dc7=$($NDATE -42 ${PDY}${cyc} | cut -c -10)
-date7=$(echo ${dc7} | cut -c -8)
-sdate7=$(echo ${dc7} | cut -c 3-8)
-cycle7=$(echo ${dc7} | cut -c 9,10)
-# DEFINE 8 CYCLES AGO
-dc8=$($NDATE -48 ${PDY}${cyc} | cut -c -10)
-date8=$(echo ${dc8} | cut -c -8)
-sdate8=$(echo ${dc8} | cut -c 3-8)
-cycle8=$(echo ${dc8} | cut -c 9,10)
-# DEFINE 9 CYCLES AGO
-dc9=$($NDATE -54 ${PDY}${cyc} | cut -c -10)
-date9=$(echo ${dc9} | cut -c -8)
-sdate9=$(echo ${dc9} | cut -c 3-8)
-cycle9=$(echo ${dc9} | cut -c 9,10)
-# DEFINE 10 CYCLES AGO
-dc10=$($NDATE -60 ${PDY}${cyc} | cut -c -10)
-date10=$(echo ${dc10} | cut -c -8)
-sdate10=$(echo ${dc10} | cut -c 3-8)
-cycle10=$(echo ${dc10} | cut -c 9,10)
-# DEFINE 11 CYCLES AGO
-dc11=$($NDATE -66 ${PDY}${cyc} | cut -c -10)
-date11=$(echo ${dc11} | cut -c -8)
-sdate11=$(echo ${dc11} | cut -c 3-8)
-cycle11=$(echo ${dc11} | cut -c 9,10)
-# DEFINE 12 CYCLES AGO
-dc12=$($NDATE -72 ${PDY}${cyc} | cut -c -10)
-date12=$(echo ${dc12} | cut -c -8)
-sdate12=$(echo ${dc12} | cut -c 3-8)
-cycle12=$(echo ${dc12} | cut -c 9,10)
-# DEFINE 13 CYCLES AGO
-dc13=$($NDATE -78 ${PDY}${cyc} | cut -c -10)
-date13=$(echo ${dc13} | cut -c -8)
-sdate13=$(echo ${dc13} | cut -c 3-8)
-cycle13=$(echo ${dc13} | cut -c 9,10)
-# DEFINE 14 CYCLES AGO
-dc14=$($NDATE -84 ${PDY}${cyc} | cut -c -10)
-date14=$(echo ${dc14} | cut -c -8)
-sdate14=$(echo ${dc14} | cut -c 3-8)
-cycle14=$(echo ${dc14} | cut -c 9,10)
-# DEFINE 15 CYCLES AGO
-dc15=$($NDATE -90 ${PDY}${cyc} | cut -c -10)
-date15=$(echo ${dc15} | cut -c -8)
-sdate15=$(echo ${dc15} | cut -c 3-8)
-cycle15=$(echo ${dc15} | cut -c 9,10)
-# DEFINE 16 CYCLES AGO
-dc16=$($NDATE -96 ${PDY}${cyc} | cut -c -10)
-date16=$(echo ${dc16} | cut -c -8)
-sdate16=$(echo ${dc16} | cut -c 3-8)
-cycle16=$(echo ${dc16} | cut -c 9,10)
-# DEFINE 17 CYCLES AGO
-dc17=$($NDATE -102 ${PDY}${cyc} | cut -c -10)
-date17=$(echo ${dc17} | cut -c -8)
-sdate17=$(echo ${dc17} | cut -c 3-8)
-cycle17=$(echo ${dc17} | cut -c 9,10)
-# DEFINE 18 CYCLES AGO
-dc18=$($NDATE -108 ${PDY}${cyc} | cut -c -10)
-date18=$(echo ${dc18} | cut -c -8)
-sdate18=$(echo ${dc18} | cut -c 3-8)
-cycle18=$(echo ${dc18} | cut -c 9,10)
-# DEFINE 19 CYCLES AGO
-dc19=$($NDATE -114 ${PDY}${cyc} | cut -c -10)
-date19=$(echo ${dc19} | cut -c -8)
-sdate19=$(echo ${dc19} | cut -c 3-8)
-cycle19=$(echo ${dc19} | cut -c 9,10)
-# DEFINE 20 CYCLES AGO
-dc20=$($NDATE -120 ${PDY}${cyc} | cut -c -10)
-date20=$(echo ${dc20} | cut -c -8)
-sdate20=$(echo ${dc20} | cut -c 3-8)
-cycle20=$(echo ${dc20} | cut -c 9,10)
-# DEFINE 21 CYCLES AGO
-dc21=$($NDATE -126 ${PDY}${cyc} | cut -c -10)
-date21=$(echo ${dc21} | cut -c -8)
-sdate21=$(echo ${dc21} | cut -c 3-8)
-cycle21=$(echo ${dc21} | cut -c 9,10)
 
 # SET CURRENT CYCLE AS THE VERIFICATION GRIDDED FILE.
-vergrid="F-${MDL} | ${PDY2}/${cyc}00"
+vergrid="F-${MDL} | ${PDY:2}/${cyc}00"
 fcsthr="f00"
 
 # SET WHAT RUNS TO COMPARE AGAINST BASED ON MODEL CYCLE TIME.
-if [ ${cyc} -eq 00 ] ; then
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc16} ${dc18} ${dc20}"
-elif [ ${cyc} -eq 12 ] ; then
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc16} ${dc18} ${dc20}"
-else
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc17} ${dc19} ${dc21}"
-fi 
+# seq won't give us any splitting problems, ignore warnings
+# shellcheck disable=SC2207,SC2312
+case ${cyc} in
+    00 | 12) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 96 12 120)) ;;
+    06 | 18) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 90 12 126)) ;;
+    *)
+        echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}"
+        exit 100
+        ;;
+esac
 
 #GENERATING THE METAFILES.
 MDL2="GFSHPC"
-for verday in ${verdays}
-    do
-    cominday=$(echo ${verday} | cut -c -8)
-    #XXW export HPCGFS=$COMROOT/nawips/prod/${mdl}.${cominday}
-    # BV export HPCGFS=$COMROOT/nawips/${envir}/${mdl}.${cominday}
-    export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cyc}/${COMPONENT}/gempak
+for lookback in "${lookbacks[@]}"; do
+    init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")"
+    init_PDY=${init_time:0:8}
+    init_cyc=${init_time:8:2}
+
+    if (( init_time <= ${SDATE:-0} )); then
+        echo "Skipping ver for ${init_time} because it is before the experiment began"
+        if (( lookback == "${lookbacks[0]}" )); then
+            echo "First forecast time, no metafile produced"
+            exit 0
+        else
+            break
+        fi
+    fi
 
-    if [ ${verday} -eq ${dc1} ] ; then
-        dgdattim=f006
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle1}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate1}/${cycle1}00"
-    elif [ ${verday} -eq ${dc2} ] ; then
-        dgdattim=f012
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle2}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate2}/${cycle2}00"
-    elif [ ${verday} -eq ${dc3} ] ; then
-        dgdattim=f018
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle3}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate3}/${cycle3}00"
-    elif [ ${verday} -eq ${dc4} ] ; then
-        dgdattim=f024
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle4}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate4}/${cycle4}00"
-    elif [ ${verday} -eq ${dc5} ] ; then
-        dgdattim=f030
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle5}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate5}/${cycle5}00"
-    elif [ ${verday} -eq ${dc6} ] ; then
-        dgdattim=f036
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle6}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate6}/${cycle6}00"
-    elif [ ${verday} -eq ${dc7} ] ; then
-        dgdattim=f042
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle7}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate7}/${cycle7}00"
-    elif [ ${verday} -eq ${dc8} ] ; then
-        dgdattim=f048
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle8}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate8}/${cycle8}00"
-    elif [ ${verday} -eq ${dc9} ] ; then
-        dgdattim=f054
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle9}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate9}/${cycle9}00"
-    elif [ ${verday} -eq ${dc10} ] ; then
-        dgdattim=f060
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle10}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate10}/${cycle10}00"
-    elif [ ${verday} -eq ${dc11} ] ; then
-        dgdattim=f066
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle11}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate11}/${cycle11}00"
-    elif [ ${verday} -eq ${dc12} ] ; then
-        dgdattim=f072
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle12}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate12}/${cycle12}00"
-    elif [ ${verday} -eq ${dc13} ] ; then
-        dgdattim=f078
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle13}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate13}/${cycle13}00"
-    elif [ ${verday} -eq ${dc14} ] ; then
-        dgdattim=f084
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle14}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate14}/${cycle14}00"
-    elif [ ${verday} -eq ${dc15} ] ; then
-        dgdattim=f090
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle15}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate15}/${cycle15}00"
-    elif [ ${verday} -eq ${dc16} ] ; then
-        dgdattim=f096
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle16}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate16}/${cycle16}00"
-    elif [ ${verday} -eq ${dc17} ] ; then
-        dgdattim=f102
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle17}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate17}/${cycle17}00"
-    elif [ ${verday} -eq ${dc18} ] ; then
-        dgdattim=f108
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle18}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate18}/${cycle18}00"
-    elif [ ${verday} -eq ${dc19} ] ; then
-        dgdattim=f114
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle19}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate19}/${cycle19}00"
-    elif [ ${verday} -eq ${dc20} ] ; then
-        dgdattim=f120
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle20}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate20}/${cycle20}00"
-    elif [ ${verday} -eq ${dc21} ] ; then
-        dgdattim=f126
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle21}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate21}/${cycle21}00"
+    dgdattim="f$(printf "%03g" "${lookback}")"
+
+    # Create symlink in DATA to sidestep gempak path limits
+    HPCGFS="${RUN}.${init_time}"
+    if [[ ! -L ${HPCGFS} ]]; then
+        YMD=${init_PDY} HH=${init_cyc} GRID="1p00" declare_from_tmpl source_dir:COM_ATMOS_GEMPAK_TMPL
+        ${NLN} "${source_dir}" "${HPCGFS}"
     fi
 
-# 500 MB HEIGHT METAFILE
-export pgm=gdplot2_nc;. prep_step; startmsg
+    grid="F-${MDL2} | ${init_PDY}/${init_cyc}00"
+
+    # 500 MB HEIGHT METAFILE
+    export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
-PROJ     = MER 
+    "${GEMEXE}/gdplot2_nc" << EOFplt
+PROJ     = MER
 GAREA    = 15.0;-100.0;70.0;20.0
 map      = 1//2
 clear    = yes
@@ -294,11 +126,11 @@ r
 gdfile   = ${vergrid}
 gdattim  = ${fcsthr}
 gdpfun   = mag(kntv(wnd))
-glevel   = 9950 
-gvcord   = sgma 
-scale    = 0 
-cint     = 35;50;65 
-line     = 6/1/3 
+glevel   = 9950
+gvcord   = sgma
+scale    = 0
+cint     = 35;50;65
+line     = 6/1/3
 title    = 6/-2/~ GFS WIND ISOTACHS 30m|~WIND DIFF
 clear    = yes
 r
@@ -306,29 +138,35 @@ r
 gdfile   = ${grid}
 gdattim  = ${dgdattim}
 line     = 5/1/3
-contur   = 0 
-title    = 5/-1/~ GFS WIND ISOTACHS 30m 
+contur   = 0
+title    = 5/-1/~ GFS WIND ISOTACHS 30m
 clear    = no
 r
 
 ex
 EOFplt
 
-export err=$?;err_chk
+    export err=$?;err_chk
+    if (( err != 0 )); then
+        echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+        exit $(( err + 100 ))
+    fi
 done
 
-####################################################
+#####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l ${metaname}
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}ver_${PDY}_${cyc}_na_mar
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}ver_${PDY}_${cyc}_na_mar
-   fi
+if [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit 100
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}ver_${PDY}_${cyc}_na_mar"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}ver_${PDY}_${cyc}_na_mar"
 fi
 
 exit
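
The verification scripts replace the 21 hand-written NDATE blocks with a single loop over lookback hours, deriving each prior init time with GNU date. A standalone sketch with illustrative values:

#! /usr/bin/env bash
PDY=20240101 cyc=12   # illustrative current cycle
for lookback in 6 12 18 24; do
    init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")"
    init_PDY=${init_time:0:8}     # YYYYMMDD of the earlier run
    init_cyc=${init_time:8:2}     # HH of the earlier run
    dgdattim="f$(printf "%03g" "${lookback}")"
    echo "verify ${init_PDY}/${init_cyc}00 at ${dgdattim}"
done
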
diff --git a/gempak/ush/gfs_meta_opc_np_ver b/gempak/ush/gfs_meta_opc_np_ver
index 5cb9fba3c9..0968b55747 100755
--- a/gempak/ush/gfs_meta_opc_np_ver
+++ b/gempak/ush/gfs_meta_opc_np_ver
@@ -1,249 +1,79 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_opc_np_ver
 #
-# Log :
-# J. Carr/HPC   12/08/2004     Submitted into production.
-#
 # Set up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/OPC_NP_VER_F${fend}"
+cd "${DATA}/OPC_NP_VER_F${fend}" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
 #
-export PS4='OPC_NP_VER_F${fend}:$SECONDS + '
-mkdir -p -m 775 $DATA/OPC_NP_VER_F${fend}
-cd $DATA/OPC_NP_VER_F${fend}
-cp $FIXgempak/datatype.tbl datatype.tbl
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export COMPONENT=${COMPONENT:-atmos}
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL="GFS"
-metatype="ver"
 metaname="gfsver_mpc_np_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-#
-#
-# DEFINE 1 CYCLE AGO
-dc1=$($NDATE -06 ${PDY}${cyc} | cut -c -10)
-date1=$(echo ${dc1} | cut -c -8)
-sdate1=$(echo ${dc1} | cut -c 3-8)
-cycle1=$(echo ${dc1} | cut -c 9,10)
-# DEFINE 2 CYCLES AGO
-dc2=$($NDATE -12 ${PDY}${cyc} | cut -c -10)
-date2=$(echo ${dc2} | cut -c -8)
-sdate2=$(echo ${dc2} | cut -c 3-8)
-cycle2=$(echo ${dc2} | cut -c 9,10)
-# DEFINE 3 CYCLES AGO
-dc3=$($NDATE -18 ${PDY}${cyc} | cut -c -10)
-date3=$(echo ${dc3} | cut -c -8)
-sdate3=$(echo ${dc3} | cut -c 3-8)
-cycle3=$(echo ${dc3} | cut -c 9,10)
-# DEFINE 4 CYCLES AGO
-dc4=$($NDATE -24 ${PDY}${cyc} | cut -c -10)
-date4=$(echo ${dc4} | cut -c -8)
-sdate4=$(echo ${dc4} | cut -c 3-8)
-cycle4=$(echo ${dc4} | cut -c 9,10)
-# DEFINE 5 CYCLES AGO
-dc5=$($NDATE -30 ${PDY}${cyc} | cut -c -10)
-date5=$(echo ${dc5} | cut -c -8)
-sdate5=$(echo ${dc5} | cut -c 3-8)
-cycle5=$(echo ${dc5} | cut -c 9,10)
-# DEFINE 6 CYCLES AGO
-dc6=$($NDATE -36 ${PDY}${cyc} | cut -c -10)
-date6=$(echo ${dc6} | cut -c -8)
-sdate6=$(echo ${dc6} | cut -c 3-8)
-cycle6=$(echo ${dc6} | cut -c 9,10)
-# DEFINE 7 CYCLES AGO
-dc7=$($NDATE -42 ${PDY}${cyc} | cut -c -10)
-date7=$(echo ${dc7} | cut -c -8)
-sdate7=$(echo ${dc7} | cut -c 3-8)
-cycle7=$(echo ${dc7} | cut -c 9,10)
-# DEFINE 8 CYCLES AGO
-dc8=$($NDATE -48 ${PDY}${cyc} | cut -c -10)
-date8=$(echo ${dc8} | cut -c -8)
-sdate8=$(echo ${dc8} | cut -c 3-8)
-cycle8=$(echo ${dc8} | cut -c 9,10)
-# DEFINE 9 CYCLES AGO
-dc9=$($NDATE -54 ${PDY}${cyc} | cut -c -10)
-date9=$(echo ${dc9} | cut -c -8)
-sdate9=$(echo ${dc9} | cut -c 3-8)
-cycle9=$(echo ${dc9} | cut -c 9,10)
-# DEFINE 10 CYCLES AGO
-dc10=$($NDATE -60 ${PDY}${cyc} | cut -c -10)
-date10=$(echo ${dc10} | cut -c -8)
-sdate10=$(echo ${dc10} | cut -c 3-8)
-cycle10=$(echo ${dc10} | cut -c 9,10)
-# DEFINE 11 CYCLES AGO
-dc11=$($NDATE -66 ${PDY}${cyc} | cut -c -10)
-date11=$(echo ${dc11} | cut -c -8)
-sdate11=$(echo ${dc11} | cut -c 3-8)
-cycle11=$(echo ${dc11} | cut -c 9,10)
-# DEFINE 12 CYCLES AGO
-dc12=$($NDATE -72 ${PDY}${cyc} | cut -c -10)
-date12=$(echo ${dc12} | cut -c -8)
-sdate12=$(echo ${dc12} | cut -c 3-8)
-cycle12=$(echo ${dc12} | cut -c 9,10)
-# DEFINE 13 CYCLES AGO
-dc13=$($NDATE -78 ${PDY}${cyc} | cut -c -10)
-date13=$(echo ${dc13} | cut -c -8)
-sdate13=$(echo ${dc13} | cut -c 3-8)
-cycle13=$(echo ${dc13} | cut -c 9,10)
-# DEFINE 14 CYCLES AGO
-dc14=$($NDATE -84 ${PDY}${cyc} | cut -c -10)
-date14=$(echo ${dc14} | cut -c -8)
-sdate14=$(echo ${dc14} | cut -c 3-8)
-cycle14=$(echo ${dc14} | cut -c 9,10)
-# DEFINE 15 CYCLES AGO
-dc15=$($NDATE -90 ${PDY}${cyc} | cut -c -10)
-date15=$(echo ${dc15} | cut -c -8)
-sdate15=$(echo ${dc15} | cut -c 3-8)
-cycle15=$(echo ${dc15} | cut -c 9,10)
-# DEFINE 16 CYCLES AGO
-dc16=$($NDATE -96 ${PDY}${cyc} | cut -c -10)
-date16=$(echo ${dc16} | cut -c -8)
-sdate16=$(echo ${dc16} | cut -c 3-8)
-cycle16=$(echo ${dc16} | cut -c 9,10)
-# DEFINE 17 CYCLES AGO
-dc17=$($NDATE -102 ${PDY}${cyc} | cut -c -10)
-date17=$(echo ${dc17} | cut -c -8)
-sdate17=$(echo ${dc17} | cut -c 3-8)
-cycle17=$(echo ${dc17} | cut -c 9,10)
-# DEFINE 18 CYCLES AGO
-dc18=$($NDATE -108 ${PDY}${cyc} | cut -c -10)
-date18=$(echo ${dc18} | cut -c -8)
-sdate18=$(echo ${dc18} | cut -c 3-8)
-cycle18=$(echo ${dc18} | cut -c 9,10)
-# DEFINE 19 CYCLES AGO
-dc19=$($NDATE -114 ${PDY}${cyc} | cut -c -10)
-date19=$(echo ${dc19} | cut -c -8)
-sdate19=$(echo ${dc19} | cut -c 3-8)
-cycle19=$(echo ${dc19} | cut -c 9,10)
-# DEFINE 20 CYCLES AGO
-dc20=$($NDATE -120 ${PDY}${cyc} | cut -c -10)
-date20=$(echo ${dc20} | cut -c -8)
-sdate20=$(echo ${dc20} | cut -c 3-8)
-cycle20=$(echo ${dc20} | cut -c 9,10)
-# DEFINE 21 CYCLES AGO
-dc21=$($NDATE -126 ${PDY}${cyc} | cut -c -10)
-date21=$(echo ${dc21} | cut -c -8)
-sdate21=$(echo ${dc21} | cut -c 3-8)
-cycle21=$(echo ${dc21} | cut -c 9,10)
 
 # SET CURRENT CYCLE AS THE VERIFICATION GRIDDED FILE.
-vergrid="F-${MDL} | ${PDY2}/${cyc}00"
+vergrid="F-${MDL} | ${PDY:2}/${cyc}00"
 fcsthr="f00"
 
 # SET WHAT RUNS TO COMPARE AGAINST BASED ON MODEL CYCLE TIME.
-if [ ${cyc} -eq 00 ] ; then
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc16} ${dc18} ${dc20}"
-elif [ ${cyc} -eq 12 ] ; then
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc16} ${dc18} ${dc20}"
-else
-    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc17} ${dc19} ${dc21}"
-fi 
+# seq won't give us any splitting problems, ignore warnings
+# shellcheck disable=SC2207,SC2312
+case ${cyc} in
+    00 | 12) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 96 12 120)) ;;
+    06 | 18) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 90 12 126)) ;;
+    *)
+        echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}"
+        exit 100
+        ;;
+esac
 
 #GENERATING THE METAFILES.
 MDL2="GFSHPC"
-for verday in ${verdays}
-    do
-    cominday=$(echo ${verday} | cut -c -8)
-    #XXW export HPCGFS=$COMROOT/nawips/prod/${mdl}.${cominday}
-    # BV export HPCGFS=$COMROOT/nawips/${envir}/${mdl}.${cominday}
-    export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cyc}/${COMPONENT}/gempak
+for lookback in "${lookbacks[@]}"; do
+    init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")"
+    init_PDY=${init_time:0:8}
+    init_cyc=${init_time:8:2}
+
+    if (( init_time <= ${SDATE:-0} )); then
+        echo "Skipping ver for ${init_time} because it is before the experiment began"
+        if (( lookback == "${lookbacks[0]}" )); then
+            echo "First forecast time, no metafile produced"
+            exit 0
+        else
+            break
+        fi
+    fi
 
-    if [ ${verday} -eq ${dc1} ] ; then
-        dgdattim=f006
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle1}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate1}/${cycle1}00"
-    elif [ ${verday} -eq ${dc2} ] ; then
-        dgdattim=f012
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle2}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate2}/${cycle2}00"
-    elif [ ${verday} -eq ${dc3} ] ; then
-        dgdattim=f018
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle3}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate3}/${cycle3}00"
-    elif [ ${verday} -eq ${dc4} ] ; then
-        dgdattim=f024
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle4}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate4}/${cycle4}00"
-    elif [ ${verday} -eq ${dc5} ] ; then
-        dgdattim=f030
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle5}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate5}/${cycle5}00"
-    elif [ ${verday} -eq ${dc6} ] ; then
-        dgdattim=f036
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle6}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate6}/${cycle6}00"
-    elif [ ${verday} -eq ${dc7} ] ; then
-        dgdattim=f042
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle7}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate7}/${cycle7}00"
-    elif [ ${verday} -eq ${dc8} ] ; then
-        dgdattim=f048
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle8}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate8}/${cycle8}00"
-    elif [ ${verday} -eq ${dc9} ] ; then
-        dgdattim=f054
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle9}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate9}/${cycle9}00"
-    elif [ ${verday} -eq ${dc10} ] ; then
-        dgdattim=f060
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle10}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate10}/${cycle10}00"
-    elif [ ${verday} -eq ${dc11} ] ; then
-        dgdattim=f066
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle11}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate11}/${cycle11}00"
-    elif [ ${verday} -eq ${dc12} ] ; then
-        dgdattim=f072
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle12}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate12}/${cycle12}00"
-    elif [ ${verday} -eq ${dc13} ] ; then
-        dgdattim=f078
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle13}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate13}/${cycle13}00"
-    elif [ ${verday} -eq ${dc14} ] ; then
-        dgdattim=f084
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle14}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate14}/${cycle14}00"
-    elif [ ${verday} -eq ${dc15} ] ; then
-        dgdattim=f090
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle15}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate15}/${cycle15}00"
-    elif [ ${verday} -eq ${dc16} ] ; then
-        dgdattim=f096
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle16}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate16}/${cycle16}00"
-    elif [ ${verday} -eq ${dc17} ] ; then
-        dgdattim=f102
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle17}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate17}/${cycle17}00"
-    elif [ ${verday} -eq ${dc18} ] ; then
-        dgdattim=f108
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle18}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate18}/${cycle18}00"
-    elif [ ${verday} -eq ${dc19} ] ; then
-        dgdattim=f114
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle19}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate19}/${cycle19}00"
-    elif [ ${verday} -eq ${dc20} ] ; then
-        dgdattim=f120
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle20}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate20}/${cycle20}00"
-    elif [ ${verday} -eq ${dc21} ] ; then
-        dgdattim=f126
-        export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle21}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate21}/${cycle21}00"
+    dgdattim="f$(printf "%03g" "${lookback}")"
+
+    # Create symlink in DATA to sidestep gempak path limits
+    HPCGFS="${RUN}.${init_time}"
+    if [[ ! -L "${HPCGFS}" ]]; then
+        YMD=${init_PDY} HH=${init_cyc} GRID="1p00" declare_from_tmpl source_dir:COM_ATMOS_GEMPAK_TMPL
+        ${NLN} "${source_dir}" "${HPCGFS}"
     fi
 
-# 500 MB HEIGHT METAFILE
+    grid="F-${MDL2} | ${init_PDY}/${init_cyc}00"
 
-export pgm=gdplot2_nc;. prep_step; startmsg
+    # 500 MB HEIGHT METAFILE
+    export pgm=gdplot2_nc;. prep_step
 
-$GEMEXE/gdplot2_nc << EOFplt
-PROJ     = MER 
+    "${GEMEXE}/gdplot2_nc" << EOFplt
+PROJ     = MER
 GAREA    = 5.0;120.0;70.0;-105.0
 map      = 1//2
 clear    = yes
@@ -296,11 +126,11 @@ r
 gdfile   = ${vergrid}
 gdattim  = ${fcsthr}
 gdpfun   = mag(kntv(wnd))
-glevel   = 9950 
-gvcord   = sgma 
-scale    = 0 
-cint     = 35;50;65 
-line     = 6/1/3 
+glevel   = 9950
+gvcord   = sgma
+scale    = 0
+cint     = 35;50;65
+line     = 6/1/3
 title    = 6/-2/~ GFS WIND ISOTACHS 30m|~WIND DIFF
 clear    = yes
 r
@@ -308,29 +138,34 @@ r
 gdfile   = ${grid}
 gdattim  = ${dgdattim}
 line     = 5/1/3
-contur   = 0 
-title    = 5/-1/~ GFS WIND ISOTACHS 30m 
+contur   = 0
+title    = 5/-1/~ GFS WIND ISOTACHS 30m
 clear    = no
 r
 
 ex
 EOFplt
-export err=$?;err_chk
-
+    export err=$?;err_chk
+    if (( err != 0 )); then
+        echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+        exit $(( err + 100 ))
+    fi
 done
 
-####################################################
+#####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l ${metaname}
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}ver_${PDY}_${cyc}_np_mar
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job ${COMOUT}/${mdl}ver_${PDY}_${cyc}_np_mar
-   fi
+if [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit 100
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}ver_${PDY}_${cyc}_np_mar"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}ver_${PDY}_${cyc}_np_mar"
 fi
 
 exit
diff --git a/gempak/ush/gfs_meta_precip.sh b/gempak/ush/gfs_meta_precip.sh
index cf3db9cbae..ea12012758 100755
--- a/gempak/ush/gfs_meta_precip.sh
+++ b/gempak/ush/gfs_meta_precip.sh
@@ -1,17 +1,24 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_precip.sh
 #
-# Log :
-# M. Klein/WPC    01/29/2014   Created.  Adapted from gfs_meta_qpf.sh
-#
 # Set up Local Variables
 #
-set -x
-export PS4='qpf:$SECONDS + '
-mkdir -p -m 775 $DATA/precip
-cd $DATA/precip
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/precip"
+cd "${DATA}/precip" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 #
 # Set model and metafile naming conventions
@@ -21,13 +28,12 @@ MDL=GFS
 metatype="precip"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
 
 #
-# Set range of forecast hours.  GFS is available every 6 hours through F192, then 
+# Set range of forecast hours.  GFS is available every 6 hours through F192, then
 # every 12 hours after.  The request was to have the fields go to F216, so will run
 # the gdplot for the ranges set below, then for the 12-hour and greater QPF periods,
-# run the gdplot2 from F204-F216.  6-hour QPF will stop at F192. 
+# run the gdplot2 from F204-F216.  6-hour QPF will stop at F192.
 #
 
 gdatpcpn06="F006-F192-06"
@@ -41,7 +47,6 @@ gdatpcpn96="F096-F192-06"
 gdatpcpn120="F120-F192-06"
 gdatpcpn144="F144-F192-06"
 gdatpcpn168="F168-F192-06"
-run="r"
 
 #
 # For CPC - Day 6-10 and Day 8-14 QPFs using a North American regional display
@@ -49,49 +54,50 @@ run="r"
 garea_cpc="17.529;-129.296;53.771;-22.374"
 proj_cpc="str/90;-105;0"
 
-# Notes -- 
+# Notes --
 #  00Z cycle - 8-14 Day -- No F198 file, so started at F204. Makes a P156I, not P162I.
 #  06Z cycle - 6-10 Day -- No F258 file, so ended at F252.  Makes a P108I, not P114I.
-#            - 8-14 Day -- No F354 file, so ended at F348.  Makes a P156I, not P162I.            
+#            - 8-14 Day -- No F354 file, so ended at F348.  Makes a P156I, not P162I.
 #  12Z cycle - 8-14 Day -- No F210 file, so started at F216. Makes a P156I, not P162I.
 #  18Z cycle - 6-10 Day -- No F270 file, so ended at F264.  Makes a P108I, not P114I.
 #            - 8-14 Day -- No F366 file, so ended at F360. Makes a P156I, not P162I.
 
-gdattim_6to10=""
-gdattim_8to14=""
-gdpfun_6to10="p114i"
-gdpfun_8to14="p162i"
-if [ ${cyc} = "00" ] ; then
-    gdattim_6to10="${PDY2}/${cyc}00F264"
-    gdattim_8to14="${PDY2}/${cyc}00F360"
-    gdpfun_6to10="p114i"
-    gdpfun_8to14="p156i"
-elif [ ${cyc} = "06" ] ; then
-    #gdattim_6to10="${PDY2}/${cyc}00F258"
-    #gdattim_8to14="${PDY2}/${cyc}00F354"
-    gdattim_6to10="${PDY2}/${cyc}00F252"
-    gdattim_8to14="${PDY2}/${cyc}00F348"
-    gdpfun_6to10="p108i"
-    gdpfun_8to14="p156i"
-elif [ ${cyc} = "12" ] ; then
-    gdattim_6to10="${PDY2}/${cyc}00F276"
-    gdattim_8to14="${PDY2}/${cyc}00F372"
-    gdpfun_6to10="p114i"
-    gdpfun_8to14="p156i"
-elif [ ${cyc} = "18" ] ; then
-    #gdattim_6to10="${PDY2}/${cyc}00F270"
-    #gdattim_8to14="${PDY2}/${cyc}00F366"
-    gdattim_6to10="${PDY2}/${cyc}00F264"
-    gdattim_8to14="${PDY2}/${cyc}00F360"
-    gdpfun_6to10="p108i"
-    gdpfun_8to14="p156i"
-fi
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOFplt
-gdfile   = F-${MDL} | ${PDY2}/${cyc}00
+case ${cyc} in
+    00)
+        gdattim_6to10="${PDY:2}/${cyc}00F264"
+        gdattim_8to14="${PDY:2}/${cyc}00F360"
+        gdpfun_6to10="p114i"
+        gdpfun_8to14="p156i"
+        ;;
+    06)
+        gdattim_6to10="${PDY:2}/${cyc}00F252"
+        gdattim_8to14="${PDY:2}/${cyc}00F348"
+        gdpfun_6to10="p108i"
+        gdpfun_8to14="p156i"
+        ;;
+    12)
+        gdattim_6to10="${PDY:2}/${cyc}00F276"
+        gdattim_8to14="${PDY:2}/${cyc}00F372"
+        gdpfun_6to10="p114i"
+        gdpfun_8to14="p156i"
+        ;;
+    18)
+        gdattim_6to10="${PDY:2}/${cyc}00F264"
+        gdattim_8to14="${PDY:2}/${cyc}00F360"
+        gdpfun_6to10="p108i"
+        gdpfun_8to14="p156i"
+        ;;
+    *)
+        echo "FATAL ERROR: InvaLid cycle ${cyc} passed to ${BASH_SOURCE[0]}"
+        exit 100
+        ;;
+esac
+
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOFplt
+gdfile   = F-${MDL} | ${PDY:2}/${cyc}00
 garea    = us
-proj     = 
+proj     =
 map      = 1/1/2/yes
 device   = ${device}
 clear    = yes
@@ -131,15 +137,15 @@ scale    = 0
 gdpfun   = p06i
 type     = f
 cint     =
-line     = 
+line     =
 hilo     = 31;0/x#2/.01-20//50;50/y
 hlsym    = 1.5
-wind     = 
+wind     =
 title    = 1/-2/~ ? ${MDL} 6-HOUR TOTAL PCPN|~6-HR TOTAL PCPN!0
 l
 r
 
-gdattim  = ${gdatpcpn12} 
+gdattim  = ${gdatpcpn12}
 gdpfun   = p12i
 title    = 1/-2/~ ? ${MDL} 12-HOUR TOTAL PCPN|~12-HR TOTAL PCPN!0
 l
@@ -149,7 +155,7 @@ gdattim  = F204-F216-12
 l
 r
 
-gdattim  = ${gdatpcpn24}       
+gdattim  = ${gdatpcpn24}
 gdpfun   = p24i
 title    = 1/-2/~ ? ${MDL} 24-HOUR TOTAL PCPN|~24-HR TOTAL PCPN!0
 l
@@ -177,7 +183,7 @@ r
 gdattim  = F204-F216-12
 r
 
-gdattim  = ${gdatpcpn72}       
+gdattim  = ${gdatpcpn72}
 gdpfun   = p72i
 title    = 1/-2/~ ? ${MDL} 72 HOUR TOTAL PCPN|~72-HR TOTAL PCPN!0
 r
@@ -185,7 +191,7 @@ r
 gdattim  = F204-F216-12
 r
 
-gdattim  = ${gdatpcpn84}       
+gdattim  = ${gdatpcpn84}
 gdpfun   = p84i
 title    = 1/-2/~ ? ${MDL} 84 HOUR TOTAL PCPN|~84-HR TOTAL PCPN!0
 r
@@ -249,20 +255,20 @@ export err=$?;err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+    fi
 fi
 
 exit
diff --git a/gempak/ush/gfs_meta_qpf.sh b/gempak/ush/gfs_meta_qpf.sh
index 49ca0d8bd4..56197675e2 100755
--- a/gempak/ush/gfs_meta_qpf.sh
+++ b/gempak/ush/gfs_meta_qpf.sh
@@ -1,58 +1,48 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_qpf.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# J. Carr/HPC      7/7/97   Changed script so that it uses gdplot2 instead of gdplot
-# J. Carr/HPC      8/5/98   Removed pcpn potential product and changed map to a medium resolution
-# J. Carr/HPC      2/2/99   Changed skip to 0
-# J. Carr/HPC     2/10/99   Changed type c/f to just f for pcpn
-# J. Carr/HPC     4/12/99   Added 84-hr time for the gfs.
-# J. Carr/HPC        6/99   Added a filter on map
-# J. Carr/HPC      2/2001   Edited to run on IBM.
-# J. Carr/HPC      5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC      6/2001   Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC      7/2001   Submitted.
-# J. Carr/HPC     11/2004   Changed contur from 1 to a 2.
-#                           Inserted a ? in all title lines.
-#                           Commented out if statement for cycles since this is old code based on when various runs of GFS ran
-#                           out to differing times.
-# M. Klein/HPC    02/2010   Run 48-hour QPF out to F216 for medium-range.
-#
 # Set up Local Variables
 #
-set -x
-export PS4='qpf:$SECONDS + '
-mkdir -p -m 775 $DATA/qpf
-cd $DATA/qpf
-cp $FIXgempak/datatype.tbl datatype.tbl
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/qpf"
+cd "${DATA}/qpf" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL=GFS
 metatype="qpf"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo $PDY | cut -c3-)
-    gdat="F000-F126-06"
-    gdatpcpn06="F006-F126-06"
-    gdatpcpn12="F012-F126-06"
-    gdatpcpn24="F024-F126-06"
-    gdatpcpn48="F048-F216-06"
-    gdatpcpn60="F060-F126-06"
-    gdatpcpn72="F072-F126-06"
-    gdatpcpn84="F084-F126-06"
-    gdatpcpn96="F096-F126-06"
-    gdatpcpn120="F120-F126-06"
-    gdatpcpn126="F126"
-    run="r"
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOFplt
-gdfile   = F-${MDL} | ${PDY2}/${cyc}00
+gdat="F000-F126-06"
+gdatpcpn06="F006-F126-06"
+gdatpcpn12="F012-F126-06"
+gdatpcpn24="F024-F126-06"
+gdatpcpn48="F048-F216-06"
+gdatpcpn60="F060-F126-06"
+gdatpcpn72="F072-F126-06"
+gdatpcpn84="F084-F126-06"
+gdatpcpn96="F096-F126-06"
+gdatpcpn120="F120-F126-06"
+run="r"
+
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOFplt
+gdfile   = F-${MDL} | ${PDY:2}/${cyc}00
 gdattim  = ${gdat}
 garea    = us
-proj     = 
+proj     =
 map      = 1/1/2/yes
 device   = ${device}
 clear    = yes
@@ -63,20 +53,20 @@ latlon   = 0
 filter   = yes
 
 glevel   = 0
-gvcord   = none 
+gvcord   = none
 skip     = 0
 scale    = 0
 gdpfun   = sm5s(lft4)!sm5s(lft4)  !sm5s(lft4)!kntv(wnd@9950%sgma)
 type     = c/f       !c           !c         !b
 cint     = 2/2       !-10000;0.05 !2/-100/-2
-line     = 20/-32/2  !0;5//0;4/0;0!32//2  
-fint     = -8;-6;-4;-2;0.05;10 
+line     = 20/-32/2  !0;5//0;4/0;0!32//2
+fint     = -8;-6;-4;-2;0.05;10
 fline    = 2;15;21;22;23;0;24
 hilo     = 0         !0
 hlsym    = 1;1//22;22/2;2/hw!0
 clrbar   = 1/V/LL    !0
-wind     = bk0       !bk0          !bk0       !bk10/0.8/2/112!bk0 
-refvec   = 
+wind     = bk0       !bk0          !bk0       !bk10/0.8/2/112!bk0
+refvec   =
 title    = 1/-2/~ ? ${MDL} Best LI AND BL WINDS|~BEST LI!0
 r
 
@@ -89,7 +79,7 @@ type     = c                      !c/f !c         !c             !c
 cint     = 0.25/0.25/0.5          !0.25/0.75/6.0!4!3/0/540!3/543/1000
 line     = 22///2                 !32//2/2!6//3!4/5/2!5/5/2
 fint     = 0                      !0.5;1.0;1.5;2.0
-fline    = 0                      !0;23;22;30;14       
+fline    = 0                      !0;23;22;30;14
 hilo     = 0                      !0!6/H#;L#/1020-1070;900-1012!0
 HLSYM    = 0                      !0!1.5;1.5//22;22/3;3/hw!0
 clrbar   = 0                      !1/V/LL!0!0
@@ -143,7 +133,7 @@ type     = c!c/f!b
 cint     = 0.25/0.25/0.5!0.25/0.75/6.0
 line     = 22///2!32//2/2
 fint     = !0.5;1.0;1.5;2.0
-fline    = !0;23;22;21;2       
+fline    = !0;23;22;21;2
 hilo     = 0!0
 HLSYM    = 0!0
 clrbar   = 0!1/V/LL
@@ -169,7 +159,7 @@ WIND     = !
 REFVEC   =
 TITLE    = 1/-2/~ ? ${MDL} PCPN POTENTIAL (PW X (1000-440 MB RH)) INCHES OF PW|~PCPN POT!0
 r
- 
+
 glevel   = 850!850!850
 gvcord   = pres!pres!pres
 skip     = 0/1;1
@@ -180,11 +170,11 @@ cint     = -4;-2;0;2;4!2/6/28!3
 line     = 3//1!32//1!6//3
 fint     = 4;8;12;16;20
 fline    = 0;23;22;30;14;2
-hilo     = 0!0!6/H#;L#  
+hilo     = 0!0!6/H#;L#
 hlsym    = 0!0!1.5;1.5//22;22/2;2/hw
 clrbar   = 1/V/LL!0
-wind     = bk0!bk0!bk0!bk9/0.8/2/212 
-refvec   = 
+wind     = bk0!bk0!bk0!bk9/0.8/2/212
+refvec   =
 title    = 1/-2/~ ? ${MDL} @ DEW POINT, WIND, AND HGHT|~@ DEW POINT!0
 r
 
@@ -205,25 +195,25 @@ refvec   =
 title    = 1/-2/~ ? ${MDL} @ DEWPOINT, WIND, AND HGHT|~@ DEWPOINT!0
 r
 
-glevel   = 850                    !850       !0         !850   
+glevel   = 850                    !850       !0         !850
 gvcord   = pres                   !pres      !none      !pres
 skip     = 0/1;2
 scale    = 2                      !-1/2      !0                                    !2
 gdpfun   = sm5s(mag(smul(mixr;wnd)!sm5s(hght)!sm5s(thte)!smul(mixr;wnd)
 type     = c/f                    !c         !c         !a
 cint     = 3                      !3         !5
-line     = 3                      !6//2      !25/10/2   
+line     = 3                      !6//2      !25/10/2
 fint     = 6;12;18;24;30
 fline    = 0;23;22;21;14;15;2
 hilo     = 0!6/H#;L#!0
 hlsym    = 0!1;1//22;22/2;2/hw
 clrbar   = 1/V/LL!0
-wind     = bk0!bk0!bk0!am16/0.6/2/211/0.3!bk0 
-refvec   = 10 
+wind     = bk0!bk0!bk0!am16/0.6/2/211/0.3!bk0
+refvec   = 10
 text     = s/22/2/hw
 title    = 1/-2/~ ? ${MDL} @ MOIST. TRNSPT, HGHT, BL THTE|~@ H2O TRANSPORT!0
 r
- 
+
 glevel	 = 850
 gvcord	 = pres
 skip     = 0/1;1
@@ -234,8 +224,8 @@ cint	 = 2                 !4//304    !4/308/324 !4/328
 line	 = 32/1/2            !23/10/3   !22/10/3   !21/1/2
 fint	 = -14;-10;-6;-2;2;6;10;14!
 fline	 = 7;29;30;24;0;14;15;18;5!
-hilo	 = 
-hlsym	 = 
+hilo	 =
+hlsym	 =
 clrbar	 = 1/V/LL!0
 wind	 = bk0               !bk0       !bk0        !bk0       !bk9/0.8/2/112!bk0
 refvec	 = 10
@@ -343,13 +333,13 @@ refvec   =
 title    = 1/-2/~ ? ${MDL} 6-HOUR TOTAL PCPN, MSLP |~6-HR TOTAL PCPN!0
 r
 
-gdattim  = ${gdatpcpn12} 
+gdattim  = ${gdatpcpn12}
 gdpfun   = p12i
 type     = f
 title    = 1/-2/~ ? ${MDL} 12-HOUR TOTAL PCPN|~12-HR TOTAL PCPN!0
 r
 
-gdattim  = ${gdatpcpn24}       
+gdattim  = ${gdatpcpn24}
 gdpfun   = p24i
 title    = 1/-2/~ ? ${MDL} 24-HOUR TOTAL PCPN|~24-HR TOTAL PCPN!0
 r
@@ -364,12 +354,12 @@ gdpfun   = p60i
 title    = 1/-2/~ ? ${MDL} 60 HOUR TOTAL PCPN|~60-HR TOTAL PCPN!0
 r
 
-gdattim  = ${gdatpcpn72}       
+gdattim  = ${gdatpcpn72}
 gdpfun   = p72i
 title    = 1/-2/~ ? ${MDL} 72 HOUR TOTAL PCPN|~72-HR TOTAL PCPN!0
 r
 
-gdattim  = ${gdatpcpn84}       
+gdattim  = ${gdatpcpn84}
 gdpfun   = p84i
 title    = 1/-2/~ ? ${MDL} 84 HOUR TOTAL PCPN|~84-HR TOTAL PCPN!0
 r
@@ -403,19 +393,19 @@ export err=$?;err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-        DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_us_${metatype}
-      fi
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+   "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+      "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
+   if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
+      DBN_ALERT_TYPE=GFS_METAFILE
+      "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+         "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_us_${metatype}"
    fi
 fi
 
diff --git a/gempak/ush/gfs_meta_sa.sh b/gempak/ush/gfs_meta_sa.sh
index 47984e641d..2b36f6a25c 100755
--- a/gempak/ush/gfs_meta_sa.sh
+++ b/gempak/ush/gfs_meta_sa.sh
@@ -1,52 +1,37 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_sa.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# J.W.Carr/HPC       4/97   Changed Comparison to 1200 UTC UKMET instead of 0000 UTC UKMET
-# J.W.Carr/HPC       4/97   Added UKMET2 --- past 72 hours to the comparison
-# J.W.Carr/HPC       2/98   changed garea of sfc conv, bl dwpt and wind product
-# J.W.Carr/HPC       5/98   converted gdplot to gdplot2
-# J.W.Carr/HPC       8/98   Changed map to medium resolution
-# J. Carr/HPC        7/99   Put a filter on map.
-# J. Carr/HPC       02/2001 Updated to run on IBM and send to ncodas
-# J. Carr/HPC       04/2001 Remove old metafiles from metaout before creating new ones.
-# J. Carr/HPC        5/2001 Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC        6/2001 Converted to a korn shell prior to delivering script to Production.
-# J. Carr/HPC        8/2001 Submitted.
-# J. Carr/HPC        3/2002 Tweaked a few products.
-#
 # Set Up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/SA"
+cd "${DATA}/SA" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='SA:$SECONDS + '
-mkdir -p -m 775 $DATA/SA
-cd $DATA/SA
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL=GFS
 metatype="sa"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo ${PDY} | cut -c3-)
-#
-#if [ ${cyc} -eq 00 ] ; then
-#    fend=F126
-#elif [ ${cyc} -eq 12 ] ; then
-#    fend=F126
-#else
-#    fend=F126
-#fi
 
 fend=F126
 #
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-${MDL} | ${PDY2}/${cyc}00
-GDATTIM	= F00-${fend}-06 
+export pgm=gdplot2_nc;. prep_step
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-${MDL} | ${PDY:2}/${cyc}00
+GDATTIM	= F00-${fend}-06
 DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
@@ -72,8 +57,8 @@ FLINE   =
 HILO    = !!26;2/H#;L#/1020-1070;900-1012//30;30/y
 HLSYM   = 1.3;1.3//21//hw
 CLRBAR  = 1
-WIND    = !                         ! 
-REFVEC  = 
+WIND    = !                         !
+REFVEC  =
 TITLE	= 1/-2/~ ${MDL} MSLP, 1000-500mb THICK|~MSLP, 1000-500 THKN!
 ru
 
@@ -104,30 +89,30 @@ type    = c/f!c!c!c
 cint    = 13;25;38;50;62!4!4/0/540!4/544/600
 line    = 32/1/1!6/1/3!5/5/2!17/5/2
 fint    = 13;25;38;50
-fline   = 0;23;22;21;2      
+fline   = 0;23;22;21;2
 hilo    = 26;2/H#;L#/1017-1050;930-1004/2//y
 HLSYM   = 0!1.5;1.5//22;22/3;3/hw!0
 clrbar  = 1/V/LL!0
-wind    =  
+wind    =
 refvec  =
 title   = 1/-2/~ ${MDL} PW, EST MSLP, THICKNESS|~PRECIP WATER, MSLP!0
 r
 
-glevel  = 0!0 
-gvcord  = none 
-skip    = 0 
+glevel  = 0!0
+gvcord  = none
+skip    = 0
 scale   = 0
 gdpfun  = sm5s(lft4)!sm5s(lft4)!sm5s(lft4)!kntv(wnd@9950%sgma)
 type    = c/f       !c         !c         !b
 cint    = 3/3       !1/-0.5/0.5!3/-15/-3
-line    = 25/1/1    !22/1/2    !21/1/1  
-fint    = -9;-6;-3;3;6 
+line    = 25/1/1    !22/1/2    !21/1/1
+fint    = -9;-6;-3;3;6
 fline   = 2;15;22;0;0;24
 hilo    = 0!0
 hlsym   = 1;1//22;22/2;2/hw
 clrbar  = 1/V/LL!0
 wind    = bk0!bk0!bk0!bk9/0.9/2/112
-refvec  = 
+refvec  =
 title   = 1/-2/~ ${MDL} LI AND BL WINDS|~LIFTED INDEX!0
 r
 
@@ -142,7 +127,7 @@ scale   = 7   !0   !0
 gdpfun  = sm5s(sdiv(mixr(dwpc;pres@0%none);wnd)!sm5s(dwpc)!sm5s(dwpc)!kntv(wnd@9950%sgma)
 type    = f                               !c         !c         !b
 cint    = 1//-1                           !3/12      !3/21
-line    = 32                              !5//2      !6//2                      
+line    = 32                              !5//2      !6//2
 clrbar  = 1/V/LL!0
 fint    = -8;-6;-4;-2
 fline   = 2;23;22;3;0
@@ -159,13 +144,13 @@ GAREA   = -66;-127;14.5;-19
 GLEVEL  = 0            !0      !0         !0
 GVCORD  = none         !none   !none      !none
 SKIP    = 0
-SCALE   = 0               
+SCALE   = 0
 GDPFUN  = sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(pmsl)!kntv(wnd@9950%sgma)
 TYPE    = c/f          !c      !c         !c         !b
 CINT    = 3/-99/0      !3/3/21 !3/24/99   !4
 LINE    = 27/1/2       !2/1/2  !16/1/2    !19//3
-FINT    = -18;-15;-12;-9;-6;-3;0      
-FLINE   = 30;29;7;6;4;25;24;0       
+FINT    = -18;-15;-12;-9;-6;-3;0
+FLINE   = 30;29;7;6;4;25;24;0
 HILO    = 0            !0      !0         !26;2/H#;L#/1016-1050;930-1006/2//y
 HLSYM   = 0            !0      !0         !1.5;1.5//22;22/3;3/hw
 CLRBAR  = 1/V/LL       !0
@@ -188,13 +173,13 @@ GAREA   = -66;-127;14.5;-19
 GLEVEL  = 9950         !9950   !9950      !0
 GVCORD  = sgma!sgma!sgma!none
 SKIP    = 0
-SCALE   = 0               
+SCALE   = 0
 GDPFUN  = sm5s(tmpc)!sm5s(tmpc)!sm5s(tmpc)!sm5s(pmsl)!kntv(wnd@9950%sgma)
 TYPE    = c/f          !c      !c         !c         !b
 CINT    = 3/-99/0      !3/3/21 !3/24/99   !4
 LINE    = 27/1/2       !2/1/2  !16/1/2    !19//3
-FINT    = -18;-15;-12;-9;-6;-3;0      
-FLINE   = 30;29;7;6;4;25;24;0       
+FINT    = -18;-15;-12;-9;-6;-3;0
+FLINE   = 30;29;7;6;4;25;24;0
 HILO    = 0            !0      !0         !26;2/H#;L#/1016-1050;930-1006/2//y
 HLSYM   = 0            !0      !0         !1.5;1.5//22;22/3;3/hw
 CLRBAR  = 1/V/LL       !0
@@ -220,7 +205,7 @@ type    = c/f           !b
 cint    = 13;25;38;50;62!
 line    = 32/1/2/1
 fint    = 13;25;38;50
-fline   = 0;23;22;21;2       
+fline   = 0;23;22;21;2
 hilo    = 0             !0
 HLSYM   = 0             !0
 clrbar  = 1
@@ -239,11 +224,11 @@ CINT    = 10;20;80;90  !30;40;50;60;70
 LINE    = 32//2        !23//2
 FINT    = 10;30;70;90
 FLINE   = 18;8;0;22;23
-HILO    = 
+HILO    =
 HLSYM   =
 CLRBAR  = 1
-WIND    = 
-REFVEC  = 
+WIND    =
+REFVEC  =
 TITLE	= 1/-2/~ ${MDL} @ MEAN LAYER RH|~MEAN LAYER RH!0
 ru
 
@@ -276,7 +261,7 @@ LINE    = 7/5/1/2            !29/5/1/2!7/5/1/2            !29/5/1/2 !20/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!        !2;6/X;N/10-99;10-99!         !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
 WIND    = bk0
 REFVEC  =
@@ -308,7 +293,7 @@ gvcord   = pres
 SKIP     = 0/2;2
 scale    = 0                       !5/0               !5/0    !-1
 gdpfun   = sm5s(mag(kntv(wnd))//jet!sm5s(div(wnd)//dvg!dvg    !sm5s(hght)
-type     = c/f                     !c                 !c      !c 
+type     = c/f                     !c                 !c      !c
 cint     = 70;90;110;130;150;170   !-11;-9;-7;-5;-3;-1!2/2/100!12
 line     = 32/1                    !20/-2/2           !3/1/2  !1//2
 fint     = 70;90;110;130;150;170;190!
@@ -375,15 +360,15 @@ CINT    = 20/70//                   !2/2
 LINE    = 32/1/2/1                  !5/2/2/2
 FINT    = 80;90;110;130;150;170;190 !1;2;3;4;5;6;7
 FLINE   = 0;25;24;29;7;15;20;14     !0;23;22;21;17;16;2;1
-HILO    = 
-HLSYM   = 
+HILO    =
+HLSYM   =
 CLRBAR  = 1/v/ll                    !0
 WIND    = bk0                       !am16/0.3//211/0.4!Bk9/0.75/2
-REFVEC  = 
+REFVEC  =
 TITLE   = 1/-2/~ @ ISOTACHS AND WIND (KTS)|~200 MB WIND!0
 FILTER  = yes
 ru
- 
+
 GAREA   = -66;-127;14.5;-19
 LATLON	= 1//1/1/10
 
@@ -401,7 +386,7 @@ FLINE	= 0;21-30;14-20;5
 HILO	= 31;0/x#/10-500///y
 HLSYM	= 1.5
 CLRBAR	= 1/V/LL
-WIND	= 
+WIND	=
 REFVEC	=
 TITLE	= 1/-2/~ ${MDL} 12-HR TOTAL PCPN|~12-HR TOTAL PCPN!0
 r
@@ -415,25 +400,26 @@ exit
 EOF
 
 export err=$?;err_chk
+
 #####################################################
 # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
+
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    fi
 fi
 
 exit
diff --git a/gempak/ush/gfs_meta_sa2.sh b/gempak/ush/gfs_meta_sa2.sh
index a566031030..00330e7cf0 100755
--- a/gempak/ush/gfs_meta_sa2.sh
+++ b/gempak/ush/gfs_meta_sa2.sh
@@ -1,63 +1,53 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
-# Metafile Script : ukmet_gfs_meta_sa2.sh
+# Metafile Script : gfs_meta_sa2.sh
 #
-# Creates several South American gfs charts, including 500mb and psml 
+# Creates several South American gfs charts, including 500mb and psml
 # comparisons to the ecmwf and ukmet
 #
-# Log :
-# J. Carr/HPC       07/2002   Added this metafile
-# J. Carr/HPC       07/2002   Gif script.
-# M. Klein/HPC      11/2004   Change references to gfs from avn
-# M. Klein/HPC      02/2005   Changed location of working directory to /ptmp
-# A. Robson/HPC  11/01/2006   Converted to sh prior to gif'ing
-# F. Achorn/NCO  11/03/2006   Changed location of working directory to $DATA from ptmp
-#  
-#
-set -x
-#
-echo " start with ukmet_gfs_meta_sa2.sh"
 
-export PS4='SA2:$SECONDS + '
-cp $FIXgempak/datatype.tbl datatype.tbl
+source "${HOMEgfs}/ush/preamble.sh"
 
-export COMPONENT=${COMPONENT:-atmos}
+mkdir -p -m 775 "${DATA}/SA2"
+cd "${DATA}/SA2" || exit 2
 
-mdl=gfs
-MDL=GFS
 
-metatype="sa2"
-metaname="${mdl}_${metatype}_${cyc}.meta"
-device="nc | ${metaname}"
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 #
-# IF CYCLE IS NOT 00Z OR 06Z EXIT SCRIPT.
-# Also exit if run from 00z gfs
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-if [ ${cyc} -eq "12" ] || [ ${cyc} -eq "18" ] 
-then
-    exit
-# elif [ ${cyc} -eq "00" ] && [ $(echo $COMIN | awk -F/ '{print $5}' | awk -F. '{print $1}') = "gfs" ]
-elif [ ${cyc} -eq "00" ] && [ ${mdl} = "gfs" ]
-then 
-    # don't want to run from 00z gfs
-    exit
+export HPCGFS="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${HPCGFS} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${HPCGFS}"
 fi
 
-PDY2=$(echo ${PDY} | cut -c3-)
-# export HPCGFS=$COMROOT/nawips/${envir}/gfs.${PDY}
-export HPCGFS=${COMINgempak}/${mdl}.${PDY}/${cyc}/${COMPONENT}/gempak
+mdl=gfs
+MDL=GFS
 
-grid1="F-GFSHPC | ${PDY2}/${cyc}00"
+metatype="sa2"
+metaname="${mdl}_${metatype}_${cyc}.meta"
+device="nc | ${metaname}"
+
+grid1="F-GFSHPC | ${PDY:2}/${cyc}00"
 
 # DEFINE YESTERDAY
-PDYm1=$($NDATE -24 ${PDY}${cyc} | cut -c -8)
-PDY2m1=$(echo ${PDYm1} | cut -c 3-)
+PDYm1="$(date --utc +%Y%m%d -d "${PDY} ${cyc} - 24 hours")"
 
-$GEMEXE/gdplot2_nc << EOF
+HPCECMWF="ecmwf.${PDYm1}"
+HPCUKMET="ukmet.${PDY}"
+if [[ ! -L "${HPCECMWF}" ]]; then
+    ${NLN} "${COMINecmwf}/ecmwf.${PDYm1}/gempak" "${HPCECMWF}"
+fi
+if [[ ! -L "${HPCUKMET}" ]]; then
+    ${NLN} "${COMINukmet}/ukmet.${PDY}/gempak" "${HPCUKMET}"
+fi
+
+"${GEMEXE}/gdplot2_nc" << EOF
 \$MAPFIL= mepowo.gsf
 GDFILE	= ${grid1}
-GDATTIM	= F000-F144-12 
+GDATTIM	= F000-F144-12
 DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
@@ -83,8 +73,8 @@ FLINE   =
 HILO    = !!26;2/H#;L#/1020-1070;900-1012//30;30/y
 HLSYM   = 1.3;1.3//21//hw
 CLRBAR  = 1
-WIND    = !                         ! 
-REFVEC  = 
+WIND    = !                         !
+REFVEC  =
 TITLE	= 1/-2/~ ? ${MDL} MSLP, 1000-500mb THICK|~MSLP, 1000-500 THKN!
 l
 ru
@@ -100,7 +90,7 @@ LINE    = 7/5/1/2            !29/5/1/2!7/5/1/2            !29/5/1/2 !20/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!        !2;6/X;N/10-99;10-99!         !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
 WIND    = 0
 REFVEC  =
@@ -123,8 +113,8 @@ FLINE    = 0!0;23;24;25;30;29;28;27  !11;12;2;10;15;14;13;0
 HILO     = 0!0!0!5/H#;L#
 HLSYM    = 0!!1.0//21//hw!1.5
 CLRBAR   = 0!0!1!0
-WIND     = 
-REFVEC   = 
+WIND     =
+REFVEC   =
 TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ${MDL} @ MB 24-HR HGT FALLS!0
 TEXT     = 1/21////hw
 CLEAR    = YES
@@ -135,7 +125,7 @@ GDATTIM  = f060
 GDPFUN   = sm5s(hght)!(sub(hght^f60,hght^f36))!(sub(hght^f60,hght^f36))!sm5s(hght)
 TITLE    = 1/-1/~ ? ${MDL} @ MB HGT|~500 HGT CHG!1/-2/~ ${MDL} @ MB 24-HR HGT FALLS!0
 l
-run 
+run
 
 GDATTIM  = f084
 GDPFUN   = sm5s(hght)!(sub(hght^f84,hght^f60))!(sub(hght^f84,hght^f60))!sm5s(hght)
@@ -201,27 +191,17 @@ ru
 ex
 EOF
 
-if [ ${cyc} -eq "00" ]; then
-    times="012 036 060 084 108 132"
-else
-    times="006 030 054 078 102 126"
-fi
-
-for gfsfhr in $(echo ${times})
-do
-    if [ ${cyc} == "06" ]; then
-        ecmwffhr="F$(expr ${gfsfhr} + 18)"
+for fhr in $(seq -s ' ' 6 24 126); do
+    gfsfhr="F$(printf "%03g" "${fhr}")"
+    if (( fhr < 100 )); then
+        offset=6
     else
-        ecmwffhr="F$(expr ${gfsfhr} + 12)"
+        offset=18
     fi
-    while [ $(expr length $ecmwffhr) -lt 3 ]
-        do
-            ecmwffhr="F0$(expr ${gfsfhr} + 6)"
-        done
-    gfsfhr="F${gfsfhr}"
-    grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12"
-
-$GEMEXE/gdplot2_nc << EOF10
+    ecmwffhr="F$(printf "%03g" $((fhr + offset)))"
+    grid2="${HPCECMWF}/ecmwf_glob_${PDYm1}12"
+
+    "${GEMEXE}/gdplot2_nc" << EOF10
 \$MAPFIL = mepowo.gsf
 GDFILE	= ${grid1} !${grid2}
 GDATTIM	= ${gfsfhr}!${ecmwffhr}
@@ -236,20 +216,20 @@ PROJ    = mer//3;3;0;1
 GAREA   = -71;-135;20;-20
 LATLON	= 18//1/1/10
 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES                                                                    
-PANEL   = 0                                                                      
-SKIP    = 0            
-SCALE   = -1           
-GDPFUN  = sm5s(hght)!sm5s(hght)         
-TYPE    = c            
-CONTUR  = 1                                                                       
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   =                                                                         
-WIND    =               
-REFVEC  =                                                                         
+GLEVEL  = 500
+GVCORD  = PRES
+PANEL   = 0
+SKIP    = 0
+SCALE   = -1
+GDPFUN  = sm5s(hght)!sm5s(hght)
+TYPE    = c
+CONTUR  = 1
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   =
+WIND    =
+REFVEC  =
 LINE    = 31//2!2//2
 HILO    = 31/H#;L#//5/5;5/y!2/H#;L#//5/5;5/y
 TITLE   = 31/-1/~ ? ${MDL} @ HGHT (WHITE)|~EC VS ${MDL} 500!2/-2/~ ? ECMWF 500 HGHT (RED)
@@ -258,20 +238,20 @@ r
 
 GLEVEL  = 0
 GVCORD  = none
-PANEL   = 0                                                                       
-SKIP    = 0                                           
+PANEL   = 0
+SKIP    = 0
 SCALE   = 0
 GDPFUN  = (pmsl)!(pmsl)
-TYPE    = c                                                                       
-CONTUR  = 7                                                                      
-CINT    = 4                                           
-FINT    =                                                                        
-FLINE   =                                                                        
-HLSYM   = 1.5;1.5//21//hw                                                           
-CLRBAR  = 1                                                                       
-WIND    = 
-REFVEC  =                                                                         
-TEXT    = 1/21//hw                                                                
+TYPE    = c
+CONTUR  = 7
+CINT    = 4
+FINT    =
+FLINE   =
+HLSYM   = 1.5;1.5//21//hw
+CLRBAR  = 1
+WIND    =
+REFVEC  =
+TEXT    = 1/21//hw
 CLEAR   = yes
 GDFILE  = ${grid1}!${grid2}
 GDATTIM = ${gfsfhr}!${ecmwffhr}
@@ -285,27 +265,12 @@ ex
 EOF10
 done
 
-if [ ${cyc} -eq "00" ]; then
-    times="000 012 024 036 048 060 072 096 120 144"
-elif [ ${cyc} -eq "06" ]; then
-    times="006 018 030 042 054 066 090 114 138"
-fi
+for fhr in $(seq -s ' ' 6 12 138); do
+    gfsfhr="F$(printf "%03g" "${fhr}")"
+    ukmetfhr="F$(printf "%03g" $((fhr + 6)))"
+    grid3="${HPCUKMET}/ukmet_${PDY}00f${ukmetfhr}"
 
-for gfsfhr in  $(echo ${times})
-do
-    if [ ${cyc} -eq "06" ]; then
-        ukmetfhr="$(expr ${gfsfhr} + 6)"
-        while [ $(expr length $ukmetfhr) -lt 3 ]
-           do
-              ukmetfhr="0$(expr ${gfsfhr} + 6)"
-           done 
-    else
-        ukmetfhr=${gfsfhr}
-    fi
-    gfsfhr="F${gfsfhr}"
-    grid3="${COMINukmet}.${PDY}/gempak/ukmet_${PDY}00f${ukmetfhr}"
-
-$GEMEXE/gdplot2_nc << EOF25
+    "${GEMEXE}/gdplot2_nc" << EOF25
 \$MAPFIL = mepowo.gsf
 DEVICE  = ${device}
 PANEL   = 0
@@ -313,22 +278,22 @@ TEXT    = 1/21//hw
 CONTUR  = 2
 MAP     = 6/1/1/yes
 CLEAR   = yes
-CLRBAR  = 
-GLEVEL  = 500                                                                     
-GVCORD  = PRES                                                                    
-PANEL   = 0                                                                      
-SKIP    = 0            
-SCALE   = -1           
-GDPFUN  = sm5s(hght)!sm5s(hght)        
-TYPE    = c            
-CONTUR  = 1                                                                       
-CINT    = 6            
-FINT    = 
-FLINE   = 
-HLSYM   =                                                                         
-GVECT   =                                                                         
-WIND    =               
-REFVEC  =                                                                         
+CLRBAR  =
+GLEVEL  = 500
+GVCORD  = PRES
+PANEL   = 0
+SKIP    = 0
+SCALE   = -1
+GDPFUN  = sm5s(hght)!sm5s(hght)
+TYPE    = c
+CONTUR  = 1
+CINT    = 6
+FINT    =
+FLINE   =
+HLSYM   =
+GVECT   =
+WIND    =
+REFVEC  =
 clear   = yes
 GDFILE  = ${grid1}!${grid3}
 GDATTIM = ${gfsfhr}!F${ukmetfhr}
@@ -340,20 +305,20 @@ r
 
 GLEVEL  = 0
 GVCORD  = none
-PANEL   = 0                                                                       
-SKIP    = 0                                           
+PANEL   = 0
+SKIP    = 0
 SCALE   = 0
 GDPFUN  = sm5s(pmsl)!sm5s(pmsl)
-TYPE    = c                                                                       
-CONTUR  = 2                                                                       
-CINT    = 4                                           
-FINT    =                                                                        
-FLINE   =                                                                        
-HLSYM   = 1.5;1.5//21//hw                                                           
-CLRBAR  =                                                                        
-WIND    = 
-REFVEC  =                                                                         
-TEXT    = 1/21//hw                                                                
+TYPE    = c
+CONTUR  = 2
+CINT    = 4
+FINT    =
+FLINE   =
+HLSYM   = 1.5;1.5//21//hw
+CLRBAR  =
+WIND    =
+REFVEC  =
+TEXT    = 1/21//hw
 CLEAR   = yes
 GDFILE  = ${grid1}!${grid3}
 GDATTIM = ${gfsfhr}!F${ukmetfhr}
@@ -362,26 +327,31 @@ HILO    = 31/H#;L#/1020-1060;900-1010/5/10;10!2/H#;L#/1020-1060;900-1010/5/10;10
 TITLE   = 31/-1/~ ? ${MDL} PMSL (WHITE)|~UK VS ${MDL} PMSL!2/-2/~ ? UKMET PMSL (RED)
 l
 r
- 
+
 ex
 EOF25
 done
-
-
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-        DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      fi
-   fi
+export err=$?;err_chk
+
+#####################################################
+# GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
+# WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
+# FOR THIS CASE HERE.
+#####################################################
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
 fi
 
-#export COMIN=/com/nawips/${envir}/ukmet.${PDY}
-echo " end with ukmet_gfs_meta_sa2.sh"
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
+        DBN_ALERT_TYPE=GFS_METAFILE
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    fi
+fi
 
 exit
diff --git a/gempak/ush/gfs_meta_trop.sh b/gempak/ush/gfs_meta_trop.sh
index d0cc0dbd14..1991e04c4e 100755
--- a/gempak/ush/gfs_meta_trop.sh
+++ b/gempak/ush/gfs_meta_trop.sh
@@ -1,59 +1,53 @@
-#! /bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_trop.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# J. Carr/HPC/DTB    3/97   Added WPAC area
-# J. Carr/HPC/DTB    4/97   Put more skipping in for winds
-# J. Carr/HPC/DTB    4/97   Changed pcpn from mm to inches and added hilo
-# J. Carr/HPC/DTB    5/98   Converted gdplot to gdplot2
-# J.L.Partain/MPC    5/98   Mods to make Atl prods same as GFS, MRF, ETA
-# J. Carr/HPC/DTB    8/98   Changed map to medium resolution
-# J. Carr/HPC       02/01   Updated script to run on IBM.
-# J. Carr/HPC      5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC      7/2001   Converted to a korn shell prior to delivering script
-#                           to Production.
-# J. Carr/HPC      7/2001   Submitted as a jif.   
-#
-# B. Gordon          7/02   Converted to run off the GFS due to demise
-#                           of the MRF.
-# J. Carr/PMB     11/2004   Added a ? to all title lines.
-#                           Changed contur from a 1 to a 2.
-#                           Changed interval of 24-HR PCPN from 24 to 6.
-#
 # Set Up Local Variables
 #
-set -x
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/TROP"
+cd "${DATA}/TROP" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-export PS4='TROP:$SECONDS + '
-mkdir -p -m 775 $DATA/TROP
-cd $DATA/TROP
-cp $FIXgempak/datatype.tbl datatype.tbl
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 mdl=gfs
 MDL=GFS
 metatype="trop"
 metaname="${mdl}_${metatype}_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo ${PDY} | cut -c3-)
 #
-for a in ATL PAC WPAC
-do
-    if [ ${a} = "ATL" ] ; then
+for domain in ATL PAC WPAC; do
+    case ${domain} in
+    ATL)
         garea="-6;-111;52;-14"
         proj="MER/0.0;-49.5;0.0"
-    elif [ ${a} = "PAC" ] ; then
+        ;;
+    PAC)
         garea="0;-140;45;-75"
         proj="mer//3;3;0;1"
-    elif [ ${a} = "WPAC" ] ; then
+        ;;
+    WPAC)
         garea="0;90;45;180"
         proj="mer//3;3;0;1"
-    fi
-
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-${MDL} | ${PDY2}/${cyc}00
+        ;;
+    *)
+        echo "FATAL ERROR: Unknown domain in ${BASH_SOURCE[0]}"
+        exit 100
+    esac
+
+    export pgm=gdplot2_nc;. prep_step
+    "${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-${MDL} | ${PDY:2}/${cyc}00
 GDATTIM = F00-F180-12
 DEVICE	= ${device}
 PANEL	= 0
@@ -83,7 +77,7 @@ HLSYM   = 0!1;1//22;22/3;3/hw
 CLRBAR  = 1/V/LL!0
 WIND    = bk0!bk0!bk9/.8/1.4/112
 REFVEC  =
-TITLE   = 1/-2/~ ? ${MDL} PMSL, BL WIND (40m AGL; KTS)|~${a} PMSL & BL WIND!0
+TITLE   = 1/-2/~ ? ${MDL} PMSL, BL WIND (40m AGL; KTS)|~${domain} PMSL & BL WIND!0
 r
 
 GLEVEL  = 850
@@ -95,7 +89,7 @@ LINE    = 29/5/1/2              !7/5/1/2
 HILO    = 2;6/X;N/-99--4;4-99   !
 SCALE   = 5                     !5
 WIND    = bk0                   !bk0     !bk6/.8/2/112!0
-TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${a} @ WIND AND REL VORT!0
+TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${domain} @ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE   = 0;14-21
 TYPE    = c/f!c!b
@@ -110,14 +104,14 @@ HILO    = 2;6/X;N/-99--4;4-99!         !6/L#/880-1004///1
 HLSYM   = 1;1//22;22/3;3/hw
 SCALE   = 5                  !5        !0
 WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112
-TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${a} @ WIND AND REL VORT!0
+TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${domain} @ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE   = 0;14-21
 r
 
 GLEVEL	= 700
 GDPFUN	= vor(wnd)              !vor(wnd)!kntv(wnd)
-TITLE	= 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${a} @ WIND AND REL VORT!0
+TITLE	= 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${domain} @ WIND AND REL VORT!0
 
 GLEVEL  = 700!700!0!700
 GVCORD  = pres!pres!none!pres
@@ -128,7 +122,7 @@ HILO    = 2;6/X;N/-99--4;4-99!         !6/L#/880-1004///1
 HLSYM   = 1;1//22;22/3;3/hw
 SCALE   = 5                  !5        !0
 WIND    = bk0                !bk0      !bk0       !bk9/.8/1.4/112
-TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${a} @ WIND AND REL VORT!0
+TITLE   = 1/-2/~ ? ${MDL} @ WIND AND REL VORT|~${domain} @ WIND AND REL VORT!0
 FINT    = 4;6;8;10;12;14;16;18
 FLINE   = 0;14-21
 TYPE    = c/f                !c        !c         !b
@@ -145,11 +139,11 @@ LINE    = 7/5/1/2      ! 29/5/1/2  ! 7/5/1/2   ! 29/5/1/2 ! 20/1/2/1
 FINT    = 16;20;24;28;32;36;40;44
 FLINE   = 0;23-15
 HILO    = 2;6/X;N/10-99;10-99!        !2;6/X;N/10-99;10-99!         !
-HLSYM   = 
+HLSYM   =
 CLRBAR  = 1
-WIND    = bk0!bk0!bk0!bk0!bk0!bk9/0.9/1.4/112!0 
+WIND    = bk0!bk0!bk0!bk0!bk0!bk9/0.9/1.4/112!0
 REFVEC  =
-TITLE   = 1/-2/~ ? ${MDL} @ WIND AND ABS VORT|~${a} @ WIND AND ABS VORT!0
+TITLE   = 1/-2/~ ? ${MDL} @ WIND AND ABS VORT|~${domain} @ WIND AND ABS VORT!0
 r
 
 GLEVEL  = 300:850       !850      !300
@@ -167,9 +161,9 @@ HLSYM   =
 CLRBAR  = 1
 WIND    = ak0!ak7/.4/1/221/.2!ak6/.4/1/221/.2
 REFVEC  =
-TITLE   = 1/-2/~ ? ${MDL} @  WIND SHEAR (850=Purple, 300=Cyan) |~${a} 850-300MB WIND SHEAR!0
+TITLE   = 1/-2/~ ? ${MDL} @  WIND SHEAR (850=Purple, 300=Cyan) |~${domain} 850-300MB WIND SHEAR!0
 filter  = no
- 
+
 
 glevel  = 250!250
 gvcord  = pres!pres
@@ -187,7 +181,7 @@ hlsym   = 0!0!0!0
 clrbar  = 0!0!1/V/LL!0
 wind    = bk0!bk0!bk0!bk0!bk9/.9/1.4/112
 refvec  = 10
-title   = 1/-2/~ ? ${MDL} @ HGHTS, ISOTACHS, & DIVERG|~${a} @ SPEED & DIVERG!0
+title   = 1/-2/~ ? ${MDL} @ HGHTS, ISOTACHS, & DIVERG|~${domain} @ SPEED & DIVERG!0
 r
 
 glevel  = 400:850!0
@@ -205,8 +199,8 @@ hlsym   = 0!2;1.5//21//hw
 clrbar  = 0
 wind    = bk10/0.9/1.4/112!bk0
 refvec  =
-title   = 1/-2/~ ? ${MDL} 850-400mb MLW and MSLP|~${a} 850-400mb MLW & MSLP!0
- 
+title   = 1/-2/~ ? ${MDL} 850-400mb MLW and MSLP|~${domain} 850-400mb MLW & MSLP!0
+
 
 GDATTIM	= F24-F144-06
 GLEVEL	= 0
@@ -214,7 +208,7 @@ GVCORD	= none
 SKIP	= 0
 SCALE	= 0
 GDPFUN	= p24i
-TYPE	= f 
+TYPE	= f
 CINT	= 0
 LINE	= 0
 FINT	= .01;.1;.25;.5;.75;1;1.25;1.5;1.75;2;2.25;2.5;2.75;3;3.25;3.5;3.75;4
@@ -222,14 +216,14 @@ FLINE	= 0;21-30;14-20;5
 HILO	= 31;0/x#2/.10-8.0///y
 HLSYM	= 1.4//22/2/hw
 CLRBAR	= 1/V/LL
-WIND	= 
+WIND	=
 REFVEC	=
-TITLE	= 1/-2/~ ${MDL} 24-HR TOTAL PCPN|~${a} 24-HR TOTAL PCPN!0
+TITLE	= 1/-2/~ ${MDL} 24-HR TOTAL PCPN|~${domain} 24-HR TOTAL PCPN!0
 r
 
 exit
 EOF
-export err=$?;err_chk
+    export err=$?;err_chk
 
 done
 
@@ -238,21 +232,20 @@ done
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+    exit $(( err + 100 ))
+fi
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    if [[ "${DBN_ALERT_TYPE}" == "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/${mdl}_${PDY}_${cyc}_${metatype}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/${mdl}_${PDY}_${cyc}_${metatype}"
+    fi
 fi
 
-
 exit
diff --git a/gempak/ush/gfs_meta_us.sh b/gempak/ush/gfs_meta_us.sh
index 7a818c338b..78ec25fa92 100755
--- a/gempak/ush/gfs_meta_us.sh
+++ b/gempak/ush/gfs_meta_us.sh
@@ -1,45 +1,40 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_us.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# D.W.Plummer/NCEP   3/97   Added ecmwf comparison.
-# D.W.Plummer/NCEP   3/97   Added $MAPFIL specification for lower resolution
-# D.W.Plummer/NCEP   4/97   Removed run from 3-HOURLY PRECIP
-# J. Carr/HPC        2/99   Changed skip to 0
-# B. Gordon/NCO      5/00   Modified for production on IBM-SP
-#                           Changed gdplot_nc -> gdplot2_nc
-# D. Michaud/NCO     4/01   Modified to Reflect Different Title for
-#                           Parallel runs
-# J. Carr/PMB       11/04   Added a ? to all title lines
-#                           Changed contur from a 1 to a 2.
-#
-cd $DATA
 
-set -xa
+source "${HOMEgfs}/ush/preamble.sh"
+
+cd "${DATA}" || exit 2
+rm -rf "${DATA}/us"
+mkdir -p -m 775 "${DATA}/us"
+cd "${DATA}/us" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
-rm -rf $DATA/us
-mkdir -p -m 775 $DATA/us
-cd $DATA/us
-cp $FIXgempak/datatype.tbl datatype.tbl
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 device="nc | gfs.meta"
-PDY2=$(echo $PDY | cut -c3-)
 
 export fend=F216
 
-if [ "$envir" = "para" ] ; then
+if [[ "${envir}" == "para" ]] ; then
    export m_title="GFSP"
 else
    export m_title="GFS"
 fi
 
 export pgm=gdplot2_nc;. prep_step
-startmsg
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-GFS | ${PDY2}/${cyc}00
-GDATTIM	= F00-$fend-6
+
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-GFS | ${PDY:2}/${cyc}00
+GDATTIM	= F00-${fend}-6
 DEVICE	= ${device}
 PANEL	= 0
 TEXT	= 1/21//hw
@@ -53,81 +48,82 @@ GAREA	= 17.529;-129.296;53.771;-22.374
 PROJ	= str/90;-105;0
 LATLON	= 0
 
-restore $USHgempak/restore/pmsl_thkn.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
+TITLE	= 5/-2/~ ? ${m_title} PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
 l
 run
 
 
-restore $USHgempak/restore/850mb_hght_tmpc.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
 l
 run
 
 
-restore $USHgempak/restore/700mb_hght_relh_omeg.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
 l
 run
 
 
-restore $USHgempak/restore/500mb_hght_absv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
 l
 run
 
 
-restore $USHgempak/restore/250mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 5/-2/~ ? $m_title @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
+TITLE	= 5/-2/~ ? ${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
 l
 run
 
- 
-restore $USHgempak/restore/p06m_pmsl.2.nts
+
+
+restore ${HOMEgfs}/gempak/ush/restore/p06m_pmsl.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-GDATTIM	= F06-$fend-6
-TITLE	= 5/-2/~ ? $m_title 6-HR TOTAL PCPN, MSLP|~6-HR TOTAL PCPN, MSLP!0
+GDATTIM	= F06-${fend}-6
+TITLE	= 5/-2/~ ? ${m_title} 6-HR TOTAL PCPN, MSLP|~6-HR TOTAL PCPN, MSLP!0
 l
 run
 
 HILO    = 31;0/x#2////y
 HLSYM   = 1.5
 
-GDATTIM	= F12-$fend-06
-GDPFUN   = p12i                                                
-TITLE	= 5/-2/~ ? $m_title 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN
+GDATTIM	= F12-${fend}-06
+GDPFUN   = p12i
+TITLE	= 5/-2/~ ? ${m_title} 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN
 l
 run
 
 
-GDATTIM	= F24-$fend-06
-GDPFUN   = p24i                                                
-TITLE	= 5/-2/~ ? $m_title 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
+GDATTIM	= F24-${fend}-06
+GDPFUN   = p24i
+TITLE	= 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
 l
 run
 
 
 GDATTIM	= F72;f78;f84
-GDPFUN   = p72i                                                
-TITLE	= 5/-2/~ ? $m_title 72-HR TOTAL PCPN(IN)|~72-HR TOTAL PCPN
+GDPFUN   = p72i
+TITLE	= 5/-2/~ ? ${m_title} 72-HR TOTAL PCPN(IN)|~72-HR TOTAL PCPN
 l
 run
 
@@ -135,18 +131,18 @@ run
 GAREA   = 26.52;-119.70;50.21;-90.42
 PROJ    = str/90;-105;0/3;3;0;1
 MAP     = 1//2
-GDATTIM	= F24-$fend-06
-GDPFUN  = p24i                                                
-TITLE	= 5/-2/~ ? $m_title 24-HR TOTAL PCPN (IN)|~WEST: 24-HR PCPN
+GDATTIM	= F24-${fend}-06
+GDPFUN  = p24i
+TITLE	= 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN (IN)|~WEST: 24-HR PCPN
 l
 
 
 GAREA   = 24.57;-100.55;47.20;-65.42
 PROJ    = str/90;-90;0/3;3;0;1
 MAP     = 1//2
-GDATTIM	= F24-$fend-06
-GDPFUN  = p24i                                                
-TITLE	= 5/-2/~ ? $m_title 24-HR TOTAL PCPN (IN)|~EAST: 24-HR PCPN
+GDATTIM	= F24-${fend}-06
+GDPFUN  = p24i
+TITLE	= 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN (IN)|~EAST: 24-HR PCPN
 l
 
 exit
@@ -158,24 +154,23 @@ export err=$?;err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l gfs.meta
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
-
-
-if [ $SENDCOM = "YES" ] ; then
-  mv gfs.meta ${COMOUT}/gfs_${PDY}_${cyc}_us
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_us
-    if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-      DBN_ALERT_TYPE=GFS_METAFILE
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-       ${COMOUT}/gfs_${PDY}_${cyc}_us
+if (( err != 0 )) || [[ ! -s gfs.meta ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file gfs.meta"
+    exit $(( err + 100 ))
+fi
+
+mv gfs.meta "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_us"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_us"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
+        DBN_ALERT_TYPE=GFS_METAFILE
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_us"
     fi
-    if [ $fhr -eq 216 ] ; then
-     ${DBNROOT}/bin/dbn_alert MODEL GFS_METAFILE_LAST $job \
-       ${COMOUT}/gfs_${PDY}_${cyc}_us
+    if (( fhr == 216 )) ; then
+        "${DBNROOT}/bin/dbn_alert" MODEL GFS_METAFILE_LAST "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_us"
     fi
-  fi
 fi
 
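GEMPAK can exit zero even when it fails to write the requested grid (the block comment in the hunk above says as much), which is why the replacement code checks both the gdplot2_nc status and the size of gfs.meta before alerting. A minimal standalone sketch of that pattern, written as a hypothetical helper (check_metafile does not exist in the repository; the +100 offset mirrors the hunk):

#! /usr/bin/env bash
# Hypothetical helper: fail loudly when gdplot2_nc exits zero but leaves no
# usable metafile behind.
check_metafile() {
    local err=${1} meta=${2}
    if (( err != 0 )) || [[ ! -s "${meta}" ]]; then
        echo "FATAL ERROR: Failed to create gempak meta file ${meta}"
        exit $(( err + 100 ))
    fi
}
# usage, immediately after the gdplot2_nc here-document:
#   check_metafile "$?" gfs.meta
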
diff --git a/gempak/ush/gfs_meta_usext.sh b/gempak/ush/gfs_meta_usext.sh
index dc522bb896..efbc7e229d 100755
--- a/gempak/ush/gfs_meta_usext.sh
+++ b/gempak/ush/gfs_meta_usext.sh
@@ -1,63 +1,48 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_usext.sh
 #
-# Log :
-# D.W.Plummer/NCEP   2/97   Add log header
-# D.W.Plummer/NCEP   3/97   Add ecmwf comparison.
-# D.W.Plummer/NCEP   3/97   Added $MAPFIL specification for lower resolution
-# D.W.Plummer/NCEP   4/97   Changed SKIP for grid2
-# D.W.Plummer/NCEP   4/97   Changed gdplot to gdplot2 and related restore files
-# D.W.Plummer/NCEP   4/97   Changed NAOP to NAOP w/ .us suffix
-# D.W.Plummer/NCEP   1/98   Added 12hr 2m min and max temps out to day 6 and 7
-# J.L.Partain/MPC    8/98   Added latlon lines
-# J. Carr/HPC        2/99   Changed skip to a 0
-# J. Carr/HPC        4/2000 Changed the Alaska 5-day pcpn to a 3-5 day pcpn
-#                           Added other pcpn products for the medium range fcstrs.
-# B. Gordon/NCO      4/00   Converted to run as production on the IBM-SP
-# D. Michaud/NCO     4/01   Added logic to display different title for parallel
-#                           runs.
-# B. Gordon          7/02   Converted to run off the GFS due to demise
-#                           of the MRF.
-# J. Carr/PMB       11/04   Added a ? to title lines.
-#                           Changed contur from a 1 to a 2.
-#                           Changed increment in gdattim to every 6 hrs instead of 12.
-#                           Added 3 new products for HPC medium range. (2 48-hr qpf and 1 5 day qpf)
-# M. Klein/HPC      01/10   Add boundary layer winds/isotachs to the metafile for CPC.
-#
-set -xa
-mkdir -p -m 775 $DATA/mrfus
-cd $DATA/mrfus
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-device="nc | mrf.meta"
+source "${HOMEgfs}/ush/preamble.sh"
 
-#XXW cp $FIXgempak/model/gfs/ak_sfstns.tbl alaska.tbl
-cp $FIXgempak/ak_sfstns.tbl alaska.tbl
+mkdir -p -m 775 "${DATA}/mrfus"
+cd "${DATA}/mrfus" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
+cp "${HOMEgfs}/gempak/fix/ak_sfstns.tbl" alaska.tbl
 
-month=$(echo $PDY | cut -c5,6)
-if [ $month -ge 5 -a $month -le 9 ] ; then
-#  fint="40;45;50;55;60;65;70;75;80;85;90;95;100"
-#  fline="26;25;24;23;22;21;20;19;18;17;16;15;14;31"
-  fint="60;65;70;75;80;85;90;95;100;105;110;115;120"
-  fline="26;25;24;23;22;21;20;19;18;17;16;15;14;31"
-else
-  fint="-5;0;5;10;15;20;25;30;35;40;45;50;55;60;65;70;75;80"
-  fline="4;30;29;28;27;26;25;24;23;22;21;20;19;18;17;16;15;14;31"
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
+#
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
 fi
 
-PDY2=$(echo $PDY | cut -c3-)
+device="nc | mrf.meta"
 
-if [ "$envir" = "para" ] ; then
+# Not being used?
+# case $(( 10#${PDY:4:2} )) in
+#     [5-9])
+#         fint="60;65;70;75;80;85;90;95;100;105;110;115;120"
+#         fline="26;25;24;23;22;21;20;19;18;17;16;15;14;31"
+#         ;;
+#     *)
+#         fint="-5;0;5;10;15;20;25;30;35;40;45;50;55;60;65;70;75;80"
+#         fline="4;30;29;28;27;26;25;24;23;22;21;20;19;18;17;16;15;14;31"
+#         ;;
+# esac
+
+if [[ "${envir}" = "para" ]] ; then
    export m_title="GFSP"
 else
    export m_title="GFS"
 fi
 
-export pgm=gdplot2_nc; prep_step; startmsg
+export pgm=gdplot2_nc; prep_step
 
-$GEMEXE/gdplot2_nc << EOF
-GDFILE	= F-GFS | ${PDY2}/${cyc}00
+"${GEMEXE}/gdplot2_nc" << EOF
+GDFILE	= F-GFS | ${PDY:2}/${cyc}00
 GDATTIM	= F000-F384-06
 DEVICE	= ${device}
 PANEL	= 0
@@ -71,46 +56,46 @@ GAREA	= 17.529;-129.296;53.771;-22.374
 PROJ	= str/90;-105;0
 LATLON	= 18/2
 
-restore ${USHgempak}/restore/pmsl_thkn.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts
 CLRBAR  = 1
 HLSYM   = 2;1.5//21//hw
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
+TITLE	= 1/-2/~ ? ${m_title} PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0
 l
 run
 
-restore ${USHgempak}/restore/850mb_hght_tmpc.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
+TITLE	= 1/-2/~ ? ${m_title} @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0
 l
 run
 
-restore ${USHgempak}/restore/700mb_hght_relh_omeg.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
+TITLE	= 1/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0
 l
 run
 
-restore ${USHgempak}/restore/500mb_hght_absv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
+TITLE	= 1/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0
 l
 run
 
-restore ${USHgempak}/restore/500mb_hght_gabsv.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_gabsv.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title @ HGT AND GEO ABS VORT|~@ HGT, GEO ABS VORT!0
+TITLE	= 1/-2/~ ? ${m_title} @ HGT AND GEO ABS VORT|~@ HGT, GEO ABS VORT!0
 l
 run
 
-restore ${USHgempak}/restore/250mb_hght_wnd.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
-TITLE	= 1/-2/~ ? $m_title @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
+TITLE	= 1/-2/~ ? ${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0
 l
 run
 
@@ -131,26 +116,26 @@ HLSYM   =
 CLRBAR  = 1/V/LL!0
 WIND    = bk18/0.8/1/112
 REFVEC  =
-TITLE   = 1/-2/~ ? $m_title BOUNDARY LAYER WINDS (KTS) AND ISOTACHS|~BL WIND, ISOTACHS !0
+TITLE   = 1/-2/~ ? ${m_title} BOUNDARY LAYER WINDS (KTS) AND ISOTACHS|~BL WIND, ISOTACHS !0
 TEXT    = 1/21//hw
 CLEAR   = YES
 l
 run
 
-restore ${USHgempak}/restore/precip.2.nts
+restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts
 CLRBAR  = 1
 TEXT    = 1/21//hw
 HILO    = 31;0/x#2/.25-10///y
 HLSYM   = 1.5
 GDATTIM	= F12-F384-6
 GDPFUN  = p12i
-TITLE	= 1/-2/~ ? $m_title 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN
+TITLE	= 1/-2/~ ? ${m_title} 12-HR TOTAL PCPN (IN)|~12-HR TOTAL PCPN
 l
 run
 
 GDATTIM	= F24-F384-6
 GDPFUN  = p24i
-TITLE	= 5/-2/~ ? $m_title 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
+TITLE	= 5/-2/~ ? ${m_title} 24-HR TOTAL PCPN (IN)|~24-HR TOTAL PCPN
 l
 run
 
@@ -158,34 +143,34 @@ GDATTIM	= F84
 wind    = bk0
 gvcord  = none
 type    = f
-cint    = 
-line    = 
+cint    =
+line    =
 clrbar  = 1/V/LL
 fint    = .01;.1;.25;.5;.75;1;1.5;2;2.5;3;4;5;6;7;8;9;10
 fline   = 0;21-30;14-20;5
 glevel  = 0
 scale   = 0
 gdpfun  = p72i
-refvec  = 
-title   = 1/-2/~ ? $m_title 3-day (F12-F84) PCPN|~DAY 1-3 (F12-F84) PCPN
+refvec  =
+title   = 1/-2/~ ? ${m_title} 3-day (F12-F84) PCPN|~DAY 1-3 (F12-F84) PCPN
 l
 run
 
 GDATTIM = F108
 gdpfun  = p96i
-title   = 1/-2/~ ? $m_title 4-day (F12-F120) PCPN|~DAY 1-4 (F12-F108) PCPN
+title   = 1/-2/~ ? ${m_title} 4-day (F12-F120) PCPN|~DAY 1-4 (F12-F108) PCPN
 l
 run
 
 GDATTIM = F132
-gdpfun  = p120i 
-title   = 1/-2/~ ? $m_title 5-day (F12-F132) PCPN|~DAY 1-5 (F12-F132) PCPN
+gdpfun  = p120i
+title   = 1/-2/~ ? ${m_title} 5-day (F12-F132) PCPN|~DAY 1-5 (F12-F132) PCPN
 l
 run
 
 GDATTIM = F132
 gdpfun  = p48i
-title   = 1/-2/~ ? $m_title 2-day (F84-F132) PCPN|~DAY 4-5 (F84-F132) PCPN
+title   = 1/-2/~ ? ${m_title} 2-day (F84-F132) PCPN|~DAY 4-5 (F84-F132) PCPN
 l
 run
 
@@ -193,7 +178,7 @@ run
 
 GDATTIM = F126
 gdpfun  = p120i
-title   = 1/-2/~ ? $m_title 5-day (F06-F126) PCPN|~DAY 1-5 (F06-F126) PCPN
+title   = 1/-2/~ ? ${m_title} 5-day (F06-F126) PCPN|~DAY 1-5 (F06-F126) PCPN
 l
 run
 
@@ -201,7 +186,7 @@ run
 
 GDATTIM = F126
 gdpfun  = p48i
-title   = 1/-2/~ ? $m_title 2-day (F78-F126) PCPN|~DAY 4-5 (F78-F126) PCPN
+title   = 1/-2/~ ? ${m_title} 2-day (F78-F126) PCPN|~DAY 4-5 (F78-F126) PCPN
 l
 run
 
@@ -209,13 +194,13 @@ run
 
 GDATTIM = F138
 gdpfun  = p48i
-title   = 1/-2/~ ? $m_title 2-day (F90-F138) PCPN|~DAY 4-5 (F90-F138) PCPN
+title   = 1/-2/~ ? ${m_title} 2-day (F90-F138) PCPN|~DAY 4-5 (F90-F138) PCPN
 l
 run
 
 GDATTIM = F156
 gdpfun  = p72i
-title   = 1/-2/~ ? $m_title 3-day (F84-F156) PCPN|~DAY 4-6 (F84-F156) PCPN
+title   = 1/-2/~ ? ${m_title} 3-day (F84-F156) PCPN|~DAY 4-6 (F84-F156) PCPN
 l
 run
 
@@ -225,7 +210,7 @@ PROJ    = mer//3;3;0;1
 STNPLT  = 5|5/1//2|alaska.tbl
 gdattim = f144
 gdpfun  = p72i
-title   = 1/-2/~ ? $m_title 3-day (F72-F144) PCPN|~AK 3-DAY(F72-F144) PCPN
+title   = 1/-2/~ ? ${m_title} 3-day (F72-F144) PCPN|~AK 3-DAY(F72-F144) PCPN
 l
 run
 
@@ -240,21 +225,18 @@ export err=$?; err_chk
 # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
 # FOR THIS CASE HERE.
 #####################################################
-ls -l mrf.meta
-export err=$?;export pgm="GEMPAK CHECK FILE"; err_chk
-
-if [ $SENDCOM = "YES" ] ; then
-  mv mrf.meta ${COMOUT}/gfs_${PDY}_${cyc}_usext
-  if [ $SENDDBN = "YES" ] ; then
-    $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-     $COMOUT/gfs_${PDY}_${cyc}_usext
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
-        DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/gfs_${PDY}_${cyc}_usext
-      fi
-  fi
+if (( err != 0 )) || [[ ! -s mrf.meta ]] &> /dev/null; then
+    echo "FATAL ERROR: Failed to create gempak meta file mrf.meta"
+    exit $(( err + 100 ))
 fi
 
-#
-
+mv mrf.meta "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_usext"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_usext"
+    if [[ ${DBN_ALERT_TYPE} == "GFS_METAFILE_LAST" ]] ; then
+        DBN_ALERT_TYPE=GFS_METAFILE
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfs_${PDY}_${cyc}_usext"
+    fi
+fi
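
The COMIN re-pointing introduced above (and repeated in the other gempak scripts) exists to keep file paths short enough for GEMPAK, per the "sidestep gempak path limits" TODO in the hunk. A sketch of the same idea with made-up directory names, using ln -sf as a stand-in for ${NLN}:

#! /usr/bin/env bash
# Sketch only: link the long COM directory to a short relative name inside DATA
# so the GEMPAK templates resolve to a path within its length limits.
DATA=$(mktemp -d)
COM_ATMOS_GEMPAK_1p00="/example/long/experiment/com/gfs.20240102/12/products/atmos/gempak/1p00"
cd "${DATA}" || exit 2
COMIN="gfs.2024010212"                               # short, relative name
if [[ ! -L "${COMIN}" ]]; then
    ln -sf "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"     # dangling link here; real data in operations
fi
export COMIN
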
diff --git a/gempak/ush/gfs_meta_ver.sh b/gempak/ush/gfs_meta_ver.sh
index d63f6bc6df..eb8b5b15c6 100755
--- a/gempak/ush/gfs_meta_ver.sh
+++ b/gempak/ush/gfs_meta_ver.sh
@@ -1,372 +1,68 @@
-#!/bin/sh
+#! /usr/bin/env bash
 #
 # Metafile Script : gfs_meta_ver_new
 #
-# Log :
-# J. Carr/HPC     1/98   Added new metafile
-# J. Carr/HPC     5/98   Converted to gdplot2
-# J. Carr/HPC     8/98   Changed map to medium resolution
-# J. Carr/HPC     2/99   Changed skip to 0
-# J. Carr/HPC     6/99   Added latlon and a filter to map
-# J. Carr/HPC     7/99   Added South American area.
-# J. Carr/HPC   2/2001   Edited to run on the IBM.
-# J. Carr/HPC   5/2001   Added a mn variable for a/b side dbnet root variable.
-# J. Carr/HPC   8/2001   Changed to a korn shell for turnover to production.
-# J. Carr/HPC   8/2001   Submitted.
-# J. Carr/PMB  11/2004   Inserted a ? into all title/TITLE lines.
-#                        Changed contur from 1 to a 2.
-#                        Added logic to take the script from f126 to f228.
-#                        This will remove need for mrfver.
-#                        Removed logic which differentiated cycles since all cycles run to F384.
-#                        Added a South American area for International desk.
-#
 # Set up Local Variables
 #
-set -x
-export PS4='VER:$SECONDS + '
-mkdir -p -m 775 $DATA/VER
-cd $DATA/VER
-cp $FIXgempak/datatype.tbl datatype.tbl
 
-export COMPONENT=${COMPONENT:-atmos}
+source "${HOMEgfs}/ush/preamble.sh"
+
+mkdir -p -m 775 "${DATA}/VER"
+cd "${DATA}/VER" || exit 2
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
-mdl=gfs
 MDL=GFS
-metatype="ver"
 metaname="gfsver_${cyc}.meta"
 device="nc | ${metaname}"
-PDY2=$(echo ${PDY} | cut -c3-)
+
+#
+# Link data into DATA to sidestep gempak path limits
+# TODO: Replace this
 #
-# DEFINE 1 CYCLE AGO
-dc1=$($NDATE -06 ${PDY}${cyc} | cut -c -10)
-date1=$(echo ${dc1} | cut -c -8)
-sdate1=$(echo ${dc1} | cut -c 3-8)
-cycle1=$(echo ${dc1} | cut -c 9,10)
-# DEFINE 2 CYCLES AGO
-dc2=$($NDATE -12 ${PDY}${cyc} | cut -c -10)
-date2=$(echo ${dc2} | cut -c -8)
-sdate2=$(echo ${dc2} | cut -c 3-8)
-cycle2=$(echo ${dc2} | cut -c 9,10)
-# DEFINE 3 CYCLES AGO
-dc3=$($NDATE -18 ${PDY}${cyc} | cut -c -10)
-date3=$(echo ${dc3} | cut -c -8)
-sdate3=$(echo ${dc3} | cut -c 3-8)
-cycle3=$(echo ${dc3} | cut -c 9,10)
-# DEFINE 4 CYCLES AGO
-dc4=$($NDATE -24 ${PDY}${cyc} | cut -c -10)
-date4=$(echo ${dc4} | cut -c -8)
-sdate4=$(echo ${dc4} | cut -c 3-8)
-cycle4=$(echo ${dc4} | cut -c 9,10)
-# DEFINE 5 CYCLES AGO
-dc5=$($NDATE -30 ${PDY}${cyc} | cut -c -10)
-date5=$(echo ${dc5} | cut -c -8)
-sdate5=$(echo ${dc5} | cut -c 3-8)
-cycle5=$(echo ${dc5} | cut -c 9,10)
-# DEFINE 6 CYCLES AGO
-dc6=$($NDATE -36 ${PDY}${cyc} | cut -c -10)
-date6=$(echo ${dc6} | cut -c -8)
-sdate6=$(echo ${dc6} | cut -c 3-8)
-cycle6=$(echo ${dc6} | cut -c 9,10)
-# DEFINE 7 CYCLES AGO
-dc7=$($NDATE -42 ${PDY}${cyc} | cut -c -10)
-date7=$(echo ${dc7} | cut -c -8)
-sdate7=$(echo ${dc7} | cut -c 3-8)
-cycle7=$(echo ${dc7} | cut -c 9,10)
-# DEFINE 8 CYCLES AGO
-dc8=$($NDATE -48 ${PDY}${cyc} | cut -c -10)
-date8=$(echo ${dc8} | cut -c -8)
-sdate8=$(echo ${dc8} | cut -c 3-8)
-cycle8=$(echo ${dc8} | cut -c 9,10)
-# DEFINE 9 CYCLES AGO
-dc9=$($NDATE -54 ${PDY}${cyc} | cut -c -10)
-date9=$(echo ${dc9} | cut -c -8)
-sdate9=$(echo ${dc9} | cut -c 3-8)
-cycle9=$(echo ${dc9} | cut -c 9,10)
-# DEFINE 10 CYCLES AGO
-dc10=$($NDATE -60 ${PDY}${cyc} | cut -c -10)
-date10=$(echo ${dc10} | cut -c -8)
-sdate10=$(echo ${dc10} | cut -c 3-8)
-cycle10=$(echo ${dc10} | cut -c 9,10)
-# DEFINE 11 CYCLES AGO
-dc11=$($NDATE -66 ${PDY}${cyc} | cut -c -10)
-date11=$(echo ${dc11} | cut -c -8)
-sdate11=$(echo ${dc11} | cut -c 3-8)
-cycle11=$(echo ${dc11} | cut -c 9,10)
-# DEFINE 12 CYCLES AGO
-dc12=$($NDATE -72 ${PDY}${cyc} | cut -c -10)
-date12=$(echo ${dc12} | cut -c -8)
-sdate12=$(echo ${dc12} | cut -c 3-8)
-cycle12=$(echo ${dc12} | cut -c 9,10)
-# DEFINE 13 CYCLES AGO
-dc13=$($NDATE -78 ${PDY}${cyc} | cut -c -10)
-date13=$(echo ${dc13} | cut -c -8)
-sdate13=$(echo ${dc13} | cut -c 3-8)
-cycle13=$(echo ${dc13} | cut -c 9,10)
-# DEFINE 14 CYCLES AGO
-dc14=$($NDATE -84 ${PDY}${cyc} | cut -c -10)
-date14=$(echo ${dc14} | cut -c -8)
-sdate14=$(echo ${dc14} | cut -c 3-8)
-cycle14=$(echo ${dc14} | cut -c 9,10)
-# DEFINE 15 CYCLES AGO
-dc15=$($NDATE -90 ${PDY}${cyc} | cut -c -10)
-date15=$(echo ${dc15} | cut -c -8)
-sdate15=$(echo ${dc15} | cut -c 3-8)
-cycle15=$(echo ${dc15} | cut -c 9,10)
-# DEFINE 16 CYCLES AGO
-dc16=$($NDATE -96 ${PDY}${cyc} | cut -c -10)
-date16=$(echo ${dc16} | cut -c -8)
-sdate16=$(echo ${dc16} | cut -c 3-8)
-cycle16=$(echo ${dc16} | cut -c 9,10)
-# DEFINE 17 CYCLES AGO
-dc17=$($NDATE -102 ${PDY}${cyc} | cut -c -10)
-date17=$(echo ${dc17} | cut -c -8)
-sdate17=$(echo ${dc17} | cut -c 3-8)
-cycle17=$(echo ${dc17} | cut -c 9,10)
-# DEFINE 18 CYCLES AGO
-dc18=$($NDATE -108 ${PDY}${cyc} | cut -c -10)
-date18=$(echo ${dc18} | cut -c -8)
-sdate18=$(echo ${dc18} | cut -c 3-8)
-cycle18=$(echo ${dc18} | cut -c 9,10)
-# DEFINE 19 CYCLES AGO
-dc19=$($NDATE -114 ${PDY}${cyc} | cut -c -10)
-date19=$(echo ${dc19} | cut -c -8)
-sdate19=$(echo ${dc19} | cut -c 3-8)
-cycle19=$(echo ${dc19} | cut -c 9,10)
-# DEFINE 20 CYCLES AGO
-dc20=$($NDATE -120 ${PDY}${cyc} | cut -c -10)
-date20=$(echo ${dc20} | cut -c -8)
-sdate20=$(echo ${dc20} | cut -c 3-8)
-cycle20=$(echo ${dc20} | cut -c 9,10)
-# DEFINE 21 CYCLES AGO
-dc21=$($NDATE -126 ${PDY}${cyc} | cut -c -10)
-date21=$(echo ${dc21} | cut -c -8)
-sdate21=$(echo ${dc21} | cut -c 3-8)
-cycle21=$(echo ${dc21} | cut -c 9,10)
-# DEFINE 22 CYCLES AGO
-dc22=$($NDATE -132 ${PDY}${cyc} | cut -c -10)
-date22=$(echo ${dc22} | cut -c -8)
-sdate22=$(echo ${dc22} | cut -c 3-8)
-cycle22=$(echo ${dc22} | cut -c 9,10)
-# DEFINE 23 CYCLES AGO
-dc23=$($NDATE -138 ${PDY}${cyc} | cut -c -10)
-date23=$(echo ${dc23} | cut -c -8)
-sdate23=$(echo ${dc23} | cut -c 3-8)
-cycle23=$(echo ${dc23} | cut -c 9,10)
-# DEFINE 24 CYCLES AGO
-dc24=$($NDATE -144 ${PDY}${cyc} | cut -c -10)
-date24=$(echo ${dc24} | cut -c -8)
-sdate24=$(echo ${dc24} | cut -c 3-8)
-cycle24=$(echo ${dc24} | cut -c 9,10)
-# DEFINE 25 CYCLES AGO
-dc25=$($NDATE -150 ${PDY}${cyc} | cut -c -10)
-date25=$(echo ${dc25} | cut -c -8)
-sdate25=$(echo ${dc25} | cut -c 3-8)
-cycle25=$(echo ${dc25} | cut -c 9,10)
-# DEFINE 26 CYCLES AGO
-dc26=$($NDATE -156 ${PDY}${cyc} | cut -c -10)
-date26=$(echo ${dc26} | cut -c -8)
-sdate26=$(echo ${dc26} | cut -c 3-8)
-cycle26=$(echo ${dc26} | cut -c 9,10)
-# DEFINE 27 CYCLES AGO
-dc27=$($NDATE -162 ${PDY}${cyc} | cut -c -10)
-date27=$(echo ${dc27} | cut -c -8)
-sdate27=$(echo ${dc27} | cut -c 3-8)
-cycle27=$(echo ${dc27} | cut -c 9,10)
-# DEFINE 28 CYCLES AGO
-dc28=$($NDATE -168 ${PDY}${cyc} | cut -c -10)
-date28=$(echo ${dc28} | cut -c -8)
-sdate28=$(echo ${dc28} | cut -c 3-8)
-cycle28=$(echo ${dc28} | cut -c 9,10)
-# DEFINE 29 CYCLES AGO
-dc29=$($NDATE -174 ${PDY}${cyc} | cut -c -10)
-date29=$(echo ${dc29} | cut -c -8)
-sdate29=$(echo ${dc29} | cut -c 3-8)
-cycle29=$(echo ${dc29} | cut -c 9,10)
-# DEFINE 30 CYCLES AGO
-dc30=$($NDATE -180 ${PDY}${cyc} | cut -c -10)
-date30=$(echo ${dc30} | cut -c -8)
-sdate30=$(echo ${dc30} | cut -c 3-8)
-cycle30=$(echo ${dc30} | cut -c 9,10)
-# DEFINE 31 CYCLES AGO
-dc31=$($NDATE -192 ${PDY}${cyc} | cut -c -10)
-date31=$(echo ${dc31} | cut -c -8)
-sdate31=$(echo ${dc31} | cut -c 3-8)
-cycle31=$(echo ${dc31} | cut -c 9,10)
-# DEFINE 32 CYCLES AGO
-dc32=$($NDATE -204 ${PDY}${cyc} | cut -c -10)
-date32=$(echo ${dc32} | cut -c -8)
-sdate32=$(echo ${dc32} | cut -c 3-8)
-cycle32=$(echo ${dc32} | cut -c 9,10)
-# DEFINE 33 CYCLES AGO
-dc33=$($NDATE -216 ${PDY}${cyc} | cut -c -10)
-date33=$(echo ${dc33} | cut -c -8)
-sdate33=$(echo ${dc33} | cut -c 3-8)
-cycle33=$(echo ${dc33} | cut -c 9,10)
+export COMIN="${RUN}.${PDY}${cyc}"
+if [[ ! -L ${COMIN} ]]; then
+    ${NLN} "${COM_ATMOS_GEMPAK_1p00}" "${COMIN}"
+fi
 
 # SET CURRENT CYCLE AS THE VERIFICATION GRIDDED FILE.
-vergrid="F-${MDL} | ${PDY2}/${cyc}00"
+vergrid="F-${MDL} | ${PDY:2}/${cyc}00"
 fcsthr="f00"
 
-# SET WHAT RUNS TO COMPARE AGAINST BASED ON MODEL CYCLE TIME.
-#if [ ${cyc} -eq 00 ] ; then
-#    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc16} ${dc17} ${dc18} ${dc19} ${dc20} ${dc21}"
-#elif [ ${cyc} -eq 12 ] ; then
-#    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc16} ${dc17} ${dc18} ${dc19} ${dc20} ${dc21}"
-#else
-#    verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc16} ${dc17} ${dc18} ${dc19} ${dc20} ${dc21}"
-#fi 
-
-verdays="${dc1} ${dc2} ${dc3} ${dc4} ${dc5} ${dc6} ${dc7} ${dc8} ${dc9} ${dc10} ${dc11} ${dc12} ${dc13} ${dc14} ${dc15} ${dc16} ${dc17} ${dc18} ${dc19}
-${dc20} ${dc21} ${dc22} ${dc23} ${dc24} ${dc25} ${dc26} ${dc27} ${dc28} ${dc29} ${dc30} ${dc31} ${dc32} ${dc33}"
-
-
-#GENERATING THE METAFILES.
 MDL2="GFSHPC"
-for verday in ${verdays}
-    do
-    cominday=$(echo ${verday} | cut -c -8)
-    #XXW export HPCGFS=$COMROOT/nawips/prod/${mdl}.${cominday}
-    # BV export HPCGFS=$COMROOT/nawips/${envir}/${mdl}.${cominday}
-    export HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cyc}/${COMPONENT}/gempak
+#GENERATING THE METAFILES.
+# seq won't give us any splitting problems, ignore warnings
+# shellcheck disable=SC2207,SC2312
+lookbacks=($(IFS=$'\n' seq 6 6 180) $(IFS=$'\n' seq 192 12 216))
+for lookback in "${lookbacks[@]}"; do
+    init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")"
+    init_PDY=${init_time:0:8}
+    init_cyc=${init_time:8:2}
+
+    if (( init_time <= ${SDATE:-0} )); then
+        echo "Skipping ver for ${init_time} because it is before the experiment began"
+        if (( lookback == "${lookbacks[0]}" )); then
+            echo "First forecast time, no metafile produced"
+            exit 0
+        else
+            break
+        fi
+    fi
+
+    dgdattim="f$(printf "%03g" "${lookback}")"
 
-    if [ ${verday} -eq ${dc1} ] ; then
-        dgdattim=f006
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle1}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate1}/${cycle1}00"
-    elif [ ${verday} -eq ${dc2} ] ; then
-        dgdattim=f012
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle2}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate2}/${cycle2}00"
-    elif [ ${verday} -eq ${dc3} ] ; then
-        dgdattim=f018
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle3}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate3}/${cycle3}00"
-    elif [ ${verday} -eq ${dc4} ] ; then
-        dgdattim=f024
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle4}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate4}/${cycle4}00"
-    elif [ ${verday} -eq ${dc5} ] ; then
-        dgdattim=f030
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle5}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate5}/${cycle5}00"
-    elif [ ${verday} -eq ${dc6} ] ; then
-        dgdattim=f036
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle6}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate6}/${cycle6}00"
-    elif [ ${verday} -eq ${dc7} ] ; then
-        dgdattim=f042
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle7}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate7}/${cycle7}00"
-    elif [ ${verday} -eq ${dc8} ] ; then
-        dgdattim=f048
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle8}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate8}/${cycle8}00"
-    elif [ ${verday} -eq ${dc9} ] ; then
-        dgdattim=f054
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle9}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate9}/${cycle9}00"
-    elif [ ${verday} -eq ${dc10} ] ; then
-        dgdattim=f060
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle10}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate10}/${cycle10}00"
-    elif [ ${verday} -eq ${dc11} ] ; then
-        dgdattim=f066
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle11}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate11}/${cycle11}00"
-    elif [ ${verday} -eq ${dc12} ] ; then
-        dgdattim=f072
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle12}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate12}/${cycle12}00"
-    elif [ ${verday} -eq ${dc13} ] ; then
-        dgdattim=f078
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle13}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate13}/${cycle13}00"
-    elif [ ${verday} -eq ${dc14} ] ; then
-        dgdattim=f084
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle14}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate14}/${cycle14}00"
-    elif [ ${verday} -eq ${dc15} ] ; then
-        dgdattim=f090
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle15}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate15}/${cycle15}00"
-    elif [ ${verday} -eq ${dc16} ] ; then
-        dgdattim=f096
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle16}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate16}/${cycle16}00"
-    elif [ ${verday} -eq ${dc17} ] ; then
-        dgdattim=f102
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle17}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate17}/${cycle17}00"
-    elif [ ${verday} -eq ${dc18} ] ; then
-        dgdattim=f108
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle18}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate18}/${cycle18}00"
-    elif [ ${verday} -eq ${dc19} ] ; then
-        dgdattim=f114
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle19}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate19}/${cycle19}00"
-    elif [ ${verday} -eq ${dc20} ] ; then
-        dgdattim=f120
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle20}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate20}/${cycle20}00"
-    elif [ ${verday} -eq ${dc21} ] ; then
-        dgdattim=f126
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle21}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate21}/${cycle21}00"
-    elif [ ${verday} -eq ${dc22} ] ; then
-        dgdattim=f132
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle22}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate22}/${cycle22}00"
-    elif [ ${verday} -eq ${dc23} ] ; then
-        dgdattim=f138
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle23}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate23}/${cycle23}00"
-    elif [ ${verday} -eq ${dc24} ] ; then
-        dgdattim=f144
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle24}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate24}/${cycle24}00"
-    elif [ ${verday} -eq ${dc25} ] ; then
-        dgdattim=f150
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle25}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate25}/${cycle25}00"
-    elif [ ${verday} -eq ${dc26} ] ; then
-        dgdattim=f156
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle26}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate26}/${cycle26}00"
-    elif [ ${verday} -eq ${dc27} ] ; then
-        dgdattim=f162
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle27}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate27}/${cycle27}00"
-    elif [ ${verday} -eq ${dc28} ] ; then
-        dgdattim=f168
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle28}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate28}/${cycle28}00"
-    elif [ ${verday} -eq ${dc29} ] ; then
-        dgdattim=f174
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle29}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate29}/${cycle29}00"
-    elif [ ${verday} -eq ${dc30} ] ; then
-        dgdattim=f180
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle30}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate30}/${cycle30}00"
-    elif [ ${verday} -eq ${dc31} ] ; then
-        dgdattim=f192
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle31}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate31}/${cycle31}00"
-    elif [ ${verday} -eq ${dc32} ] ; then
-        dgdattim=f204
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle32}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate32}/${cycle32}00"
-    elif [ ${verday} -eq ${dc33} ] ; then
-        dgdattim=f216
-        HPCGFS=${COMINgempak}/${mdl}.${cominday}/${cycle33}/${COMPONENT}/gempak
-        grid="F-${MDL2} | ${sdate33}/${cycle33}00"
+    # Create symlink in DATA to sidestep gempak path limits
+    HPCGFS="${RUN}.${init_time}"
+    if [[ ! -L "${HPCGFS}" ]]; then
+        YMD=${init_PDY} HH=${init_cyc} GRID="1p00" declare_from_tmpl source_dir:COM_ATMOS_GEMPAK_TMPL
+        ${NLN} "${source_dir}" "${HPCGFS}"
     fi
 
-# 500 MB HEIGHT METAFILE
+    grid="F-${MDL2} | ${init_PDY}/${init_cyc}00"
 
-export pgm=gdplot2_nc;. prep_step; startmsg
-$GEMEXE/gdplot2_nc << EOFplt
+    # 500 MB HEIGHT METAFILE
+
+    export pgm=gdplot2_nc;. prep_step
+    "${GEMEXE}/gdplot2_nc" << EOFplt
 PROJ     = STR/90.0;-95.0;0.0
 GAREA    = 5.1;-124.6;49.6;-11.9
 map      = 1//2
@@ -417,7 +113,7 @@ title    = 5/-1/~ ? GFS PMSL
 clear    = no
 r
 
-!PROJ     = 
+!PROJ     =
 !GAREA    = bwus
 !gdfile   = ${vergrid}
 !gdattim  = ${fcsthr}
@@ -443,7 +139,7 @@ r
 ! SOUTH AMERICAN AREA.
 ! 500 MB
 
-PROJ     = 
+PROJ     =
 GAREA    = samps
 map      = 1//2
 clear    = yes
@@ -498,29 +194,29 @@ r
 
 ex
 EOFplt
-export err=$?;err_chk
-#####################################################
-# GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
-# WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
-# FOR THIS CASE HERE.
-#####################################################
-ls -l $metaname
-export err=$?;export pgm="GEMPAK CHECK FILE";err_chk
+    export err=$?;err_chk
+
+    #####################################################
+    # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE
+    # WHEN IT CAN NOT PRODUCE THE DESIRED GRID.  CHECK
+    # FOR THIS CASE HERE.
+    #####################################################
+    if (( err != 0 )) || [[ ! -s "${metaname}" ]] &> /dev/null; then
+        echo "FATAL ERROR: Failed to create gempak meta file ${metaname}"
+        exit $(( err + 100 ))
+    fi
 
 done
 
-if [ $SENDCOM = "YES" ] ; then
-   mv ${metaname} ${COMOUT}/gfsver_${PDY}_${cyc}
-   if [ $SENDDBN = "YES" ] ; then
-      ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-      ${COMOUT}/gfsver_${PDY}_${cyc}
-      if [ $DBN_ALERT_TYPE = "GFS_METAFILE_LAST" ] ; then
+mv "${metaname}" "${COM_ATMOS_GEMPAK_META}/gfsver_${PDY}_${cyc}"
+if [[ "${SENDDBN}" == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+        "${COM_ATMOS_GEMPAK_META}/gfsver_${PDY}_${cyc}"
+    if [[ "${DBN_ALERT_TYPE}" = "GFS_METAFILE_LAST" ]] ; then
         DBN_ALERT_TYPE=GFS_METAFILE
-        ${DBNROOT}/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-        ${COMOUT}/gfsver_${PDY}_${cyc}
-      fi
-   fi
+        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+            "${COM_ATMOS_GEMPAK_META}/gfsver_${PDY}_${cyc}"
+    fi
 fi
 
-
 exit
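
The rewritten gfs_meta_ver.sh above collapses thirty-three hand-maintained NDATE blocks into a single loop over lookback hours. A standalone sketch of the same date arithmetic, assuming GNU date is available (it is what the hunk itself calls); the PDY/cyc values are made up:

#! /usr/bin/env bash
# Reproduce the lookback -> init_time / dgdattim mapping from the loop above.
PDY=20240102
cyc=12
for lookback in $(seq 6 6 18); do     # the real list runs 6..180 by 6, then 192..216 by 12
    init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")"
    dgdattim="f$(printf '%03d' "${lookback}")"
    echo "verify ${PDY}${cyc} ${dgdattim} against the ${init_time} run"
done
# verify 2024010212 f006 against the 2024010206 run
# verify 2024010212 f012 against the 2024010200 run
# verify 2024010212 f018 against the 2024010118 run
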
diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
index 6ad5c8f31b..a1e0c9f1d5 100755
--- a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
+++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal analdiag"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
 export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"}
 
 
@@ -24,11 +23,11 @@ export gcyc=${GDATE:8:2}
 export GDUMP="gdas"
 export GDUMP_ENS="enkf${GDUMP}"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
 export APREFIX="${RUN}.t${cyc}z."
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
 
 ###############################################################
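
This and the following j-jobs drop the CDUMP export and use ${RUN/enkf} directly. That is ordinary bash pattern substitution with an empty replacement, so the deterministic prefix is recovered from the ensemble RUN names and non-ensemble values pass through unchanged; a short demonstration:

#! /usr/bin/env bash
# ${RUN/enkf} removes the first occurrence of "enkf" from ${RUN}.
for RUN in enkfgdas enkfgfs gdas gfs; do
    echo "RUN=${RUN} -> ${RUN/enkf}"
done
# RUN=enkfgdas -> gdas
# RUN=enkfgfs -> gfs
# RUN=gdas -> gdas
# RUN=gfs -> gfs
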
diff --git a/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/jobs/JGDAS_ATMOS_CHGRES_FORENKF
index 1bbed53586..5747675fe2 100755
--- a/jobs/JGDAS_ATMOS_CHGRES_FORENKF
+++ b/jobs/JGDAS_ATMOS_CHGRES_FORENKF
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal echgres"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${RUN/enkf}
 export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"}
 
 
@@ -15,11 +14,11 @@ export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"}
 # Begin JOB SPECIFIC work
 ##############################################
 
-export APREFIX="${CDUMP}.t${cyc}z."
+export APREFIX="${RUN/enkf}.t${cyc}z."
 export APREFIX_ENS="${RUN}.t${cyc}z."
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
-MEMDIR="mem001" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY
+MEMDIR="mem001" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL
 
 ###############################################################
 # Run relevant script
diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK
index 1535e07ae3..f5c00b9c98 100755
--- a/jobs/JGDAS_ATMOS_GEMPAK
+++ b/jobs/JGDAS_ATMOS_GEMPAK
@@ -3,98 +3,57 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak"
 
-# TODO (#1219) This j-job is not part of the rocoto suite
-
-################################
-# Set up the HOME directory
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
-export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
-export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
-
 ############################################
 # Set up model and cycle specific variables
 ############################################
-
-export fend=09
-export finc=3
-export fstart=00
-export GRIB=pgrb2f
+export model=${model:-gdas}
+export GRIB=${GRIB:-pgrb2f}
 export EXT=""
-export DBN_ALERT_TYPE=GDAS_GEMPAK
+export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GDAS_GEMPAK}
 
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-###################################
-# Specify NET and RUN Name and model
-####################################
-export model=${model:-gdas}
-
 ##############################################
 # Define COM directories
 ##############################################
-for grid in 0p25 0p50 1p00; do
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
+for grid in 0p25 1p00; do
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
 done
 
-for grid in 1p00 0p25; do
+for grid in 0p25 1p00; do
   prod_dir="COM_ATMOS_GEMPAK_${grid}"
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL"
 
   if [[ ! -d "${!prod_dir}" ]] ; then
     mkdir -m 775 -p "${!prod_dir}"
   fi
 done
 
-
-# TODO: These actions belong in an ex-script not a j-job
-if [[ -f poescript ]]; then
-   rm -f poescript
-fi
-
 ########################################################
-# Execute the script.
-echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" >> poescript
+# Execute the script for one-degree grib
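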
 ########################################################
 
+"${SCRgfs}/exgdas_atmos_nawips.sh" "1p00" "${FHR3}" "GDAS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}"
+
 ########################################################
 # Execute the script for quarter-degree grib
-echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas_0p25 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" >> poescript
 ########################################################
 
-cat poescript
-
-chmod 775 ${DATA}/poescript
-export MP_PGMMODEL=mpmd
-export MP_CMDFILE=${DATA}/poescript
+"${SCRgfs}/exgdas_atmos_nawips.sh" "0p25" "${FHR3}" "GDAS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}"
 
-ntasks=${NTASKS_GEMPAK:-$(cat ${DATA}/poescript | wc -l)}
-ptile=${PTILE_GEMPAK:-4}
-threads=${NTHREADS_GEMPAK:-1}
-export OMP_NUM_THREADS=${threads}
-APRUN="mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp"
-
-APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}}
-
-${APRUN_GEMPAKCFP} ${DATA}/poescript
 export err=$?; err_chk
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
-
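
The COM-directory loop in the hunk above leans on bash indirect expansion: prod_dir holds the name of the per-grid variable and ${!prod_dir} dereferences it. A short sketch with illustrative paths (not real COM locations):

#! /usr/bin/env bash
# One loop creates both per-grid output directories via ${!prod_dir}.
COM_ATMOS_GEMPAK_0p25="/tmp/example/gempak/0p25"   # illustrative paths only
COM_ATMOS_GEMPAK_1p00="/tmp/example/gempak/1p00"
for grid in 0p25 1p00; do
    prod_dir="COM_ATMOS_GEMPAK_${grid}"
    mkdir -m 775 -p "${!prod_dir}"
    echo "${prod_dir} -> ${!prod_dir}"
done
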
diff --git a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
index 6948d29df6..dca629c845 100755
--- a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
+++ b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC
@@ -4,29 +4,13 @@
 # GDAS GEMPAK META NCDC PRODUCT GENERATION
 ############################################
 
-# TODO (#1222) This j-job is not part of the rocoto
-
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak"
 
-################################
-# Set up the HOME directory
-################################
-export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}}
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
-export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
-export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
-
-#
 # Now set up GEMPAK/NTRANS environment
-#
 
-cp ${FIXgempak}/datatype.tbl datatype.tbl
+# datatype.tbl specifies the paths and filenames of the data files
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 ###################################
 # Specify NET and RUN Name and model
@@ -50,29 +34,37 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak}
-export COMINgdas=${COMINgdas:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}}
-export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak/meta}
-export COMOUTncdc=${COMOUTncdc:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}}
+GRID=1p00 YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_1p00:COM_ATMOS_GEMPAK_TMPL"
 
-export COMINukmet=${COMINukmet:-$(compath.py ${envir}/ukmet/${ukmet_ver})/ukmet}
-export COMINecmwf=${COMINecmwf:-$(compath.py ${envir}/ecmwf/${ecmwf_ver})/ecmwf}
+GRID="meta" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_META:COM_ATMOS_GEMPAK_TMPL"
+if [[ ! -d "${COM_ATMOS_GEMPAK_META}" ]]; then
+  mkdir -m 775 -p "${COM_ATMOS_GEMPAK_META}"
+fi
 
-export COMOUTukmet=${COMOUT}
-export COMOUTecmwf=${COMOUT}
+if (( cyc%12 == 0 )); then
+  GRID="gif" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_GIF:COM_ATMOS_GEMPAK_TMPL"
+  if [[ ! -d "${COM_ATMOS_GEMPAK_GIF}" ]]; then
+    mkdir -m 775 -p "${COM_ATMOS_GEMPAK_GIF}"
+  fi
+fi
 
-mkdir -m 775 -p ${COMOUT} ${COMOUTncdc} ${COMOUTukmet} ${COMOUTecmwf}
+export COMINukmet="${COMINukmet:-$(compath.py "${envir}/ukmet/${ukmet_ver}")/ukmet}"
+export COMINecmwf="${COMINecmwf:-$(compath.py "${envir}/ecmwf/${ecmwf_ver}")/ecmwf}"
 
 export pgmout=OUTPUT.$$
 
-
 ########################################################
 # Execute the script.
-${USHgempak}/gdas_meta_na.sh
-${USHgempak}/gdas_ecmwf_meta_ver.sh
-${USHgempak}/gdas_meta_loop.sh
-${USHgempak}/gdas_ukmet_meta_ver.sh
+"${HOMEgfs}/gempak/ush/gdas_meta_na.sh"
 export err=$?; err_chk
+"${HOMEgfs}/gempak/ush/gdas_meta_loop.sh"
+export err=$?; err_chk
+if [[ "${cyc}" == '06' ]]; then
+  "${HOMEgfs}/gempak/ush/gdas_ecmwf_meta_ver.sh"
+  export err=$?; err_chk
+  "${HOMEgfs}/gempak/ush/gdas_ukmet_meta_ver.sh"
+  export err=$?; err_chk
+fi
 ########################################################
 
 ############################################
@@ -81,21 +73,23 @@ export err=$?; err_chk
 
 ########################################################
 # Execute the script.
-${SRCgfs}/exgdas_atmos_gempak_gif_ncdc.sh
+if (( cyc%12 == 0 )); then
+  "${SCRgfs}/exgdas_atmos_gempak_gif_ncdc.sh"
+fi
 export err=$?; err_chk
 ########################################################
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
 
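The (( cyc%12 == 0 )) guards above limit the GIF/NCDC products to the 00z and 12z cycles, while the ECMWF/UKMET verification is further restricted to 06z. A trivial check of the arithmetic, assuming the usual four cycle values:

#! /usr/bin/env bash
for cyc in 00 06 12 18; do
    if (( cyc % 12 == 0 )); then
        echo "cyc=${cyc}: generate GIF/NCDC products"
    else
        echo "cyc=${cyc}: skip"
    fi
done
# cyc=00: generate GIF/NCDC products
# cyc=06: skip
# cyc=12: generate GIF/NCDC products
# cyc=18: skip
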
diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN
index 65479a9fec..446112eaa0 100755
--- a/jobs/JGDAS_ATMOS_VERFOZN
+++ b/jobs/JGDAS_ATMOS_VERFOZN
@@ -17,8 +17,8 @@ export gcyc=${GDATE:8:2}
 #---------------------------------------------
 # OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE
 #
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_OZNMON
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_OZNMON
 
 export oznstat="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.oznstat"
 export TANKverf_ozn=${TANKverf_ozn:-${COM_ATMOS_OZNMON}}
diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD
index d440f91b6e..b777b1ebe2 100755
--- a/jobs/JGDAS_ATMOS_VERFRAD
+++ b/jobs/JGDAS_ATMOS_VERFRAD
@@ -18,9 +18,9 @@ export gcyc=${GDATE:8:2}
 # COMOUT - WHERE GSI OUTPUT RESIDES
 # TANKverf - WHERE OUTPUT DATA WILL RESIDE
 #############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RADMON
-YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_RADMON
+YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL
 
 export biascr="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias"
 export radstat="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat"
diff --git a/jobs/JGDAS_ENKF_ARCHIVE b/jobs/JGDAS_ENKF_ARCHIVE
index f986fd38b2..29ef9c1812 100755
--- a/jobs/JGDAS_ENKF_ARCHIVE
+++ b/jobs/JGDAS_ENKF_ARCHIVE
@@ -7,18 +7,16 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "earc" -c "base earc"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${RUN/enkf}
-
-YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \
-  COM_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL \
-  COM_ATMOS_HISTORY_ENSSTAT:COM_ATMOS_HISTORY_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_TOP
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+  COMIN_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL \
+  COMIN_ATMOS_HISTORY_ENSSTAT:COM_ATMOS_HISTORY_TMPL
 
 ###############################################################
 # Run archive script
 ###############################################################
 
-"${SCRgfs}/exgdas_enkf_earc.sh"
+"${SCRgfs}/exgdas_enkf_earc.py"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG
index 40f2968869..cc8c933cc8 100755
--- a/jobs/JGDAS_ENKF_DIAG
+++ b/jobs/JGDAS_ENKF_DIAG
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs analdiag edi
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
 export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"}
 export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"}
 
@@ -26,19 +25,19 @@ export GDUMP_ENS="enkf${GDUMP}"
 
 export CASE=${CASE_ENS}
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
 export APREFIX="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP_ENS}.t${gcyc}z."
 GPREFIX_DET="${GDUMP}.t${gcyc}z."
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_OBS_PREV:COM_OBS_TMPL \
     COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL
 
-MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
 
 
diff --git a/jobs/JGDAS_ENKF_ECEN b/jobs/JGDAS_ENKF_ECEN
index cd77eebb55..38bf847b38 100755
--- a/jobs/JGDAS_ENKF_ECEN
+++ b/jobs/JGDAS_ENKF_ECEN
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ecen" -c "base ecen"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
 
 ##############################################
 # Begin JOB SPECIFIC work
@@ -23,19 +22,19 @@ export GDUMP_ENS="enkf${GDUMP}"
 
 export CASE=${CASE_ENS}
 
-export OPREFIX="${CDUMP}.t${cyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
+export APREFIX="${RUN/enkf}.t${cyc}z."
 export APREFIX_ENS="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
 export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z."
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx \
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
   COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL
 
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
   COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL
 
-MEMDIR="ensstat" RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR="ensstat" RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
   COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL
 
 
diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST
deleted file mode 100755
index 53408df8cf..0000000000
--- a/jobs/JGDAS_ENKF_FCST
+++ /dev/null
@@ -1,84 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs"
-
-
-##############################################
-# Set variables used in the script
-##############################################
-export CDUMP=${RUN/enkf}
-
-##############################################
-# Begin JOB SPECIFIC work
-##############################################
-
-export CASE=${CASE_ENS}
-
-YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP
-
-
-# Forecast length for EnKF forecast
-export FHMIN=${FHMIN_ENKF}
-export FHOUT=${FHOUT_ENKF}
-export FHMAX=${FHMAX_ENKF}
-
-# Get ENSBEG/ENSEND from ENSGRP and NMEM_EFCSGRP
-if [[ $CDUMP == "gfs" ]]; then
-    export NMEM_EFCSGRP=${NMEM_EFCSGRP_GFS:-${NMEM_EFCSGRP:-1}}
-fi
-export ENSEND=$((NMEM_EFCSGRP * 10#${ENSGRP}))
-export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1))
-
-if [[ ${DO_WAVE} == "YES" ]]; then
-  declare -rx RUNwave="${RUN}wave"
-fi
-
-###############################################################
-# Run relevant script
-
-${ENKFFCSTSH:-${SCRgfs}/exgdas_enkf_fcst.sh}
-status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
-
-
-# Double check the status of members in ENSGRP
-EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}"
-npass=0
-if [ -f ${EFCSGRP} ]; then
-    npass=$(grep "PASS" ${EFCSGRP} | wc -l)
-fi
-echo "${npass}/${NMEM_EFCSGRP} members successfull in efcs.grp${ENSGRP}"
-if [ ${npass} -ne ${NMEM_EFCSGRP} ]; then
-    echo "FATAL ERROR: Failed members in group ${ENSGRP}, ABORT!"
-    cat ${EFCSGRP}
-    exit 99
-fi
-
-
-##############################################
-# Send Alerts
-##############################################
-if [ ${SENDDBN} = YES ] ; then
-   ${DBNROOT}/bin/dbn_alert MODEL ENKF1_MSC_fcsstat ${job} ${EFCSGRP}
-fi
-
-
-##############################################
-# End JOB SPECIFIC work
-##############################################
-
-##############################################
-# Final processing
-##############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
-fi
-
-##########################################
-# Remove the Temporary working directory
-##########################################
-cd ${DATAROOT}
-[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA}
-
-exit 0
diff --git a/jobs/JGDAS_ENKF_POST b/jobs/JGDAS_ENKF_POST
index 0f7039d614..3a3b5b0c71 100755
--- a/jobs/JGDAS_ENKF_POST
+++ b/jobs/JGDAS_ENKF_POST
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "epos" -c "base epos"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${RUN/enkf}
 
 
 ##############################################
diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS
index 7c02512989..3cfe48bb2b 100755
--- a/jobs/JGDAS_ENKF_SELECT_OBS
+++ b/jobs/JGDAS_ENKF_SELECT_OBS
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${RUN/enkf}
 export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"}
 export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"}
 
@@ -24,26 +23,26 @@ export gcyc=${GDATE:8:2}
 export GDUMP="gdas"
 export GDUMP_ENS="enkf${GDUMP}"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
 export APREFIX="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP_ENS}.t${gcyc}z."
-APREFIX_DET="${CDUMP}.t${cyc}z."
+APREFIX_DET="${RUN/enkf}.t${cyc}z."
 GPREFIX_DET="${GDUMP}.t${gcyc}z."
 
 export GSUFFIX=".ensmean.nc"
 
 # Generate COM variables from templates
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
-MEMDIR='ensstat' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
+MEMDIR='ensstat' YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 declare -rx COM_ATMOS_ANALYSIS_ENS="${COM_ATMOS_ANALYSIS}"
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -r COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL
 
-MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \
 
-RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} generate_com -r COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL
+RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} declare_from_tmpl -r COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL
 
 mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
 
diff --git a/jobs/JGDAS_ENKF_SFC b/jobs/JGDAS_ENKF_SFC
index 3214812db8..1ed10f20c0 100755
--- a/jobs/JGDAS_ENKF_SFC
+++ b/jobs/JGDAS_ENKF_SFC
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "esfc" -c "base esfc"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
 
 ##############################################
 # Begin JOB SPECIFIC work
@@ -21,22 +20,22 @@ export gcyc=${GDATE:8:2}
 export GDUMP="gdas"
 export GDUMP_ENS="enkf${GDUMP}"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+export APREFIX="${RUN/enkf}.t${cyc}z."
 
 export CASE=${CASE_ENS}
 
-export OPREFIX="${CDUMP}.t${cyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN/enkf}.t${cyc}z."
+export APREFIX="${RUN/enkf}.t${cyc}z."
 export APREFIX_ENS="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
 export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z."
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS \
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS \
   COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
   COM_OBS_PREV:COM_OBS_TMPL \
   COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL
 
diff --git a/jobs/JGDAS_ENKF_UPDATE b/jobs/JGDAS_ENKF_UPDATE
index 1050529165..213b49081a 100755
--- a/jobs/JGDAS_ENKF_UPDATE
+++ b/jobs/JGDAS_ENKF_UPDATE
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "eupd" -c "base anal eupd"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
 
 
 ##############################################
@@ -25,10 +24,10 @@ export GDUMP_ENS="enkf${GDUMP}"
 export APREFIX="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP_ENS}.t${gcyc}z."
 
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
   COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL
 
-MEMDIR="ensstat" RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR="ensstat" RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
   COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL
 
 
diff --git a/jobs/JGDAS_FIT2OBS b/jobs/JGDAS_FIT2OBS
index 7638e4f0c8..7e000c95cf 100755
--- a/jobs/JGDAS_FIT2OBS
+++ b/jobs/JGDAS_FIT2OBS
@@ -8,8 +8,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "fit2obs" -c "base fit2obs"
 # Set variables used in the script
 ##############################################
 
-export CDUMP=${RUN/enkf}
-
 # Ignore spelling warning; nothing is misspelled
 # shellcheck disable=SC2153
 CDATE=$(${NDATE} -"${VBACKUP_FITS}" "${PDY}${cyc}") # set CDATE to lookback cycle for use in fit2obs package
@@ -19,8 +17,8 @@ vcyc=${CDATE:8:2}
 
 # These are used by fit2obs, so we can't change them to the standard COM variable names
 # shellcheck disable=SC2153
-YMD=${vday} HH=${vcyc} generate_com -rx COM_INA:COM_ATMOS_ANALYSIS_TMPL
-RUN=${CDUMP} YMD=${vday} HH=${vcyc} generate_com -rx COM_PRP:COM_OBS_TMPL
+YMD=${vday} HH=${vcyc} declare_from_tmpl -rx COM_INA:COM_ATMOS_ANALYSIS_TMPL
+RUN=${RUN/enkf} YMD=${vday} HH=${vcyc} declare_from_tmpl -rx COM_PRP:COM_OBS_TMPL
 
 # We want to defer variable expansion, so ignore warning about single quotes
 # shellcheck disable=SC2016
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT
index afac9fbc25..875fe9d0ee 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT
@@ -19,12 +19,12 @@ export GDUMP=${GDUMP:-"gdas"}
 export GPREFIX="${GDUMP}.t${gcyc}z."
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
-export APREFIX="${CDUMP}.t${cyc}z."
+export APREFIX="${RUN}.t${cyc}z."
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
 
 
 ##############################################
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
similarity index 58%
rename from jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY
rename to jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
index 4f4251b34f..7b8bb84809 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
@@ -1,29 +1,28 @@
 #!/bin/bash
 source "${HOMEgfs}/ush/preamble.sh"
-export WIPE_DATA="NO"
-export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun"
-
+source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalecen" -c "base ocnanal ocnanalecen"
 
 ##############################################
 # Set variables used in the script
 ##############################################
+# Ignore possible spelling error (nothing is misspelled)
+# shellcheck disable=SC2153
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
+export gPDY=${GDATE:0:8}
+export gcyc=${GDATE:8:2}
 
+YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+   COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
+   COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
 
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS
-
-mkdir -p "${COM_OCEAN_ANALYSIS}"
-
-export COMOUT=${COM_OCEAN_ANALYSIS}
-
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASOCNMBATVRFYSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh}
+EXSCRIPT=${GDASOCNCENPY:-${HOMEgfs}/scripts/exgdas_global_marine_analysis_ecen.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
@@ -40,8 +39,9 @@ if [[ -e "${pgmout}" ]] ; then
 fi
 
 ##########################################
-# Do not remove the Temporary working directory (do this in POST)
+# Remove the Temporary working directory
 ##########################################
 cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}"
 
 exit 0
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST
index 6034fc5425..00597f14f8 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST
@@ -8,14 +8,18 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalpost" -c "base ocnanalpost"
 ##############################################
 # Set variables used in the script
 ##############################################
+# TODO remove this CDUMP declaration when the GDAS script
+#      exgdas_global_marine_analysis_post.py is updated to look for RUN instead
+#      of CDUMP.
 export CDUMP=${CDUMP:-${RUN:-"gfs"}}
 export CDATE=${CDATE:-${PDY}${cyc}}
 export GDUMP=${GDUMP:-"gdas"}
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS COM_ICE_RESTART
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS COM_ICE_ANALYSIS COM_ICE_RESTART
 
 mkdir -p "${COM_OCEAN_ANALYSIS}"
+mkdir -p "${COM_ICE_ANALYSIS}"
 mkdir -p "${COM_ICE_RESTART}"
 
 ##############################################
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
index 2e49a9f14d..664df3aad6 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP
@@ -7,7 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanal
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${CDUMP:-${RUN:-"gfs"}}
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
 GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
@@ -16,25 +15,30 @@ export gPDY=${GDATE:0:8}
 export gcyc=${GDATE:8:2}
 export GDUMP=${GDUMP:-"gdas"}
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+export APREFIX="${RUN}.t${cyc}z."
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
    COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
    COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL \
    COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
 
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMIN_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \
+   COMIN_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL
+
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 
 # Add UFSDA to PYTHONPATH
 ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/"
-pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/"
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
 PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}:${pyiodaPATH}"
 export PYTHONPATH
 
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY
index aba76d7d1a..0d90c46184 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY
+++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY
@@ -6,7 +6,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanal
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${CDUMP:-${RUN:-"gfs"}}
 export GDUMP=${GDUMP:-"gdas"}
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
@@ -14,11 +13,11 @@ GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
 export gPDY=${GDATE:0:8}
 export gcyc=${GDATE:8:2}
 
-RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
+RUN=${GDUMP} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
 # To allow extraction of statistics from diag files
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 
 
 ##############################################
diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
index 516c7a403b..25641ea286 100755
--- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
+++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG
@@ -5,17 +5,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips"
 
 export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1}
 
-################################
-# Set up the HOME directory
-################################
-export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}}
-export USHgfs=${USHgfs:-${HOMEgfs}/ush}
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-
 ###################################
 # Specify NET and RUN Name and model
 ####################################
@@ -29,8 +18,8 @@ export SENDDBN=${SENDDBN:-NO}
 export SENDAWIP=${SENDAWIP:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO
-GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_WMO
+GRID="0p25" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
 
 if [[ ! -d "${COM_ATMOS_WMO}" ]] ; then
   mkdir -m 775 -p "${COM_ATMOS_WMO}"
@@ -47,7 +36,7 @@ export pgmout=OUTPUT.$$
 
 ########################################################
 # Execute the script.
-"${HOMEgfs}/scripts/exgfs_atmos_awips_20km_1p0deg.sh" "${fcsthrs}"
+"${SCRgfs}/exgfs_atmos_awips_20km_1p0deg.sh" "${fcsthrs}"
 export err=$?; err_chk
 ########################################################
 
diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2
deleted file mode 100755
index 5bd7749997..0000000000
--- a/jobs/JGFS_ATMOS_AWIPS_G2
+++ /dev/null
@@ -1,65 +0,0 @@
-#! /usr/bin/env bash
-
-########################################
-# GFS_AWIPS_G2 AWIPS PRODUCT GENERATION
-########################################
-
-source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips"
-
-export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1}
-
-################################
-# Set up the HOME directory
-################################
-export USHgfs=${USHgfs:-${HOMEgfs}/ush}
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
-
-###################################
-# Specify NET and RUN Name and model
-####################################
-export model=${model:-gfs}
-export COMPONENT="atmos"
-
-##############################################
-# Define COM directories
-##############################################
-export SENDDBN=${SENDDBN:-NO}
-export SENDAWIP=${SENDAWIP:-NO}
-export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
-
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO
-GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
-
-mkdir -m 775 -p "${COM_ATMOS_WMO}"
-
-export pgmout=OUTPUT.$$
-
-
-########################################################
-# Execute the script.
-#########################################################
-mkdir -m 775 awips_g1
-cd ${DATA}/awips_g1
-${HOMEgfs}/scripts/exgfs_atmos_grib_awips.sh ${fcsthrs}
-export err=$?; err_chk
-
-############################################
-# print exec I/O output
-############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
-fi
-
-###################################
-# Remove temp directories
-###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
-fi
-
diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS
index 5ac97e079c..de130bf9aa 100755
--- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS
+++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS
@@ -3,6 +3,10 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis"
 
+# Hack to temporarily skip this as the tracker has not been built
+#   on Hercules Rocky 9 yet
+# TODO: Remove this after tracker has been built for Rocky 9 #2639
+if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi
 
 ##############################################
 # Set variables used in the exglobal script
@@ -27,8 +31,8 @@ export SCRIPTens_tracker=${SCRIPTens_tracker:-${HOMEens_tracker}/scripts}
 ##############################################
 # Define COM directories
 ##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GENESIS
-YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GENESIS
+YMD=${PDY} HH=${cyc} GRID="0p25" declare_from_tmpl -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
 
 # The following variables are used by the tracker scripts which are outside
 #   of global-workflow and therefore can't be standardized at this time
diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER
index a91d8e3c5b..067de2c4aa 100755
--- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER
+++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER
@@ -3,6 +3,10 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker"
 
+# Hack to temporarily skip this as the tracker has not been built
+#   on Hercules Rocky 9 yet
+# TODO: Remove this after tracker has been built for Rocky 9 #2639
+if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi
 
 export COMPONENT="atmos"
 
@@ -11,7 +15,6 @@ export COMPONENT="atmos"
 # Set variables used in the exglobal script
 ##############################################
 export CDATE=${CDATE:-${PDY}${cyc}}
-export CDUMP=${RUN/enkf}
 
 
 ####################################
@@ -32,8 +35,8 @@ export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush}
 ##############################################
 # Define COM and Data directories
 ##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_TRACK COM_ATMOS_GENESIS
-YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_TRACK COM_ATMOS_GENESIS
+YMD=${PDY} HH=${cyc} GRID="0p25" declare_from_tmpl -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
 
 if [[ ! -d "${COM_ATMOS_TRACK}" ]]; then mkdir -p "${COM_ATMOS_TRACK}"; fi
 
@@ -66,7 +69,7 @@ fi
 #############################################################
 # Execute the script
 export pert="p01"
-export cmodel=${CDUMP}
+export cmodel=${RUN/enkf}
 export loopnum=1
 
 #-----------input data checking -----------------
diff --git a/jobs/JGFS_ATMOS_FBWIND b/jobs/JGFS_ATMOS_FBWIND
index e04b06c0d6..2071954b0d 100755
--- a/jobs/JGFS_ATMOS_FBWIND
+++ b/jobs/JGFS_ATMOS_FBWIND
@@ -6,18 +6,7 @@
 # GFS FBWIND PRODUCT GENERATION
 ############################################
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "fbwind" -c "base"
-
-################################
-# Set up the HOME directory
-################################
-export USHgfs=${USHgfs:-${HOMEgfs}/ush}
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "fbwind" -c "base fbwind"
 
 ###################################
 # Specify NET and RUN Name and model
@@ -28,33 +17,34 @@ export COMPONENT="atmos"
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}}
-export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}}
-export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo}
+
+GRID="0p25" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT:COM_ATMOS_WMO_TMPL
+if [[ ! -d "${COMOUT}" ]]; then
+  mkdir -m 775 -p "${COMOUT}"
+fi
 
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-mkdir -m 775 -p ${COMOUT} ${COMOUTwmo}
-
 
 ########################################################
 # Execute the script.
-${HOMEgfs}/scripts/exgfs_atmos_fbwind.sh
+"${SCRgfs}/exgfs_atmos_fbwind.sh"
 export err=$?;err_chk
 ########################################################
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
 
diff --git a/jobs/JGFS_ATMOS_FSU_GENESIS b/jobs/JGFS_ATMOS_FSU_GENESIS
index 8b1600de88..1b92816b61 100755
--- a/jobs/JGFS_ATMOS_FSU_GENESIS
+++ b/jobs/JGFS_ATMOS_FSU_GENESIS
@@ -3,6 +3,11 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis_fsu" -c "base genesis_fsu"
 
+# Hack to temporarily skip this as the tracker has not been built
+#   on Hercules Rocky 9 yet
+# TODO: Remove this after tracker has been built for Rocky 9 #2639
+if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi
+
 export COMPONENT="atmos"
 
 
@@ -10,7 +15,6 @@ export COMPONENT="atmos"
 # Set variables used in the exglobal script
 ##############################################
 export CDATE=${CDATE:-${PDY}${cyc}}
-export CDUMP=${CDUMP:-${RUN:-"gfs"}}
 
 
 ####################################
@@ -33,8 +37,8 @@ export PYTHONPATH=${USHens_tracker}/FSUgenesisPY:${PYTHONPATH}
 ##############################################
 # Define COM and Data directories
 ##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GENESIS
-YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GENESIS
+YMD=${PDY} HH=${cyc} GRID="0p25" declare_from_tmpl -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL
 
 # The following variables are used by the tracker scripts which are outside
 #   of global-workflow and therefore can't be standardized at this time
diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK
index ddf10342d2..9988378fe5 100755
--- a/jobs/JGFS_ATMOS_GEMPAK
+++ b/jobs/JGFS_ATMOS_GEMPAK
@@ -3,59 +3,37 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak"
 
-
-################################
-# Set up the HOME directory
-################################
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
-export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
-export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-
-# For half-degree P Grib files
-export DO_HD_PGRB=${DO_HD_PGRB:-YES}
-
 ############################################
 # Set up model and cycle specific variables
 ############################################
-export finc=${finc:-3}
-export fstart=${fstart:-0}
 export model=${model:-gfs}
 export GRIB=${GRIB:-pgrb2f}
 export EXT=""
 export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GFS_GEMPAK}
 
-###################################
-# Specify NET and RUN Name and model
-####################################
-export model=${model:-gfs}
+export SENDDBN=${SENDDBN:-NO}
+export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
+
+# For half-degree P Grib files
+export DO_HD_PGRB=${DO_HD_PGRB:-YES}
 
 ##############################################
 # Define COM directories
 ##############################################
-export SENDDBN=${SENDDBN:-NO}
-export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
-
 for grid in 0p25 0p50 1p00; do
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
 done
 
 for grid in 1p00 0p50 0p25 40km 35km_atl 35km_pac; do
   prod_dir="COM_ATMOS_GEMPAK_${grid}"
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL"
 
   if [[ ! -d "${!prod_dir}" ]] ; then
     mkdir -m 775 -p "${!prod_dir}"
   fi
 done
 
-# TODO: These actions belong in an ex-script not a j-job
-if [[ -f poescript ]]; then
-   rm -f poescript
-fi
-
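+# The 10# prefix forces base-10 arithmetic so a leading zero in FHR3 (e.g. "009") is not read as octal below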
+fhr=10#${FHR3}
 ocean_domain_max=180
 if (( ocean_domain_max > FHMAX_GFS )); then
   ocean_domain_max=${FHMAX_GFS}
@@ -64,90 +42,103 @@ fi
 #################################################################
 # Execute the script for the 384 hour 1 degree grib
 ##################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.2 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.3 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.4 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.5 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.6 " >> poescript
+fhmin=0
+fhmax=240
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 3 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}"
+  fi
+fi
+
+fhmin=252
+fhmax=384
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 12 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}"
+  fi
+fi
 
 #################################################################
 # Execute the script for the half-degree grib
 ##################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.2 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.3 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.4 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.5 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.6 " >> poescript
+fhmin=0
+fhmax=240
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 3 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}"
+  fi
+fi
 
-#################################################################
-# Execute the script for the quater-degree grib
-####################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.2 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.3 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.4 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.5 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.6 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.7 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.8 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.9 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.10 " >> poescript
+fhmin=246
+fhmax=276
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 6 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}"
+  fi
+fi
 
-####################################################################
-# Execute the script to create the 35km Pacific grids for OPC
-#####################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.2 " >> poescript
+fhmin=288
+fhmax=384
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 12 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}"
+  fi
+fi
 
+#################################################################
+# Execute the script for the quarter-degree grib
 ####################################################################
-# Execute the script to create the 35km Atlantic grids for OPC
-#####################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.2 " >> poescript
+fhmin=0
+fhmax=120
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 1 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}"
+  fi
+fi
 
-#####################################################################
-# Execute the script to create the 40km grids for HPC
-######################################################################
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.1 " >> poescript
-echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.2 " >> poescript
-
-if [[ ${CFP_MP:-"NO"} == "YES" ]]; then
-  # Add task number to the MPMD script
-  nl -n ln -v 0 poescript > poescript.new
-  mv poescript.new poescript
+fhmin=123
+fhmax=240
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 3 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}"
+  fi
 fi
 
-cat poescript
+fhmin=252
+fhmax=384
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 12 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}"
+  fi
+fi
 
-chmod 775 ${DATA}/poescript
-export MP_PGMMODEL=mpmd
-export MP_CMDFILE=${DATA}/poescript
+####################################################################
+# Execute the script to create the 35km and 40km grids
+#####################################################################
+fhmin=0
+fhmax="${ocean_domain_max}"
+if (( fhr >= fhmin && fhr <= fhmax )); then
+  if ((fhr % 3 == 0)); then
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_pac" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_35km_pac}"
 
-ntasks=$(cat ${DATA}/poescript | wc -l)
-ptile=${PTILE_GEMPAK:-4}
-threads=${NTHREADS_GEMPAK:-1}
-export OMP_NUM_THREADS=${threads}
-APRUN=${APRUN:-"mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp"}
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_atl" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_35km_atl}"
 
-APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}}
-APRUNCFP=${APRUN_GEMPAKCFP}
+    "${SCRgfs}/exgfs_atmos_nawips.sh" "40km" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_40km}"
+  fi
+fi
 
-${APRUNCFP} ${DATA}/poescript
 export err=$?; err_chk
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
-
diff --git a/jobs/JGFS_ATMOS_GEMPAK_META b/jobs/JGFS_ATMOS_GEMPAK_META
index 8e1c05763f..480dc0f1d6 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_META
+++ b/jobs/JGFS_ATMOS_GEMPAK_META
@@ -6,7 +6,7 @@
 # GFS GEMPAK META PRODUCT GENERATION
 ############################################
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak"
 
 
 ###############################################
@@ -18,25 +18,19 @@ export MP_LABELIO=yes
 export MP_PULSE=0
 export MP_DEBUG_NOTIMEOUT=yes
 
-################################
-# Set up the HOME directory
-################################
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
-export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
-export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-
-cp  ${FIXgempak}/datatype.tbl datatype.tbl
+cp "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl
 
 #############################################
 #set the fcst hrs for all the cycles
 #############################################
-export fhbeg=00
+export fhbeg=0
 export fhend=384
 export fhinc=12
 
+if (( fhend > FHMAX_GFS )); then
+  export fhend=${FHMAX_GFS}
+fi
+
 ###################################
 # Specify NET and RUN Name and model
 ####################################
@@ -51,37 +45,37 @@ export DBN_ALERT_TYPE=GFS_METAFILE
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak}
-export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak/meta}
-export COMINgempak=${COMINgempak:-$(compath.py ${envir}/${NET}/${gfs_ver})}
-
-export COMINukmet=${COMINukmet:-$(compath.py ${envir}/ukmet/${ukmet_ver})/ukmet}
-export COMINecmwf=${COMINecmwf:-$(compath.py ${envir}/ecmwf/${ecmwf_ver})/ecmwf}
-export COMINnam=${COMINnam:-$(compath.py ${envir}/nam/${nam_ver})/nam}
+export COMINukmet=${COMINukmet:-$(compath.py "${envir}/ukmet/${ukmet_ver}")/ukmet}
+export COMINecmwf=${COMINecmwf:-$(compath.py "${envir}/ecmwf/${ecmwf_ver}")/ecmwf}
+export COMINnam=${COMINnam:-$(compath.py "${envir}/nam/${nam_ver}")/nam}
 
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-mkdir -m 775 -p ${COMOUT}
+GRID=1p00 YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_1p00:COM_ATMOS_GEMPAK_TMPL"
 
+GRID="meta" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_META:COM_ATMOS_GEMPAK_TMPL"
+if [[ ! -d "${COM_ATMOS_GEMPAK_META}" ]] ; then
+  mkdir -m 775 -p "${COM_ATMOS_GEMPAK_META}"
+fi
 
 ########################################################
 # Execute the script.
-${SRCgfs}/exgfs_atmos_gempak_meta.sh
+"${SCRgfs}/exgfs_atmos_gempak_meta.sh"
 export err=$?; err_chk
 ########################################################
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
 
diff --git a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
index 58b24c5e49..d62c3320a1 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
+++ b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF
@@ -1,41 +1,20 @@
 #! /usr/bin/env bash
 
-# TODO (#1222) This job is not part of the rocoto suite
-
 ############################################
 # GFS GEMPAK NCDC PRODUCT GENERATION
 ############################################
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_gif" -c "base"
-
-
-################################
-# Set up the HOME directory
-################################
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/gempak/fix}
-export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
-export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-export UTILgfs=${UTILgfs:-${HOMEgfs}/util}
-
-######################################
-# Set up the GEMPAK directory
-#######################################
-export HOMEgempak=${HOMEgempak:-${HOMEgfs}/gempak}
-export FIXgempak=${FIXgempak:-${HOMEgempak}/fix}
-export USHgempak=${USHgempak:-${HOMEgempak}/ush}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_gif" -c "base gempak"
 
 export MP_PULSE=0
 export MP_TIMEOUT=2000
 
-
-#
 # Set up model and cycle specific variables
-#
 export MODEL=GFS
 export fend=384
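+# Do not extend products beyond the configured forecast length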
+if (( fend > FHMAX_GFS )); then
+  export fend="${FHMAX_GFS}"
+fi
 
 # set increment to 6 hours  --  3 hours is available.
 export finc=6
@@ -50,37 +29,40 @@ export COMPONENT="atmos"
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak}
-export COMINgfs=${COMINgfs:-$(compath.py ${envir}/${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}}
-export COMINobsproc=${COMINobsproc:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}}
-export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}}
-export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo}
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_OBS"
+GRID=1p00 YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_1p00:COM_ATMOS_GEMPAK_TMPL"
+
+for grid in gif upper_air; do
+  gempak_dir="COM_ATMOS_GEMPAK_${grid^^}"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "${gempak_dir}:COM_ATMOS_GEMPAK_TMPL"
+  if [[ ! -d "${!gempak_dir}" ]]; then mkdir -m 775 -p "${!gempak_dir}"; fi
+done
+
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_WMO
+if [[ ! -d "${COM_ATMOS_WMO}" ]]; then mkdir -m 775 -p "${COM_ATMOS_WMO}"; fi
 
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-mkdir -m 775 -p ${COMOUT} ${COMOUTwmo}
-
 export pgmout=OUTPUT.$$
 
-
 ########################################################
 # Execute the script.
-${SRCgfs}/exgfs_atmos_gempak_gif_ncdc_skew_t.sh
+"${SCRgfs}/exgfs_atmos_gempak_gif_ncdc_skew_t.sh"
 export err=$?; err_chk
 ########################################################
 
 ############################################
 # print exec I/O output
 ############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ###################################
 # Remove temp directories
 ###################################
-if [ "${KEEPDATA}" != "YES" ] ; then
-  rm -rf ${DATA}
+if [[ "${KEEPDATA}" != "YES" ]] ; then
+  rm -rf "${DATA}"
 fi
 
diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
index 47415a39ff..a82d2805eb 100755
--- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
+++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC
@@ -1,43 +1,24 @@
 #! /usr/bin/env bash
 
-############################################
-# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION
-############################################
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base"
-
-
-################################
-# Set up the HOME directory
-################################
-export EXECgfs="${EXECgfs:-${HOMEgfs}/exec}"
-export PARMgfs="${PARMgfs:-${HOMEgfs}/parm}"
-export EXPDIR="${EXPDIR:-${HOMEgfs}/parm/config}"
-export FIXgempak="${FIXgempak:-${HOMEgfs}/gempak/fix}"
-export USHgempak="${USHgempak:-${HOMEgfs}/gempak/ush}"
-export SRCgfs="${SRCgfs:-${HOMEgfs}/scripts}"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base gempak"
 
-# For half-degree P Grib files
-#export DO_HD_PGRB=YES
-
-###################################
-# Specify NET and RUN Name and model
-####################################
+############################################
+# Set up model and cycle specific variables
+############################################
 export COMPONENT="atmos"
-export finc=3
-export model=gfs
+export model=${model:-gfs}
 export EXT=""
 
+export SENDDBN=${SENDDBN:-NO}
+export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
+
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN="${COMIN:-$(compath.py "${envir}"/"${NET}"/"${gfs_ver}")/${RUN}.${PDY}/${cyc}/${COMPONENT}}"
-export COMOUT="${COMOUT:-$(compath.py -o "${NET}"/"${gfs_ver}"/"${NET}"."${PDY}")/${cyc}/${COMPONENT}/gempak}"
-
-export SENDDBN="${SENDDBN:-NO}"
-export DBNROOT="${DBNROOT:-${UTILROOT}/fakedbn}"
-
-mkdir -m 775 -p "${COMOUT}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GOES
+GRID=0p25 YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_0p25:COM_ATMOS_GEMPAK_TMPL"
+if [[ ! -d "${COM_ATMOS_GEMPAK_0p25}" ]]; then mkdir -m 775 -p "${COM_ATMOS_GEMPAK_0p25}"; fi
 
 #################################################################
 # Execute the script for the regular grib
@@ -49,20 +30,13 @@ cd "${DATA_SPECIAL}" || exit 1
 export DBN_ALERT_TYPE=GFS_GOESSIM_GEMPAK
 export RUN2=gfs_goessim
 export GRIB=goessimpgrb2.0p25.f
-export EXT=" "
-export fend=180
-export finc=3
-export fstart=000
-
-echo "RUNS the Program"
+export EXT=""
 
-########################################################
-# Execute the script.
-"${SRCgfs}/exgfs_atmos_goes_nawips.sh"
+"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}"
 
 #################################################################
 # Execute the script for the 221 grib
-
+#################################################################
 export DATA_SPECIAL221="${DATA}/SPECIAL221"
 mkdir -p "${DATA_SPECIAL221}"
 cd "${DATA_SPECIAL221}" || exit 1
@@ -71,23 +45,12 @@ export DBN_ALERT_TYPE=GFS_GOESSIM221_GEMPAK
 export RUN2=gfs_goessim221
 export GRIB=goessimpgrb2f
 export EXT=".grd221"
-export fend=180
-export finc=3
-export fstart=000
 
-echo "RUNS the Program"
+"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}"
 
-########################################################
-# Execute the script.
-"${SRCgfs}/exgfs_atmos_goes_nawips.sh"
 export err=$?; err_chk
-########################################################
 
-echo "end of program"
 cd "${DATA}" || exit 1
-echo "######################################"
-echo "  SPECIAL.OUT "
-echo "######################################"
 
 ############################################
 # print exec I/O output
@@ -102,4 +65,3 @@ fi
 if [[ "${KEEPDATA}" != "YES" ]] ; then
   rm -rf "${DATA}"
 fi
-
diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
index a98835ada2..72dba0679d 100755
--- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
+++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS
@@ -6,21 +6,10 @@
 # GFS PGRB2_SPECIAL_POST PRODUCT GENERATION
 ############################################
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "npoess" -c "base"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "npoess" -c "base npoess"
 
 export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1}
 
-################################
-# Set up the HOME directory
-################################
-export USHgfs=${USHgfs:-${HOMEgfs}/ush}
-export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
-export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
-export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
-export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo}
-export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product}
-export FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-
 ###################################
 # Specify NET and RUN Name and model
 ####################################
@@ -32,14 +21,14 @@ export model=${model:-gfs}
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES
-GRID="0p50" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_GRID_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_MASTER COM_ATMOS_GOES
+GRID="0p50" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_GRID_TMPL
 
 mkdir -m 775 -p "${COM_ATMOS_GOES}"
 
 #############################################################
 # Execute the script
-"${HOMEgfs}/scripts/exgfs_atmos_grib2_special_npoess.sh"
+"${SCRgfs}/exgfs_atmos_grib2_special_npoess.sh"
 export err=$?;err_chk
 #############################################################
 
diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND
index 721dd27628..13adb11d7d 100755
--- a/jobs/JGFS_ATMOS_POSTSND
+++ b/jobs/JGFS_ATMOS_POSTSND
@@ -7,8 +7,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "postsnd" -c "base postsnd"
 ##############################################
 # Set variables used in the exglobal script
 ##############################################
-export CDUMP=${RUN/enkf}
-
 
 ########################################
 # Runs GFS BUFR SOUNDINGS
@@ -18,23 +16,12 @@ export model=${model:-gfs}
 export SENDDBN=${SENDDBN:-YES}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-###################################
-# Set up the source directories
-###################################
-
-export HOMEbufrsnd=${HOMEbufrsnd:-${HOMEgfs}}
-export EXECbufrsnd=${EXECbufrsnd:-${HOMEbufrsnd}/exec}
-export FIXbufrsnd=${FIXbufrsnd:-${HOMEbufrsnd}/fix/product}
-export PARMbufrsnd=${PARMbufrsnd:-${HOMEbufrsnd}/parm/product}
-export USHbufrsnd=${USHbufrsnd:-${HOMEbufrsnd}/ush}
-export SCRbufrsnd=${SCRbufrsnd:-${HOMEbufrsnd}/scripts}
-
 ##############################
 # Define COM Directories
 ##############################
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY COM_ATMOS_BUFR \
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY COM_ATMOS_BUFR \
   COM_ATMOS_WMO COM_ATMOS_GEMPAK
 
 [[ ! -d ${COM_ATMOS_BUFR} ]] && mkdir -p "${COM_ATMOS_BUFR}"
@@ -44,7 +31,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY COM_ATMOS_BUFR \
 
 ########################################################
 # Execute the script.
-${SCRbufrsnd}/exgfs_atmos_postsnd.sh
+${SCRgfs}/exgfs_atmos_postsnd.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/JGFS_ATMOS_VERIFICATION b/jobs/JGFS_ATMOS_VERIFICATION
index 23a450cd55..48133364e5 100755
--- a/jobs/JGFS_ATMOS_VERIFICATION
+++ b/jobs/JGFS_ATMOS_VERIFICATION
@@ -9,8 +9,8 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "metp" -c "base metp"
 ## HOMEgfs   : /full/path/to/workflow
 ## EXPDIR : /full/path/to/config/files
 ## CDATE  : current analysis date (YYYYMMDDHH)
-## CDUMP  : cycle name (gdas / gfs)
 ## PDY    : current date (YYYYMMDD)
+## RUN    : cycle name (gdas / gfs)
 ## cyc    : current cycle (HH)
 ## SDATE_GFS  : first date of GFS cycle (YYYYMMDDHHMM)
 ## METPCASE : METplus verification use case (g2g1 | g2o1 | pcp1)
@@ -29,7 +29,7 @@ export VDATE=${VDATE:0:8}
 # shellcheck disable=SC2041
 for grid in '1p00'; do
   prod_dir="COM_ATMOS_GRIB_${grid}"
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
 done
 
 # TODO: If none of these are on, why are we running this job?
diff --git a/jobs/JGFS_ATMOS_WAFS b/jobs/JGFS_ATMOS_WAFS
new file mode 100755
index 0000000000..35a916bf1a
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+########################################
+# GFS WAFS PRODUCT GENERATION
+########################################
+date
+export PS4='$SECONDS + ' 
+set -xa
+
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z 
+setpdy.sh
+. ./PDY
+
+export RERUN=${RERUN:-NO}
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT} 
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+############################################
+# Execute the script.
+############################################
+
+${SCRIPTSgfs}/exgfs_atmos_wafs_grib.sh $fcsthrs
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+    rm -rf $DATA
+fi
+
+date
diff --git a/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 b/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
new file mode 100755
index 0000000000..7367ce5a2c
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25
@@ -0,0 +1,153 @@
+#!/bin/sh
+########################################################
+# This job runs the code to blend the US and UK WAFS products at 0.25 deg
+########################################################
+
+date
+export PS4='$SECONDS + ' 
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z 
+setpdy.sh
+. ./PDY
+
+export RERUN=${RERUN:-NO}
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the INPUT and OUTPUT directories
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+export COMINus=${COMINus:-$COMIN}
+export COMINuk=${COMINuk:-$DCOMROOT/$PDY/wgrbbul/ukmet_wafs}
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+export SHOUR=${SHOUR:-06}
+export EHOUR=${EHOUR:-48}
+export FHOUT_GFS=${FHOUT_GFS:-1}
+
+###############################################
+# Specify Timeout Behavior of WAFS blending
+#
+# SLEEP_TIME - Amount of time to wait for
+#              a input file before exiting
+#              an input file before exiting
+#              checking for input files
+###############################################
+# export SLEEP_TIME=300   # changed to 60 to avoid hitting wall_clock when ukmet wafs files are missing ...
+# JY -0129: export SLEEP_TIME=600
+export SLEEP_TIME=900
+export SLEEP_INT=10
+
+####################################
+# Check if this is a restart
+####################################
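+# The control file records the last completed forecast hour; when it exists and RERUN=NO,
+#   blending resumes from the next output hour instead of SHOUR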
+if test -f $COMOUT/$RUN.t${cyc}z.control.wafsblending_0p25
+then
+  modelrecvy=`cat < $COMOUT/${RUN}.t${cyc}z.control.wafsblending_0p25`
+  recvy_pdy=`echo $modelrecvy | cut -c1-8`
+  recvy_cyc=`echo $modelrecvy | cut -c9-10`
+  recvy_shour=`echo $modelrecvy | cut -c11-`
+
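+  # Advance hourly out to f24 and 3-hourly after that (always 3-hourly when FHOUT_GFS=3)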
+  if [ $FHOUT_GFS -eq 3 ] ; then
+      FHINC=03
+  else
+      if [ $recvy_shour -lt 24 ] ; then
+	  FHINC=01
+      else
+	  FHINC=03
+      fi
+  fi
+
+  if test $RERUN = "NO"
+  then
+    if [ $recvy_shour -lt $EHOUR ]
+    then
+      new_shour=`expr $recvy_shour + $FHINC`
+    fi
+    if test $new_shour -ge $SHOUR
+    then
+      export SHOUR=$new_shour
+      if [ $SHOUR -lt 10 ]; then SHOUR=0$SHOUR; fi
+    fi
+    if test $recvy_shour -ge $EHOUR
+    then
+      echo "WAFS blending Already Completed to $EHOUR"
+    else
+      echo "Starting: PDY=$PDY cycle=t${recvy_cyc}z SHOUR=$SHOUR      ."
+    fi
+  fi
+fi
+
+############################################
+# Execute the script.
+############################################
+${SCRIPTSgfs}/exgfs_atmos_wafs_blending_0p25.sh
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY."
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+    rm -rf $DATA
+fi
+
+date
diff --git a/jobs/JGFS_ATMOS_WAFS_GCIP b/jobs/JGFS_ATMOS_WAFS_GCIP
new file mode 100755
index 0000000000..d4e1a4529f
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GCIP
@@ -0,0 +1,140 @@
+#!/bin/sh
+
+############################################
+# GFS GCIP PRODUCT GENERATION
+############################################
+
+date
+export PS4='$SECONDS + ' 
+set -xa
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+############################################
+# Run setpdy and initialize PDY variables
+############################################
+export cycle=t${cyc}z 
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+# For BUFR dump, TMPDIR must be defined
+export TMPDIR=$DATA  # will be overwritten in exgfs script for parallel runs on ffhr
+# For BUFR dump, these two environment variables are defined by module load
+# HOMEobsproc_shared_bufr_dumplist <= module load bufr_dumplist/1.5.0
+# HOMEobsproc_dump   <= module load dumpjb/4.0.0
+
+
+################################################
+# Set up the input/output directory
+################################################
+# model data
+export COMINgfs=${COMINgfs:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+
+# satellite data
+#ftp://satepsanone.nesdis.noaa.gov/2day/gmosaic/
+# Have to change IP address to digital ones, which BSUB can identify
+#export COMINsat=${COMINsat:-ftp://140.90.213.161/2day/gmosaic}
+export COMINsat=${COMINsat:-$DCOMROOT/$PDY/mcidas}
+
+#  radar data
+export radarl2_ver=${radarl2_ver:-v1.2}
+export COMINradar=${COMINradar:-$(compath.py ${envir}/radarl2/$radarl2_ver)/radar.$PDY}
+
+# metar/ships/lightning/pireps
+# data are dumped by $USHobsproc_dump/dumpjb
+#
+
+# COMOUT
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+
+mkdir -p $COMOUT
+
+###############################################
+# Specify Timeout Behavior of WAFS GCIP
+#
+# SLEEP_TIME - how long to wait for inputs before exiting
+# SLEEP_INT  - time interval for checking for inputs
+###############################################
+# JY export SLEEP_TIME=300
+export SLEEP_TIME=600
+export SLEEP_INT=10
+
+############################################
+# Execute the script, parallel run for 000 003
+############################################
+export MPIRUN=${MPIRUN:-"mpiexec -l -np 2 --cpu-bind verbose,core cfp"}
+
+# GCIP runs f000 f003 for each cycle, 4 times/day,
+# to make the output valid every 3 hours 
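+# srun command files need a leading task index; otherwise the two tasks run under MPMD cfp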
+if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+  echo 0 ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 000 >> gcip.cmdfile
+  echo 1 ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 003 >> gcip.cmdfile
+else
+  echo ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 000 >> gcip.cmdfile
+  echo ${SCRIPTSgfs}/exgfs_atmos_wafs_gcip.sh 003 >> gcip.cmdfile
+  export MP_PGMMODEL=mpmd
+fi
+
+$MPIRUN gcip.cmdfile
+
+export err=$?
+if [ $err -eq 0 ] ; then
+  echo "JOB $job HAS COMPLETED NORMALLY!"
+elif [ $err -eq 1 ] ; then
+  echo "WARNING!!! JOB $job incomplete.  Missing satellite data."
+else
+  echo "JOB $job FAILED!!!!"
+fi
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+    rm -rf $DATA
+fi
+
+date
diff --git a/jobs/JGFS_ATMOS_WAFS_GRIB2 b/jobs/JGFS_ATMOS_WAFS_GRIB2
new file mode 100755
index 0000000000..ed4c92979e
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GRIB2
@@ -0,0 +1,124 @@
+#!/bin/sh
+
+########################################
+# GFS WAFS GRIB2 PRODUCT GENERATION
+########################################
+
+date
+export PS4='$SECONDS + ' 
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+############################################
+# Load the UTILITIES module
+############################################
+#### module load prod_util
+#### module load grib_util
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z 
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory 
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+#### if [ $envir = "prod" ] || [ $envir = "para" ] ; then
+####  export COMIN=${COMIN:-$COMROOT/${NET}/${envir}/$RUN.$PDY}
+#### else
+####   export COMIN=${COMIN:-$COMROOT/${NET}/prod/$RUN.$PDY}
+#### fi
+
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+export FHOURS=${FHOURS:-"00 06 09 12 15 18 21 24 27 30 33 36 42 48 54 60 66 72"}
+
+############################################
+# Execute the script.
+############################################
+
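+# Run one copy of the ex-script per forecast hour; NP is the number of entries in FHOURS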
+NP=`echo $FHOURS | wc -w`
+export MPIRUN=${MPIRUN:-"mpiexec -np $NP -cpu-bind verbose,core cfp"}
+
+rm -f wafsgrib2.cmdfile
+ic=0
+for fcsthrs in $FHOURS ; do
+  if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+    echo $ic ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2.sh $fcsthrs >> wafsgrib2.cmdfile
+  else
+    echo ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2.sh $fcsthrs >> wafsgrib2.cmdfile
+    export MP_PGMMODEL=mpmd
+  fi
+  ic=`expr $ic + 1`
+done
+
+$MPIRUN wafsgrib2.cmdfile
+
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+    rm -rf $DATA
+fi
+
+date
+
diff --git a/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 b/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
new file mode 100755
index 0000000000..64570bbf5d
--- /dev/null
+++ b/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25
@@ -0,0 +1,133 @@
+#!/bin/sh
+
+########################################
+# GFS WAFS GRIB 0P25 PRODUCT GENERATION
+########################################
+
+date
+export PS4='$SECONDS + ' 
+set -x
+
+# keep the working directory or not
+export KEEPDATA=${KEEPDATA:-NO}
+
+############################################
+# Working Directory
+############################################
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+############################################
+# Output for executables
+############################################
+export pgmout=OUTPUT.$$
+
+###########################################
+# Run setpdy and initialize PDY variables
+###########################################
+export cycle=t${cyc}z 
+setpdy.sh
+. ./PDY
+
+############################################
+# Set up the NET and RUN
+############################################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+############################################
+# Specify HOME Directory 
+############################################
+export gfs_ver=${gfs_ver:-v16.3.0}
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export EXECgfs=$HOMEgfs/exec
+export FIXgfs=$HOMEgfs/fix/wafs
+export PARMgfs=$HOMEgfs/parm/wafs
+export USHgfs=$HOMEgfs/ush
+export SCRIPTSgfs=$HOMEgfs/scripts
+
+################################################
+# Set up the input/output directory
+################################################
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT}
+export COMOUT=${COMOUT:-$(compath.py -o $NET/$gfs_ver)/$RUN.$PDY/$cyc/$COMPONENT}
+export PCOM=${PCOM:-$COMOUT/wmo}
+
+if [ $SENDCOM = YES ] ; then
+  mkdir -p $COMOUT $PCOM
+fi
+
+############################################
+# print current environment
+############################################
+env
+
+##############################################
+# Set up the forecast hours
+##############################################
+#export SHOUR=${SHOUR:-06}
+# Will change to 120 for 2023 ICAO standard
+#export EHOUR=${EHOUR:-120}
+#export EHOUR=${EHOUR:-36}
+
+export FHOUT_GFS=${FHOUT_GFS:-1}
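+# Select the forecast-hour list to match the GFS output frequency:
+# 27 hours when output is 3-hourly, 39 hours when hourly output is available through f024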
+if [ $FHOUT_GFS -eq 3 ] ; then #27
+    export FHOURS=${FHOURS:-"6 9 12 15 18 21 24 27 30 33 36 39 42 45 48 54 60 66 72 78 84 90 96 102 108 114 120"}
+else #39
+    export FHOURS=${FHOURS:-"6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 27 30 33 36 39 42 45 48 54 60 66 72 78 84 90 96 102 108 114 120"}
+fi
+
+###############################################
+# Specify Timeout Behavior of WAFS blending
+#
+# SLEEP_TIME - Amount of time to wait for
+#              an input file before exiting
+# SLEEP_INT  - Amount of time to wait between
+#              checking for input files
+###############################################
+# export SLEEP_TIME=300   # adjusted to avoid hitting the wall clock when ukmet WAFS files are missing
+export SLEEP_TIME=600
+export SLEEP_INT=10
+
+############################################
+# Execute the script.
+############################################
+NP=`echo $FHOURS | wc -w`
+export MPIRUN=${MPIRUN:-"mpiexec -np $NP -cpu-bind verbose,core cfp"}
+
+rm -f wafsgrib2_0p25.cmdfile
+ic=0
+for fcsthrs in $FHOURS ; do
+  if [ `echo $MPIRUN | cut -d " " -f1` = 'srun' ] ; then
+    echo $ic ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2_0p25.sh $fcsthrs >> wafsgrib2_0p25.cmdfile
+  else
+    echo ${SCRIPTSgfs}/exgfs_atmos_wafs_grib2_0p25.sh $fcsthrs >> wafsgrib2_0p25.cmdfile
+    export MP_PGMMODEL=mpmd
+  fi
+  ic=`expr $ic + 1`
+done
+
+$MPIRUN wafsgrib2_0p25.cmdfile
+
+export err=$?; err_chk
+
+echo "JOB $job HAS COMPLETED NORMALLY!"
+
+############################################
+# print exec output
+############################################
+if [ -e "$pgmout" ] ; then
+  cat $pgmout
+fi
+
+############################################
+# remove temporary working directory
+############################################
+if [ $KEEPDATA != YES ] ; then
+    rm -rf $DATA
+fi
+
+date
+
diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
index ff8e2e9569..455f572da5 100755
--- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
+++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
@@ -20,9 +20,9 @@ GDUMP="gdas"
 ##############################################
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
@@ -31,7 +31,7 @@ mkdir -m 775 -p "${COM_CHEM_ANALYSIS}"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASAEROFINALPY:-${HOMEgfs}/scripts/exglobal_aero_analysis_finalize.py}
+EXSCRIPT=${GDASAEROFINALPY:-${SCRgfs}/exglobal_aero_analysis_finalize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
index 79320b77ee..b2a2893bc0 100755
--- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
+++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
@@ -19,9 +19,9 @@ GDUMP="gdas"
 ##############################################
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_CHEM_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
@@ -30,7 +30,7 @@ mkdir -m 775 -p "${COM_CHEM_ANALYSIS}"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASAEROINITPY:-${HOMEgfs}/scripts/exglobal_aero_analysis_initialize.py}
+EXSCRIPT=${GDASAEROINITPY:-${SCRgfs}/exglobal_aero_analysis_initialize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_RUN b/jobs/JGLOBAL_AERO_ANALYSIS_RUN
index 853909dc03..43749b78c5 100755
--- a/jobs/JGLOBAL_AERO_ANALYSIS_RUN
+++ b/jobs/JGLOBAL_AERO_ANALYSIS_RUN
@@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlr
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASAERORUNSH:-${HOMEgfs}/scripts/exglobal_aero_analysis_run.py}
+EXSCRIPT=${GDASAERORUNSH:-${SCRgfs}/exglobal_aero_analysis_run.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE
index e6c016e703..401feba35f 100755
--- a/jobs/JGLOBAL_ARCHIVE
+++ b/jobs/JGLOBAL_ARCHIVE
@@ -7,30 +7,54 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "arch" -c "base arch"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP=${RUN/enkf}
-
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMOS_GEMPAK \
-  COM_ATMOS_GENESIS COM_ATMOS_HISTORY COM_ATMOS_INPUT COM_ATMOS_MASTER COM_ATMOS_RESTART \
-  COM_ATMOS_TRACK COM_ATMOS_WMO \
-  COM_CHEM_HISTORY COM_CHEM_ANALYSIS\
-  COM_MED_RESTART \
-  COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART \
-  COM_OBS COM_TOP \
-  COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \
-  COM_OCEAN_ANALYSIS \
-  COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \
-  COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+	COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \
+	COMIN_ATMOS_BUFR:COM_ATMOS_BUFR_TMPL \
+	COMIN_ATMOS_GEMPAK:COM_ATMOS_GEMPAK_TMPL \
+	COMIN_ATMOS_GENESIS:COM_ATMOS_GENESIS_TMPL \
+	COMIN_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL \
+	COMIN_ATMOS_INPUT:COM_ATMOS_INPUT_TMPL \
+	COMIN_ATMOS_MASTER:COM_ATMOS_MASTER_TMPL \
+	COMIN_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL \
+	COMIN_ATMOS_TRACK:COM_ATMOS_TRACK_TMPL \
+	COMIN_ATMOS_WMO:COM_ATMOS_WMO_TMPL \
+	COMIN_CHEM_HISTORY:COM_CHEM_HISTORY_TMPL \
+	COMIN_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL \
+	COMIN_MED_RESTART:COM_MED_RESTART_TMPL \
+	COMIN_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL \
+	COMIN_ICE_HISTORY:COM_ICE_HISTORY_TMPL \
+	COMIN_ICE_INPUT:COM_ICE_INPUT_TMPL \
+	COMIN_ICE_RESTART:COM_ICE_RESTART_TMPL \
+	COMIN_ICE_GRIB:COM_ICE_GRIB_TMPL \
+	COMIN_OBS:COM_OBS_TMPL \
+	COMIN_TOP:COM_TOP_TMPL \
+	COMIN_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL \
+	COMIN_OCEAN_RESTART:COM_OCEAN_RESTART_TMPL \
+	COMIN_OCEAN_GRIB:COM_OCEAN_GRIB_TMPL \
+	COMIN_OCEAN_NETCDF:COM_OCEAN_NETCDF_TMPL \
+	COMIN_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \
+	COMIN_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \
+	COMIN_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL \
+	COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL \
+	COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+	COMIN_WAVE_STATION:COM_WAVE_STATION_TMPL \
+	COMIN_WAVE_RESTART:COM_WAVE_RESTART_TMPL \
+	COMIN_ATMOS_OZNMON:COM_ATMOS_OZNMON_TMPL \
+	COMIN_ATMOS_RADMON:COM_ATMOS_RADMON_TMPL \
+	COMIN_ATMOS_MINMON:COM_ATMOS_MINMON_TMPL \
+	COMIN_CONF:COM_CONF_TMPL \
+	COMOUT_ATMOS_TRACK:COM_ATMOS_TRACK_TMPL
 
 for grid in "0p25" "0p50" "1p00"; do
-  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
-  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
+    YMD=${PDY} HH=${cyc} GRID=${grid} declare_from_tmpl -rx \
+       "COMIN_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
 done
 
 ###############################################################
 # Run archive script
 ###############################################################
 
-${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive.sh}
+${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive.py}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
index 37a49e0ae0..549e087694 100755
--- a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
+++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE
@@ -15,7 +15,7 @@ GDUMP_ENS="enkf${GDUMP}"
 # Begin JOB SPECIFIC work
 ##############################################
 # Generate COM variable from template
-MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} generate_com -rx \
+MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_ENS:COM_ATMOS_ANALYSIS_TMPL
 
 mkdir -m 755 -p "${COM_ATMOS_ANALYSIS_ENS}"
@@ -23,7 +23,7 @@ mkdir -m 755 -p "${COM_ATMOS_ANALYSIS_ENS}"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMENSFINALPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_finalize.py}
+EXSCRIPT=${GDASATMENSFINALPY:-${SCRgfs}/exglobal_atmens_analysis_finalize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT b/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT
new file mode 100755
index 0000000000..7179ae0624
--- /dev/null
+++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT
@@ -0,0 +1,35 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+export WIPE_DATA="NO"
+export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensanl atmensanlfv3inc"
+
+##############################################
+# Set variables used in the script
+##############################################
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_fv3_increment.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+exit 0
diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
index c50214aad1..38093ddc35 100755
--- a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
+++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE
@@ -17,15 +17,15 @@ GDUMP="gdas"
 # Begin JOB SPECIFIC work
 ##############################################
 # Generate COM variables from templates
-RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
+RUN=${GDUMP} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL
 
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMENSINITPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_initialize.py}
+EXSCRIPT=${GDASATMENSINITPY:-${SCRgfs}/exglobal_atmens_analysis_initialize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
similarity index 83%
rename from jobs/JGLOBAL_ATMENS_ANALYSIS_RUN
rename to jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
index 0d10c76b05..060b7abd06 100755
--- a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN
+++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF
@@ -3,7 +3,7 @@
 source "${HOMEgfs}/ush/preamble.sh"
 export WIPE_DATA="NO"
 export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}}
-source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl atmensanlletkf"
 
 ##############################################
 # Set variables used in the script
@@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atme
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMENSRUNSH:-${HOMEgfs}/scripts/exglobal_atmens_analysis_run.py}
+EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_letkf.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS
index 9e5850bfc3..5776aa6d13 100755
--- a/jobs/JGLOBAL_ATMOS_ANALYSIS
+++ b/jobs/JGLOBAL_ATMOS_ANALYSIS
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal"
 # Set variables used in the script
 ##############################################
 export CDATE=${CDATE:-${PDY}${cyc}}
-export CDUMP=${RUN/enkf}
+export rCDUMP=${RUN/enkf}
 export COMPONENT="atmos"
 export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"}
 export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"}
@@ -25,19 +25,19 @@ export gcyc=${GDATE:8:2}
 export GDUMP="gdas"
 export GDUMP_ENS="enkf${GDUMP}"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${rCDUMP}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+export APREFIX="${rCDUMP}.t${cyc}z."
 export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z."
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_ATMOS_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
 
-MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL
 
 mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
@@ -79,7 +79,7 @@ export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles"
 # Copy fix file for obsproc  # TODO: Why is this necessary?
 if [[ ${RUN} = "gfs" ]]; then
     mkdir -p ${ROTDIR}/fix
-    cp ${FIXgsi}/prepobs_errtable.global ${ROTDIR}/fix/
+    cp ${FIXgfs}/gsi/prepobs_errtable.global ${ROTDIR}/fix/
 fi
 
 
diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC
index 65a571a974..5b6073254a 100755
--- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC
+++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC
@@ -7,7 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc"
 ##############################################
 # Set variables used in the script
 ##############################################
-export CDUMP="${RUN/enkf}"
+export rCDUMP="${RUN/enkf}"
 export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"}
 
 
@@ -23,16 +23,16 @@ export gcyc=${GDATE:8:2}
 export GDUMP="gdas"
 export GDUMP_ENS="enkf${GDUMP}"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${rCDUMP}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
 export APREFIX="${RUN}.t${cyc}z."
 export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z."
 
-RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
+RUN=${rCDUMP} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_RESTART
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS COM_ATMOS_RESTART
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_OBS_PREV:COM_OBS_TMPL \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
 
diff --git a/jobs/JGLOBAL_ATMOS_ENSSTAT b/jobs/JGLOBAL_ATMOS_ENSSTAT
new file mode 100755
index 0000000000..e09410d581
--- /dev/null
+++ b/jobs/JGLOBAL_ATMOS_ENSSTAT
@@ -0,0 +1,48 @@
+#! /usr/bin/env bash
+
+#
+# Calculate the mean, spread, and other probabilistic fields.
+#
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmos_ensstat" -c "base atmos_ensstat"
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+# Construct COM variables from templates
+# Input directories loop over members, so this is done downstream
+
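+# ${!prod_dir} is bash indirect expansion: it dereferences the COMOUT variable
+# declared just above (e.g. COMOUT_ATMOS_GRIB_0p25) so the directory can be created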
+for grid in '0p25' '0p50' '1p00'; do
+  prod_dir="COMOUT_ATMOS_GRIB_${grid}"
+  MEMDIR="ensstat" GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
+  if [[ ! -d "${!prod_dir}" ]]; then mkdir -m 775 -p "${!prod_dir}"; fi
+done
+
+###############################################################
+# Run exglobal script
+"${SCRgfs}/exglobal_atmos_ensstat.sh"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]]; then
+  cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA:-NO}" = "NO" ]] && rm -rf "${DATA}"
+
+
+exit 0
diff --git a/jobs/JGLOBAL_ATMOS_POST_MANAGER b/jobs/JGLOBAL_ATMOS_POST_MANAGER
index 7c726bc2ad..902fb9ced0 100755
--- a/jobs/JGLOBAL_ATMOS_POST_MANAGER
+++ b/jobs/JGLOBAL_ATMOS_POST_MANAGER
@@ -12,24 +12,15 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "post" -c "base post"
 export NET=${NET:-gfs}
 export RUN=${RUN:-gfs}
 
-####################################
-# Specify Execution Areas
-####################################
-export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}}
-export EXECgfs=${HOMEgfs:-${HOMEgfs}/exec}
-export FIXgfs=${HOMEgfs:-${HOMEgfs}/fix}
-export PARMgfs=${HOMEgfs:-${HOMEgfs}/parm}
-export USHgfs=${HOMEgfs:-${HOMEgfs}/ush}
-
 ###########################
 # Set up EXT variable
 ###########################
 export EXT_FCST=NO
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY
 
 ########################################################
 # Execute the script.
-${HOMEgfs}/scripts/exglobal_atmos_pmgr.sh
+${SCRgfs}/exglobal_atmos_pmgr.sh
 ########################################################
 
diff --git a/jobs/JGLOBAL_ATMOS_PRODUCTS b/jobs/JGLOBAL_ATMOS_PRODUCTS
index 24e7edacdd..8c062a8fed 100755
--- a/jobs/JGLOBAL_ATMOS_PRODUCTS
+++ b/jobs/JGLOBAL_ATMOS_PRODUCTS
@@ -9,11 +9,11 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmos_products" -c "base atmos_produc
 ##############################################
 
 # Construct COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER
 
 for grid in '0p25' '0p50' '1p00'; do
   prod_dir="COM_ATMOS_GRIB_${grid}"
-  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
   if [[ ! -d "${!prod_dir}" ]]; then mkdir -m 775 -p "${!prod_dir}"; fi
 done
 
@@ -22,7 +22,7 @@ export PREFIX="${RUN}.t${cyc}z."
 
 ###############################################################
 # Run exglobal script
-"${HOMEgfs}/scripts/exglobal_atmos_products.sh"
+"${SCRgfs}/exglobal_atmos_products.sh"
 status=$?
 (( status != 0 )) && exit "${status}"
 
diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL
index 0d709e56dd..2822b1e94b 100755
--- a/jobs/JGLOBAL_ATMOS_SFCANL
+++ b/jobs/JGLOBAL_ATMOS_SFCANL
@@ -4,40 +4,35 @@ source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl"
 
 
-##############################################
-# Set variables used in the script
-##############################################
-export CDUMP="${RUN/enkf}"
-
-
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
-GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}")
-# shellcheck disable=
-gPDY=${GDATE:0:8}
-gcyc=${GDATE:8:2}
-export GDUMP="gdas"
+GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H)
+export GDATE
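+# GDATE is the previous cycle (PDY/cyc minus assim_freq hours); previous-cycle
+# obs and restarts below are read from it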
+
+RUN="gdas" YMD=${GDATE:0:8} HH=${GDATE:8:2} declare_from_tmpl -rx \
+  COMIN_OBS_PREV:COM_OBS_TMPL \
+  COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
+
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+  COMIN_OBS:COM_OBS_TMPL \
+  COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \
+  COMIN_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL
 
-export OPREFIX="${CDUMP}.t${cyc}z."
-export GPREFIX="${GDUMP}.t${gcyc}z."
-export APREFIX="${CDUMP}.t${cyc}z."
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+  COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS COM_ATMOS_RESTART \
-    COM_LAND_ANALYSIS
+mkdir -p "${COMOUT_ATMOS_RESTART}"
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
-    COM_OBS_PREV:COM_OBS_TMPL \
-    COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
 ###############################################################
 # Run relevant script
 
 ${SFCANALSH:-${SCRgfs}/exglobal_atmos_sfcanl.sh}
 status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
+(( status != 0 )) && exit "${status}"
 
 
 ##############################################
@@ -48,14 +43,14 @@ status=$?
 # Final processing
 ##############################################
 if [[ -e "${pgmout}" ]] ; then
-  cat ${pgmout}
+  cat "${pgmout}"
 fi
 
 ##########################################
 # Remove the Temporary working directory
 ##########################################
-cd ${DATAROOT}
-[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA}
+cd "${DATAROOT}"
+[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}"
 
 
 exit 0
diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC
index 82c3a5c755..906c195164 100755
--- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC
+++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC
@@ -8,7 +8,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep"
 # Set variables used in the exglobal script
 ##############################################
 export CDATE=${CDATE:-${PDY}${cyc}}
-export CDUMP=${RUN/enkf}
 
 
 ##############################################
@@ -29,7 +28,7 @@ export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}}   # path to tropical cyclone reco
 ##############################################
 # Define COM directories
 ##############################################
-generate_com COM_OBS
+declare_from_tmpl COM_OBS
 if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi
 
 export CRES=$(echo ${CASE} | cut -c2-)
diff --git a/jobs/JGLOBAL_ATMOS_UPP b/jobs/JGLOBAL_ATMOS_UPP
index 9364f33225..1aa62cdbb3 100755
--- a/jobs/JGLOBAL_ATMOS_UPP
+++ b/jobs/JGLOBAL_ATMOS_UPP
@@ -12,14 +12,14 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "upp" -c "base upp"
 ##############################################
 
 # Construct COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER
 if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -m 775 -p "${COM_ATMOS_MASTER}"; fi
 
 
 ###############################################################
 # Run relevant exglobal script
 
-"${HOMEgfs}/scripts/exglobal_atmos_upp.py"
+"${SCRgfs}/exglobal_atmos_upp.py"
 status=$?
 (( status != 0 )) && exit "${status}"
 
diff --git a/jobs/JGLOBAL_ATMOS_VMINMON b/jobs/JGLOBAL_ATMOS_VMINMON
index dbd76aed5b..8ad9b91792 100755
--- a/jobs/JGLOBAL_ATMOS_VMINMON
+++ b/jobs/JGLOBAL_ATMOS_VMINMON
@@ -17,9 +17,9 @@ export gcyc=${GDATE:8:2}
 #############################################
 # TANKverf - WHERE OUTPUT DATA WILL RESIDE
 #############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON
-YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_MINMON
+YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL
 
 export gsistat="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat"
 export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}}
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
index 52a782d7c4..2acb931aa3 100755
--- a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE
@@ -20,9 +20,9 @@ GDUMP="gdas"
 ##############################################
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
@@ -33,7 +33,7 @@ mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMFINALPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_finalize.py}
+EXSCRIPT=${GDASATMFINALPY:-${SCRgfs}/exglobal_atm_analysis_finalize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT b/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
new file mode 100755
index 0000000000..9a9a476065
--- /dev/null
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
@@ -0,0 +1,37 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+export WIPE_DATA="NO"
+export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atmanlfv3inc"
+
+##############################################
+# Set variables used in the script
+##############################################
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_fv3_increment.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+exit 0
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
index 4ef5e6392d..5b26483922 100755
--- a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE
@@ -20,14 +20,14 @@ GDUMP_ENS="enkf${GDUMP}"
 ##############################################
 
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_ATMOS_ANALYSIS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \
     COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
-MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL
 
 mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
@@ -36,7 +36,7 @@ mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMINITPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_initialize.py}
+EXSCRIPT=${GDASATMINITPY:-${SCRgfs}/exglobal_atm_analysis_initialize.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_RUN b/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
similarity index 84%
rename from jobs/JGLOBAL_ATM_ANALYSIS_RUN
rename to jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
index bbfdbe4a1f..552eccf911 100755
--- a/jobs/JGLOBAL_ATM_ANALYSIS_RUN
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
@@ -3,7 +3,7 @@
 source "${HOMEgfs}/ush/preamble.sh"
 export WIPE_DATA="NO"
 export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}}
-source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlvar"
 
 ##############################################
 # Set variables used in the script
@@ -18,7 +18,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun"
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASATMRUNSH:-${HOMEgfs}/scripts/exglobal_atm_analysis_run.py}
+EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_variational.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_ATM_PREP_IODA_OBS b/jobs/JGLOBAL_ATM_PREP_IODA_OBS
index ef0e682468..78414522fc 100755
--- a/jobs/JGLOBAL_ATM_PREP_IODA_OBS
+++ b/jobs/JGLOBAL_ATM_PREP_IODA_OBS
@@ -12,13 +12,13 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "prepatmiodaobs" -c "base prepatmiodao
 # Begin JOB SPECIFIC work
 ##############################################
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
 
 
 ###############################################################
 # Run relevant script
-EXSCRIPT=${BUFR2IODASH:-${HOMEgfs}/ush/run_bufr2ioda.py}
-${EXSCRIPT} "${PDY}${cyc}" "${RUN}" "${DMPDIR}" "${IODAPARM}" "${COM_OBS}/"
+EXSCRIPT=${BUFR2IODASH:-${USHgfs}/run_bufr2ioda.py}
+${EXSCRIPT} "${PDY}${cyc}" "${RUN}" "${DMPDIR}" "${PARMgfs}/gdas/ioda/bufr2ioda" "${COM_OBS}/"
 status=$?
 [[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}")
 
diff --git a/jobs/JGLOBAL_CLEANUP b/jobs/JGLOBAL_CLEANUP
index ad938ccf60..f3cfcae511 100755
--- a/jobs/JGLOBAL_CLEANUP
+++ b/jobs/JGLOBAL_CLEANUP
@@ -3,15 +3,15 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup"
 
-"${HOMEgfs}/scripts/exglobal_cleanup.sh"
+"${SCRgfs}/exglobal_cleanup.sh"
 status=$?
-[[ ${status} -ne 0 ]] && exit "${status}"
+(( status != 0 )) && exit "${status}"
 
 ##########################################
 # Remove the Temporary working directory
 ##########################################
-cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. ABORT!"; exit 1)
-[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}"
+# DATAROOT="${STMP}/RUNDIRS/${PSLOT}/${RUN}.${PDY}${cyc}"
+# is removed in exglobal_cleanup.sh, nothing to do here.
 
 exit 0
 
diff --git a/jobs/JGLOBAL_EXTRACTVARS b/jobs/JGLOBAL_EXTRACTVARS
new file mode 100755
index 0000000000..3478ca3976
--- /dev/null
+++ b/jobs/JGLOBAL_EXTRACTVARS
@@ -0,0 +1,47 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "extractvars" -c "base extractvars"
+
+# Set COM Paths
+for grid in '0p25' '0p50' '1p00'; do
+  prod_dir="COMIN_ATMOS_GRIB_${grid}"
+  GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
+  if [[ ! -d "${!prod_dir}" ]]; then mkdir -p "${!prod_dir}"; fi
+done
+
+YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+    "COMIN_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL" \
+    "COMIN_OCEAN_GRIB:COM_OCEAN_GRIB_TMPL" \
+    "COMIN_OCEAN_NETCDF:COM_OCEAN_NETCDF_TMPL" \
+    "COMIN_ICE_HISTORY:COM_ICE_HISTORY_TMPL" \
+    "COMIN_ICE_GRIB:COM_ICE_GRIB_TMPL" \
+    "COMIN_ICE_NETCDF:COM_ICE_NETCDF_TMPL" \
+    "COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL"
+
+if [[ "${DO_ATM}" == "YES" ]]; then
+  if [[ ! -d "${ARC_RFCST_PROD_ATMOS_F2D}" ]]; then mkdir -p "${ARC_RFCST_PROD_ATMOS_F2D}"; fi
+  if [[ ! -d "${ARC_RFCST_PROD_ATMOS_F3D}" ]]; then mkdir -p "${ARC_RFCST_PROD_ATMOS_F3D}"; fi
+fi
+if [[ "${DO_OCN}" == "YES" ]]; then
+  if [[ ! -d "${ARC_RFCST_PROD_OCN}" ]]; then mkdir -p "${ARC_RFCST_PROD_OCN}"; fi
+fi
+if [[ "${DO_ICE}" == "YES" ]]; then
+  if [[ ! -d "${ARC_RFCST_PROD_ICE}" ]]; then mkdir -p "${ARC_RFCST_PROD_ICE}"; fi
+fi
+if [[ "${DO_WAVE}" == "YES" ]]; then
+  if [[ ! -d "${ARC_RFCST_PROD_WAV}" ]]; then mkdir -p "${ARC_RFCST_PROD_WAV}"; fi
+fi
+
+# Execute the Script
+"${SCRgfs}/exglobal_extractvars.sh"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || true
+[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}"
+
+exit 0
diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index b2825af54f..9998470618 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -1,40 +1,30 @@
 #! /usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "fcst" -c "base fcst"
 
-##############################################
-# Set variables used in the script
-##############################################
-export CDUMP=${RUN/enkf}
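+# 10# forces base-10 arithmetic so zero-padded member numbers (e.g. ENSMEM=008)
+# are not treated as invalid octal; members > 0 use the ensemble forecast (efcs) config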
+if (( 10#${ENSMEM:-0} > 0 )); then
+  export DATAjob="${DATAROOT}/${RUN}efcs${ENSMEM}.${PDY:-}${cyc}"
+  export DATA="${DATAjob}/${jobid}"
+  source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs"
+else
+  export DATAjob="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}"
+  export DATA="${DATAjob}/${jobid}"
+  source "${HOMEgfs}/ush/jjob_header.sh" -e "fcst" -c "base fcst"
+fi
+
+# Create the directory to hold restarts and output from the model in stmp
+export DATArestart="${DATAjob}/restart"
+if [[ ! -d "${DATArestart}" ]]; then mkdir -p "${DATArestart}"; fi
+export DATAoutput="${DATAjob}/output"
+if [[ ! -d "${DATAoutput}" ]]; then mkdir -p "${DATAoutput}"; fi
 
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 
 # Restart conditions for GFS cycle come from GDAS
-rCDUMP=${CDUMP}
-[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas"
-
-# Forecast length for GFS forecast
-case ${RUN} in
-  *gfs | *gefs)
-    # shellcheck disable=SC2153
-    export FHMAX=${FHMAX_GFS}
-    # shellcheck disable=SC2153
-    export FHOUT=${FHOUT_GFS}
-    export FHMAX_HF=${FHMAX_HF_GFS}
-    export FHOUT_HF=${FHOUT_HF_GFS}
-    ;;
-  *gdas)
-    export FHMAX_HF=0
-    export FHOUT_HF=0
-    ;;
-  *)
-    echo "FATAL ERROR: Unsupported RUN '${RUN}'"
-    exit 1
-esac
-
+rCDUMP="${RUN}"
+export rCDUMP="${RUN/gfs/gdas}"
 
 # Ignore possible spelling error (nothing is misspelled)
 # shellcheck disable=SC2153
@@ -45,44 +35,71 @@ declare -rx gPDY="${GDATE:0:8}"
 declare -rx gcyc="${GDATE:8:2}"
 
 # Construct COM variables from templates (see config.com)
-YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \
-  COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP COM_CONF
-
-RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
-  COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
-
-if [[ ${DO_WAVE} == "YES" ]]; then
-  YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY
-  RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
-    COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL
-  declare -rx RUNwave="${RUN}wave"
+YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+  COMIN_ATMOS_INPUT:COM_ATMOS_INPUT_TMPL \
+  COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \
+  COMOUT_CONF:COM_CONF_TMPL \
+  COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL \
+  COMOUT_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL \
+  COMOUT_ATMOS_MASTER:COM_ATMOS_MASTER_TMPL
+
+RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \
+  COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
+
+if [[ "${DO_WAVE}" == "YES" ]]; then
+  YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+    COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+    COMOUT_WAVE_RESTART:COM_WAVE_RESTART_TMPL \
+    COMOUT_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL
+  RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \
+    COMIN_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL
 fi
 
-if [[ ${DO_OCN} == "YES" ]]; then
-  YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \
-    COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS
-  RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
-    COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL
+if [[ "${DO_OCN}" == "YES" ]]; then
+  YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+    COMIN_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \
+    COMIN_OCEAN_INPUT:COM_OCEAN_INPUT_TMPL \
+    COMOUT_MED_RESTART:COM_MED_RESTART_TMPL \
+    COMOUT_OCEAN_RESTART:COM_OCEAN_RESTART_TMPL \
+    COMOUT_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL
+  RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \
+    COMIN_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL \
+    COMIN_MED_RESTART_PREV:COM_MED_RESTART_TMPL
 fi
 
-if [[ ${DO_ICE} == "YES" ]]; then
-  YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART
-  RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \
-    COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
+if [[ "${DO_ICE}" == "YES" ]]; then
+  YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+    COMIN_ICE_INPUT:COM_ICE_INPUT_TMPL \
+    COMIN_ICE_ANALYSIS:COM_ICE_ANALYSIS_TMPL \
+    COMOUT_ICE_RESTART:COM_ICE_RESTART_TMPL \
+    COMOUT_ICE_HISTORY:COM_ICE_HISTORY_TMPL
+  RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \
+    COMIN_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
 fi
 
-if [[ ${DO_AERO} == "YES" ]]; then
-  YMD=${PDY} HH=${cyc} generate_com -rx COM_CHEM_HISTORY
+if [[ "${DO_AERO}" == "YES" ]]; then
+  YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \
+    COMOUT_CHEM_HISTORY:COM_CHEM_HISTORY_TMPL
 fi
 
 
 ###############################################################
 # Run relevant exglobal script
-
-${FORECASTSH:-${SCRgfs}/exglobal_forecast.sh}
+###############################################################
+"${FORECASTSH:-${SCRgfs}/exglobal_forecast.sh}"
 status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
-
+(( status != 0 )) && exit "${status}"
+
+# Send DBN alerts for EnKF
+# TODO: Should these be in post manager instead?
+if [[ "${RUN}" =~ "enkf" ]] && [[ "${SENDDBN:-}" == YES ]]; then
+  # Step through output hours; only the 3-hourly surface history files are alerted
+  for (( fhr = FHOUT; fhr <= FHMAX; fhr = fhr + FHOUT )); do
+    if (( fhr % 3 == 0 )); then
+      fhr3=$(printf %03i "${fhr}")
+      "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${fhr3}.nc"
+    fi
+  done
+fi
 
 ##############################################
 # End JOB SPECIFIC work
@@ -91,15 +108,14 @@ status=$?
 ##############################################
 # Final processing
 ##############################################
-if [ -e "${pgmout}" ] ; then
-  cat ${pgmout}
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
 fi
 
 ##########################################
 # Remove the Temporary working directory
 ##########################################
-cd ${DATAROOT}
-[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA}
-
+cd "${DATAROOT}" || true
+[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}"  # do not remove DATAjob. It contains DATAoutput
 
 exit 0
diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF
new file mode 100755
index 0000000000..38dc3049f9
--- /dev/null
+++ b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF
@@ -0,0 +1,50 @@
+#! /usr/bin/env bash
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanalletkf" -c "base ocnanal marineanalletkf"
+
+##############################################
+# Set variables used in the script
+##############################################
+# Ignore possible spelling error (nothing is misspelled)
+# shellcheck disable=SC2153
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
+
+gPDY=${GDATE:0:8}
+gcyc=${GDATE:8:2}
+
+YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+   COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
+   COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
+
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASOCNLETKFPY:-${SCRgfs}/exgdas_global_marine_analysis_letkf.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}"
+
+exit 0
diff --git a/jobs/JGLOBAL_MARINE_BMAT b/jobs/JGLOBAL_MARINE_BMAT
new file mode 100755
index 0000000000..3dacec9278
--- /dev/null
+++ b/jobs/JGLOBAL_MARINE_BMAT
@@ -0,0 +1,66 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+if (( 10#${ENSMEM:-0} > 0 )); then
+  export DATAjob="${DATAROOT}/${RUN}marinebmat.${PDY:-}${cyc}"
+  export DATA="${DATAjob}/${jobid}"
+  # Create the directory to hold ensemble perturbations
+  export DATAenspert="${DATAjob}/enspert"
+  if [[ ! -d "${DATAenspert}" ]]; then mkdir -p "${DATAenspert}"; fi
+fi
+
+# source config.base, config.ocnanal and config.marinebmat
+# and pass marinebmat to ${machine}.env
+source "${HOMEgfs}/ush/jjob_header.sh" -e "marinebmat" -c "base ocnanal marinebmat"
+
+##############################################
+# Set variables used in the script
+##############################################
+# shellcheck disable=SC2153
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
+gPDY=${GDATE:0:8}
+gcyc=${GDATE:8:2}
+export GDUMP="gdas"
+export GDUMP_ENS="enkf${GDUMP}"
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+# Generate COM variables from templates
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+   COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
+   COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
+
+RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+    COMIN_OCEAN_HISTORY_ENS_PREV:COM_OCEAN_HISTORY_TMPL \
+    COMIN_ICE_HISTORY_ENS_PREV:COM_ICE_HISTORY_TMPL
+
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMOUT_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \
+   COMOUT_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL
+
+mkdir -p "${COMOUT_OCEAN_BMATRIX}"
+mkdir -p "${COMOUT_ICE_BMATRIX}"
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRgfs}/exglobal_marinebmat.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+exit 0
diff --git a/jobs/JGLOBAL_OCEANICE_PRODUCTS b/jobs/JGLOBAL_OCEANICE_PRODUCTS
new file mode 100755
index 0000000000..4303e26c47
--- /dev/null
+++ b/jobs/JGLOBAL_OCEANICE_PRODUCTS
@@ -0,0 +1,40 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products"
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+# Construct COM variables from templates
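+# ${COMPONENT^^} upper-cases the component name (e.g. ocean -> OCEAN) to select
+# the matching COM history/GRIB/netCDF templates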
+YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx "COM_${COMPONENT^^}_HISTORY"
+YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx "COM_${COMPONENT^^}_GRIB"
+YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx "COM_${COMPONENT^^}_NETCDF"
+
+###############################################################
+# Run exglobal script
+"${SCRgfs}/exglobal_oceanice_products.py"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]]; then
+  cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA:-NO}" == "NO" ]] && rm -rf "${DATA}"
+
+
+exit 0
diff --git a/jobs/JGLOBAL_PREP_EMISSIONS b/jobs/JGLOBAL_PREP_EMISSIONS
new file mode 100755
index 0000000000..84edac8e50
--- /dev/null
+++ b/jobs/JGLOBAL_PREP_EMISSIONS
@@ -0,0 +1,35 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "prep_emissions" -c "base prep_emissions"
+
+##############################################
+# Set variables used in the script
+##############################################
+# TODO: Set local variables used in this script e.g. GDATE may be needed for previous cycle
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+# Generate COM variables from templates
+# TODO: Add necessary COMIN, COMOUT variables for this job
+
+###############################################################
+# Run relevant script
+EXSCRIPT=${PREP_EMISSIONS_PY:-${SCRgfs}/exglobal_prep_emissions.py}
+${EXSCRIPT}
+status=$?
+# Use a group command so the exit aborts the job (a subshell would swallow it)
+(( status != 0 )) && { echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}"; }
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+exit 0
diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGLOBAL_PREP_OBS_AERO
similarity index 62%
rename from jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT
rename to jobs/JGLOBAL_PREP_OBS_AERO
index 4a8242abfb..7fe701898f 100755
--- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT
+++ b/jobs/JGLOBAL_PREP_OBS_AERO
@@ -1,34 +1,28 @@
-#!/bin/bash
-source "${HOMEgfs}/ush/preamble.sh"
-export WIPE_DATA="NO"
-
-export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun"
+#! /usr/bin/env bash
 
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "prepobsaero" -c "base prepobsaero"
 
 ##############################################
 # Set variables used in the script
 ##############################################
 
+export COMIN_OBS="${DATA}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_OBS:COM_OBS_TMPL
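+# Note: obs inputs are read from the job work directory (COMIN_OBS=DATA) and the
+# prepared aerosol obs are published to COMOUT_OBS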
 
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS
-
-mkdir -p "${COM_OCEAN_ANALYSIS}"
-
-export COMOUT=${COM_OCEAN_ANALYSIS}
-
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASOCNBMATSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh}
+EXSCRIPT=${GDASPREPAEROOBSPY:-${SCRgfs}/exglobal_prep_obs_aero.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
+
 ##############################################
 # End JOB SPECIFIC work
 ##############################################
@@ -41,8 +35,9 @@ if [[ -e "${pgmout}" ]] ; then
 fi
 
 ##########################################
-# Do not remove the Temporary working directory (do this in POST)
+# Remove the Temporary working directory
 ##########################################
-cd "${DATAROOT}" || exit 1
+cd "${DATAROOT}" || exit
+[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}"
 
 exit 0
diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS
index a100aca89c..339d90567e 100755
--- a/jobs/JGLOBAL_PREP_OCEAN_OBS
+++ b/jobs/JGLOBAL_PREP_OCEAN_OBS
@@ -8,19 +8,16 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "prepoceanobs" -c "base prepoceanobs"
 ##############################################
 
 export COMIN_OBS="${DATA}"
-YMD=${PDY} HH=${cyc} generate_com -rx COMOUT_OBS:COM_OBS_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_OBS:COM_OBS_TMPL
 
 ##############################################
 # Begin JOB SPECIFIC work
 ##############################################
 
-# Add prep_marine_obs.py to PYTHONPATH
-export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH}
-
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${GDASPREPOCNOBSPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py}
+EXSCRIPT=${GDASPREPOCNOBSPY:-${SCRgfs}/exglobal_prep_ocean_obs.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
@@ -38,7 +35,7 @@ if [[ -e "${pgmout}" ]] ; then
 fi
 
 ##########################################
-# Handle the temporary working directory 
+# Handle the temporary working directory
 ##########################################
 cd "${DATAROOT}" || (echo "FATAL ERROR: ${DATAROOT} does not exist. ABORT!"; exit 1)
 [[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}"
diff --git a/jobs/JGLOBAL_PREP_LAND_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS
similarity index 78%
rename from jobs/JGLOBAL_PREP_LAND_OBS
rename to jobs/JGLOBAL_PREP_SNOW_OBS
index 025adae529..f5ea3fc122 100755
--- a/jobs/JGLOBAL_PREP_LAND_OBS
+++ b/jobs/JGLOBAL_PREP_SNOW_OBS
@@ -1,7 +1,8 @@
 #! /usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "preplandobs" -c "base preplandobs"
+export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "prepsnowobs" -c "base prepsnowobs"
 
 ##############################################
 # Set variables used in the script
@@ -17,14 +18,14 @@ GDUMP="gdas"
 # Begin JOB SPECIFIC work
 ##############################################
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
 ###############################################################
 # Run relevant script
-EXSCRIPT=${GDASLANDPREPSH:-${HOMEgfs}/scripts/exglobal_prep_land_obs.py}
+EXSCRIPT=${GDASSNOWPREPPY:-${SCRgfs}/exglobal_prep_snow_obs.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}")
diff --git a/jobs/JGLOBAL_LAND_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS
similarity index 73%
rename from jobs/JGLOBAL_LAND_ANALYSIS
rename to jobs/JGLOBAL_SNOW_ANALYSIS
index 3ff7e72a35..b7d8c37060 100755
--- a/jobs/JGLOBAL_LAND_ANALYSIS
+++ b/jobs/JGLOBAL_SNOW_ANALYSIS
@@ -1,7 +1,8 @@
 #! /usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "landanl" -c "base landanl"
+export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl"
 
 ##############################################
 # Set variables used in the script
@@ -17,17 +18,17 @@ GDUMP="gdas"
 # Begin JOB SPECIFIC work
 ##############################################
 # Generate COM variables from templates
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_LAND_ANALYSIS COM_CONF
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_SNOW_ANALYSIS COM_CONF
 
-RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
 
-mkdir -m 775 -p "${COM_LAND_ANALYSIS}" "${COM_CONF}"
+mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}"
 
 ###############################################################
 # Run relevant script
 
-EXSCRIPT=${LANDANLPY:-${HOMEgfs}/scripts/exglobal_land_analysis.py}
+EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_analysis.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC
index 4c94990fde..52225ac9d3 100755
--- a/jobs/JGLOBAL_STAGE_IC
+++ b/jobs/JGLOBAL_STAGE_IC
@@ -5,12 +5,13 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic"
 
 # Restart conditions for GFS cycle come from GDAS
 # shellcheck disable=SC2153
-rCDUMP=${CDUMP}
-[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas"
+rCDUMP=${RUN}
+# shellcheck disable=SC2153
+[[ ${RUN} = "gfs" ]] && export rCDUMP="gdas"
 export rCDUMP
 
 # Execute the Script
-"${HOMEgfs}/scripts/exglobal_stage_ic.sh"
+"${SCRgfs}/exglobal_stage_ic.sh"
 
 ##########################################
 # Remove the Temporary working directory
diff --git a/jobs/JGLOBAL_WAVE_GEMPAK b/jobs/JGLOBAL_WAVE_GEMPAK
index 89c389fa11..9822e4d416 100755
--- a/jobs/JGLOBAL_WAVE_GEMPAK
+++ b/jobs/JGLOBAL_WAVE_GEMPAK
@@ -13,13 +13,13 @@ export DBN_ALERT_TYPE=GFS_WAVE_GEMPAK
 export SENDDBN=${SENDDBN:-YES}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_GEMPAK
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_GRID COM_WAVE_GEMPAK
 
 if [[ ! -d ${COM_WAVE_GEMPAK} ]]; then mkdir -p "${COM_WAVE_GEMPAK}"; fi
 
 ########################################################
 # Execute the script.
-${HOMEgfs}/scripts/exgfs_wave_nawips.sh
+${SCRgfs}/exgfs_wave_nawips.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 ###################################
diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT
index 7ad742f25a..3a0a8b43a8 100755
--- a/jobs/JGLOBAL_WAVE_INIT
+++ b/jobs/JGLOBAL_WAVE_INIT
@@ -9,23 +9,18 @@ export errchk=${errchk:-err_chk}
 
 export MP_PULSE=0
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL
 
-mkdir -m 775 -p ${COM_WAVE_PREP}
+if [[ ! -d "${COMOUT_WAVE_PREP}" ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi
 
 # Set mpi serial command
 export wavempexec=${wavempexec:-"mpirun -n"}
 export wave_mpmd=${wave_mpmd:-"cfp"}
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_init.sh
+${SCRgfs}/exgfs_wave_init.sh
 
 ##########################################
 # Remove the Temporary working directory
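
The wave jobs also replace the single COM_WAVE_* variables with explicit COMIN_*/COMOUT_* names, both declared from the same template, so a job's read and write locations are named separately. A small sketch of the pattern used throughout these files (the template value shown is hypothetical, only to make the expansion concrete):

    # COM_WAVE_PREP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/wave/prep'   # illustrative value
    YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
        COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
        COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL
    # Today both names resolve to the same directory, but input and output roots
    # can later diverge without touching the job scripts.
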
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT
index 9d404077fd..808ba7d9f3 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNT
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT
@@ -8,16 +8,13 @@ export errchk=${errchk:-err_chk}
 
 export MP_PULSE=0
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+   COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+   COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
 
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
 
 # Set wave model ID tag to include member number
 # if ensemble; waveMEMB var empty in deterministic
@@ -34,7 +31,7 @@ export DOBLL_WAV='NO' # Bulletin post
 export DOBNDPNT_WAV='YES'  # Do boundary points
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh
+${SCRgfs}/exgfs_wave_post_pnt.sh
 err=$?
 if [ ${err} -ne 0 ]; then
   echo "FATAL ERROR: ex-script of GWES_POST failed!"
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
index 3de49fcc3b..c85b1cb5f3 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
@@ -12,16 +12,13 @@ export CDATE=${PDY}${cyc}
 
 export MP_PULSE=0
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+   COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+   COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
 
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
 
 # Set wave model ID tag to include member number
 # if ensemble; waveMEMB var empty in deterministic
@@ -38,7 +35,7 @@ export DOBLL_WAV='YES' # Bulletin post
 export DOBNDPNT_WAV='YES'  #boundary points
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh
+${SCRgfs}/exgfs_wave_post_pnt.sh
 err=$?
 if [ ${err} -ne 0 ]; then
   echo "FATAL ERROR: ex-script of GFS_WAVE_POST_PNT failed!"
diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT
index 1b573435a3..769159be61 100755
--- a/jobs/JGLOBAL_WAVE_POST_PNT
+++ b/jobs/JGLOBAL_WAVE_POST_PNT
@@ -8,16 +8,13 @@ export errchk=${errchk:-err_chk}
 
 export MP_PULSE=0
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+   COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+   COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
 
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
 
 # Set wave model ID tag to include member number
 # if ensemble; waveMEMB var empty in deterministic
@@ -35,7 +32,7 @@ export DOBNDPNT_WAV='NO'  #not boundary points
 
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh
+${SCRgfs}/exgfs_wave_post_pnt.sh
 err=$?
 if [ ${err} -ne 0 ]; then
   echo "FATAL ERROR: ex-script of GWES_POST failed!"
diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS
index 231b793de7..53ac4b2083 100755
--- a/jobs/JGLOBAL_WAVE_POST_SBS
+++ b/jobs/JGLOBAL_WAVE_POST_SBS
@@ -8,16 +8,16 @@ export errchk=${errchk:-err_chk}
 
 export MP_PULSE=0
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_GRID
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+    COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+    COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL \
+    COMOUT_WAVE_GRID:COM_WAVE_GRID_TMPL
 
-mkdir -p "${COM_WAVE_GRID}"
+for out_dir in "${COMOUT_WAVE_PREP}" "${COMOUT_WAVE_GRID}"; do
+    if [[ ! -d "${out_dir}" ]]; then mkdir -p "${out_dir}"; fi
+done
 
 
 # Set wave model ID tag to include member number
@@ -32,7 +32,7 @@ export WAV_MOD_TAG=${RUN}wave${waveMEMB}
 export CFP_VERBOSE=1
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_post_gridded_sbs.sh
+${SCRgfs}/exgfs_wave_post_gridded_sbs.sh
 err=$?
 if [ ${err} -ne 0 ]; then
   echo "FATAL ERROR: ex-script of GWES_POST failed!"
diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
index 3a2947af56..ebecf716af 100755
--- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
+++ b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
@@ -13,14 +13,16 @@ export SENDDBN_NTC=${SENDDBN_NTC:-YES}
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_STATION COM_WAVE_WMO
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_WAVE_STATION:COM_WAVE_STATION_TMPL \
+    COMOUT_WAVE_WMO:COM_WAVE_WMO_TMPL
 
-if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi
+if [[ ! -d ${COMOUT_WAVE_WMO} ]]; then mkdir -p "${COMOUT_WAVE_WMO}"; fi
 
 ###################################
 # Execute the Script
 
-${HOMEgfs}/scripts/exgfs_wave_prdgen_bulls.sh
+${SCRgfs}/exgfs_wave_prdgen_bulls.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
index 4b32c709bf..208b36c535 100755
--- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
+++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
@@ -13,16 +13,17 @@ export SENDDBN_NTC=${SENDDBN_NTC:-YES}
 export SENDDBN=${SENDDBN:-NO}
 export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
 
-YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_WMO
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL \
+    COMOUT_WAVE_WMO:COM_WAVE_WMO_TMPL
 
-if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi
+if [[ ! -d ${COMOUT_WAVE_WMO} ]]; then mkdir -p "${COMOUT_WAVE_WMO}"; fi
 
-mkdir -p "${COM_WAVE_WMO}"
 
 ###################################
 # Execute the Script
 ###################################
-${HOMEgfs}/scripts/exgfs_wave_prdgen_gridded.sh
+${SCRgfs}/exgfs_wave_prdgen_gridded.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
 
diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP
index f246045f53..5d4e76dc8a 100755
--- a/jobs/JGLOBAL_WAVE_PREP
+++ b/jobs/JGLOBAL_WAVE_PREP
@@ -6,8 +6,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "waveprep" -c "base wave waveprep"
 # Add default errchk = err_chk
 export errchk=${errchk:-err_chk}
 
-export CDUMP=${RUN/enkf}
-
 # Set rtofs PDY
 export RPDY=${PDY}
 
@@ -16,19 +14,16 @@ export MP_PULSE=0
 # CDO required for processing RTOFS currents
 export CDO=${CDO_ROOT}/bin/cdo
 
-# Path to HOME Directory
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
-export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave}
-export USHwave=${USHwave:-${HOMEgfs}/ush}
-export EXECwave=${EXECwave:-${HOMEgfs}/exec}
-
 # Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_WAVE_PREP
-generate_com -rx COM_RTOFS
-[[ ! -d ${COM_WAVE_PREP} ]] && mkdir -m 775 -p "${COM_WAVE_PREP}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+   COMIN_OBS:COM_OBS_TMPL \
+   COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+   COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL \
+   COMIN_RTOFS:COM_RTOFS_TMPL
+if [[ ! -d ${COMOUT_WAVE_PREP} ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi
 
 # Execute the Script
-${HOMEgfs}/scripts/exgfs_wave_prep.sh
+${SCRgfs}/exgfs_wave_prep.sh
 
 ##########################################
 # Remove the Temporary working directory
diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh
index 16bb6887fd..39dea71810 100755
--- a/jobs/rocoto/aeroanlfinal.sh
+++ b/jobs/rocoto/aeroanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
 export job="aeroanlfinal"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh
index 9aaf255782..7a1cf885c1 100755
--- a/jobs/rocoto/aeroanlinit.sh
+++ b/jobs/rocoto/aeroanlinit.sh
@@ -11,12 +11,6 @@ status=$?
 export job="aeroanlinit"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh
index bcd86e3fbf..529bb2d7d1 100755
--- a/jobs/rocoto/aeroanlrun.sh
+++ b/jobs/rocoto/aeroanlrun.sh
@@ -11,12 +11,6 @@ status=$?
 export job="aeroanlrun"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN"
diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh
index d949b7d76f..083e319bf5 100755
--- a/jobs/rocoto/arch.sh
+++ b/jobs/rocoto/arch.sh
@@ -8,6 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
+###############################################################
+# setup python path for workflow utilities and tasks
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python"
+export PYTHONPATH
+
 export job="arch"
 export jobid="${job}.$$"
 
diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh
index 3d3c3ba9e6..a12894ed1e 100755
--- a/jobs/rocoto/atmanlfinal.sh
+++ b/jobs/rocoto/atmanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
 export job="atmanlfinal"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/atmanlfv3inc.sh b/jobs/rocoto/atmanlfv3inc.sh
new file mode 100755
index 0000000000..5261c15f09
--- /dev/null
+++ b/jobs/rocoto/atmanlfv3inc.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="atmanlfv3inc"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh
index 13c7d8710b..5329200590 100755
--- a/jobs/rocoto/atmanlinit.sh
+++ b/jobs/rocoto/atmanlinit.sh
@@ -11,12 +11,6 @@ status=$?
 export job="atmanlinit"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/atmanlvar.sh b/jobs/rocoto/atmanlvar.sh
new file mode 100755
index 0000000000..7df7f59dd1
--- /dev/null
+++ b/jobs/rocoto/atmanlvar.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="atmanlvar"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh
index 5ffaa92754..fc29bdd9af 100755
--- a/jobs/rocoto/atmensanlfinal.sh
+++ b/jobs/rocoto/atmensanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
 export job="atmensanlfinal"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/atmensanlfv3inc.sh b/jobs/rocoto/atmensanlfv3inc.sh
new file mode 100755
index 0000000000..7f57e8d618
--- /dev/null
+++ b/jobs/rocoto/atmensanlfv3inc.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="atmensanlfv3inc"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh
index 2c2204548a..1cd8129df6 100755
--- a/jobs/rocoto/atmensanlinit.sh
+++ b/jobs/rocoto/atmensanlinit.sh
@@ -11,12 +11,6 @@ status=$?
 export job="atmensanlinit"
 export jobid="${job}.$$"
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/atmensanlletkf.sh b/jobs/rocoto/atmensanlletkf.sh
new file mode 100755
index 0000000000..0ca86bfb43
--- /dev/null
+++ b/jobs/rocoto/atmensanlletkf.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="atmensanlletkf"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/atmos_ensstat.sh b/jobs/rocoto/atmos_ensstat.sh
new file mode 100755
index 0000000000..76ed7f0a72
--- /dev/null
+++ b/jobs/rocoto/atmos_ensstat.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+## atmosphere ensemble statistics driver script
+## FHR3 : forecast hour to post-process (e.g. f000, f003, f006, ...)
+###############################################################
+
+# Source FV3GFS workflow modules
+. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+if (( status != 0 )); then exit "${status}"; fi
+
+export job="atmos_ensstat"
+export jobid="${job}.$$"
+
+export FORECAST_HOUR=$(( 10#${FHR3} ))
+
+###############################################################
+# Execute the JJOB
+###############################################################
+"${HOMEgfs}/jobs/JGLOBAL_ATMOS_ENSSTAT"
+
+exit $?
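
FHR3 arrives as a zero-padded three-digit string, and the 10# prefix in the arithmetic above forces base-10 so hours such as 008 and 009 are not rejected as invalid octal. A minimal demonstration:

    FHR3="009"
    # echo $(( FHR3 ))        # fails: "value too great for base" (leading zero implies octal)
    echo $(( 10#${FHR3} ))    # -> 9
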
diff --git a/jobs/rocoto/atmos_products.sh b/jobs/rocoto/atmos_products.sh
index 472f202de8..f6adbcf861 100755
--- a/jobs/rocoto/atmos_products.sh
+++ b/jobs/rocoto/atmos_products.sh
@@ -15,21 +15,13 @@ if (( status != 0 )); then exit "${status}"; fi
 export job="atmos_products"
 export jobid="${job}.$$"
 
-###############################################################
-# shellcheck disable=SC2153,SC2001
-IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array
+# The negative sign must come before the base prefix (10#)
+fhr3_base="10#${FHR3}"
+export FORECAST_HOUR=$(( ${fhr3_base/10#-/-10#} ))
 
-#---------------------------------------------------------------
+###############################################################
 # Execute the JJOB
-for fhr in "${fhrs[@]}"; do
-    # The analysis fhr is -001.  Performing math on negative, leading 0 integers is tricky.
-    # The negative needs to be in front of "10#", so do some regex magic to make it happen.
-    fhr="10#${fhr}"
-    fhr=${fhr//10\#-/-10\#}
-    export FORECAST_HOUR=$(( fhr ))
-    "${HOMEgfs}/jobs/JGLOBAL_ATMOS_PRODUCTS"
-    status=$?
-    if (( status != 0 )); then exit "${status}"; fi
-done
+###############################################################
+"${HOMEgfs}/jobs/JGLOBAL_ATMOS_PRODUCTS"
 
-exit 0
+exit $?
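
atmos_products additionally has to handle the analysis "hour" of -001 (noted in the removed comment above). Because the sign must precede the 10# base prefix, the new code moves it with a pattern substitution before doing the arithmetic; a short demonstration of that rewrite:

    FHR3="-001"
    fhr3_base="10#${FHR3}"                  # "10#-001" is not a valid arithmetic token
    echo $(( ${fhr3_base/10#-/-10#} ))      # rewritten to "-10#001" -> -1

    FHR3="006"
    fhr3_base="10#${FHR3}"
    echo $(( ${fhr3_base/10#-/-10#} ))      # nothing to rewrite -> 6
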
diff --git a/jobs/rocoto/awips_20km_1p0deg.sh b/jobs/rocoto/awips_20km_1p0deg.sh
index e1bf623883..af08b46111 100755
--- a/jobs/rocoto/awips_20km_1p0deg.sh
+++ b/jobs/rocoto/awips_20km_1p0deg.sh
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh"
 ## HOMEgfs   : /full/path/to/workflow
 ## EXPDIR : /full/path/to/config/files
 ## CDATE  : current analysis date (YYYYMMDDHH)
-## CDUMP  : cycle name (gdas / gfs)
+## RUN    : cycle name (gdas / gfs)
 ## PDY    : current date (YYYYMMDD)
 ## cyc    : current cycle (HH)
 ###############################################################
@@ -45,7 +45,7 @@ for fhr3 in ${fhrlst}; do
     if (( fhr >= fhmin && fhr <= fhmax )); then
         if ((fhr % 3 == 0)); then
             export fcsthrs="${fhr3}"
-            "${AWIPS20KM1P0DEGSH}"
+            "${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG"
         fi
     fi
 
@@ -54,7 +54,7 @@ for fhr3 in ${fhrlst}; do
     if (( fhr >= fhmin && fhr <= fhmax )); then
         if ((fhr % 6 == 0)); then
             export fcsthrs="${fhr3}"
-            "${AWIPS20KM1P0DEGSH}"
+            "${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG"
         fi
     fi
 done
diff --git a/jobs/rocoto/awips_g2.sh b/jobs/rocoto/awips_g2.sh
deleted file mode 100755
index 121c96d63f..0000000000
--- a/jobs/rocoto/awips_g2.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-## Abstract:
-## Inline awips driver script
-## HOMEgfs   : /full/path/to/workflow
-## EXPDIR : /full/path/to/config/files
-## CDATE  : current analysis date (YYYYMMDDHH)
-## CDUMP  : cycle name (gdas / gfs)
-## PDY    : current date (YYYYMMDD)
-## cyc    : current cycle (HH)
-###############################################################
-
-###############################################################
-# Source FV3GFS workflow modules
-source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
-status=$?
-(( status != 0 )) && exit "${status}"
-
-export job="awips_g2"
-export jobid="${job}.$$"
-
-source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips"
-
-# shellcheck disable=SC2153
-fhrlst=$(echo "${FHRLST}" | sed -e "s/_/ /g; s/f/ /g; s/,/ /g")
-
-###############################################################
-
-################################################################################
-echo
-echo "=============== BEGIN AWIPS ==============="
-
-for fhr3 in ${fhrlst}; do
-    fhr=$(( 10#${fhr3} ))
-    if (( fhr > FHMAX_GFS )); then
-        echo "Nothing to process for FHR = ${fhr3}, cycle"
-        continue
-    fi
-
-    fhmin=0
-    fhmax=240
-    if (( fhr >= fhmin && fhr <= fhmax )); then
-        if ((fhr % 6 == 0)); then
-            "${AWIPSG2SH}"
-        fi
-    fi
-done
-
-
-###############################################################
-# Force Exit out cleanly
-if [[ ${KEEPDATA:-"NO"} == "NO" ]] ; then rm -rf "${DATA}" ; fi
-
-exit 0
diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh
index c4c7341698..4a9263b509 100755
--- a/jobs/rocoto/earc.sh
+++ b/jobs/rocoto/earc.sh
@@ -8,6 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
+###############################################################
+# setup python path for workflow utilities and tasks
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python"
+export PYTHONPATH
+
 export job="earc"
 export jobid="${job}.$$"
 
@@ -16,5 +21,4 @@ export jobid="${job}.$$"
 "${HOMEgfs}/jobs/JGDAS_ENKF_ARCHIVE"
 status=$?
 
-
 exit "${status}"
diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh
deleted file mode 100755
index c5667cb970..0000000000
--- a/jobs/rocoto/efcs.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-# Source FV3GFS workflow modules
-# TODO clean this up once ncdiag/1.1.2 is installed on WCOSS2
-source "${HOMEgfs}/ush/detect_machine.sh"
-if [[ "${MACHINE_ID}" == "wcoss2" ]]; then
-   . ${HOMEgfs}/ush/load_ufswm_modules.sh
-else
-   . ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-fi
-status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
-
-export job="efcs"
-export jobid="${job}.$$"
-
-###############################################################
-# Execute the JJOB
-"${HOMEgfs}/jobs/JGDAS_ENKF_FCST"
-status=$?
-
-exit ${status}
diff --git a/jobs/rocoto/extractvars.sh b/jobs/rocoto/extractvars.sh
new file mode 100755
index 0000000000..a872431358
--- /dev/null
+++ b/jobs/rocoto/extractvars.sh
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+echo
+echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
+. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+[[ "${status}" -ne 0 ]] && exit "${status}"
+
+export job="extractvars"
+export jobid="${job}.$$"
+
+###############################################################
+echo
+echo "=============== START TO RUN EXTRACTVARS ==============="
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_EXTRACTVARS"
+status=$?
+[[ "${status}" -ne 0 ]] && exit "${status}"
+
+exit 0
diff --git a/jobs/rocoto/gempak.sh b/jobs/rocoto/gempak.sh
index 82ea1175d8..f5aea2379d 100755
--- a/jobs/rocoto/gempak.sh
+++ b/jobs/rocoto/gempak.sh
@@ -1,15 +1,14 @@
 #! /usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
 status=$?
 if (( status != 0 )); then exit "${status}"; fi
 
 export job="gempak"
 export jobid="${job}.$$"
 
+
 # Execute the JJOB
 "${HOMEgfs}/jobs/J${RUN^^}_ATMOS_GEMPAK"
 
diff --git a/jobs/rocoto/gempakpgrb2spec.sh b/jobs/rocoto/gempakgrb2spec.sh
similarity index 71%
rename from jobs/rocoto/gempakpgrb2spec.sh
rename to jobs/rocoto/gempakgrb2spec.sh
index f76c33ecdb..ddcb84599e 100755
--- a/jobs/rocoto/gempakpgrb2spec.sh
+++ b/jobs/rocoto/gempakgrb2spec.sh
@@ -1,15 +1,14 @@
 #! /usr/bin/env bash
 
 source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
 status=$?
 if (( status != 0 )); then exit "${status}"; fi
 
 export job="gempakpgrb2spec"
 export jobid="${job}.$$"
 
+
 # Execute the JJOB
 "${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC"
 
diff --git a/jobs/rocoto/landanl.sh b/jobs/rocoto/marineanalletkf.sh
similarity index 66%
rename from jobs/rocoto/landanl.sh
rename to jobs/rocoto/marineanalletkf.sh
index f49b6f9f8b..f2bfb9f70c 100755
--- a/jobs/rocoto/landanl.sh
+++ b/jobs/rocoto/marineanalletkf.sh
@@ -8,17 +8,16 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
-export job="landanl"
+export job="marineanalletkf"
 export jobid="${job}.$$"
 
 ###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+# Setup Python path for workflow utilities and tasks
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python"
 export PYTHONPATH
 
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS"
+"${HOMEgfs}/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF"
 status=$?
 exit "${status}"
diff --git a/jobs/rocoto/ocnanalbmat.sh b/jobs/rocoto/marinebmat.sh
similarity index 79%
rename from jobs/rocoto/ocnanalbmat.sh
rename to jobs/rocoto/marinebmat.sh
index e62db9115a..9b72e5e12c 100755
--- a/jobs/rocoto/ocnanalbmat.sh
+++ b/jobs/rocoto/marinebmat.sh
@@ -8,12 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ "${status}" -ne 0 ]] && exit "${status}"
 
-export job="ocnanalbmat"
+export job="marinebmat"
 export jobid="${job}.$$"
 
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT
-echo "BMAT gets run here"
+"${HOMEgfs}"/jobs/JGLOBAL_MARINE_BMAT
 status=$?
 exit "${status}"
diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh
new file mode 100755
index 0000000000..2a3b617d05
--- /dev/null
+++ b/jobs/rocoto/oceanice_products.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+## ocean/ice products driver script
+## FHR3 : forecast hour to post-process (e.g. f000, f006, f012, ...)
+###############################################################
+
+# Source FV3GFS workflow modules
+. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+if (( status != 0 )); then exit "${status}"; fi
+
+export job="oceanice_products"
+export jobid="${job}.$$"
+
+export FORECAST_HOUR=$(( 10#${FHR3} ))
+
+###############################################################
+# Execute the JJOB
+###############################################################
+"${HOMEgfs}/jobs/JGLOBAL_OCEANICE_PRODUCTS"
+
+exit $?
diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/ocnanalecen.sh
similarity index 65%
rename from jobs/rocoto/atmanlrun.sh
rename to jobs/rocoto/ocnanalecen.sh
index 1b87cb4074..c5fdbbbf32 100755
--- a/jobs/rocoto/atmanlrun.sh
+++ b/jobs/rocoto/ocnanalecen.sh
@@ -8,17 +8,16 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
-export job="atmanlrun"
+export job="ocnanalecen"
 export jobid="${job}.$$"
 
 ###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+# Setup Python path for GDASApp ush
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/gdas.cd/ush"
 export PYTHONPATH
 
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN"
+"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN
 status=$?
 exit "${status}"
diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh
deleted file mode 100755
index 5a2dc091cf..0000000000
--- a/jobs/rocoto/ocnpost.sh
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-## CICE5/MOM6 post driver script
-## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...)
-## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...)
-###############################################################
-
-# Source FV3GFS workflow modules
-source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
-status=$?
-(( status != 0 )) && exit "${status}"
-
-export job="ocnpost"
-export jobid="${job}.$$"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost"
-
-##############################################
-# Set variables used in the exglobal script
-##############################################
-export CDUMP=${RUN/enkf}
-
-##############################################
-# Begin JOB SPECIFIC work
-##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3D \
-  COM_OCEAN_XSECT COM_ICE_HISTORY
-
-for grid in "0p50" "0p25"; do
-  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
-done
-
-for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do
-  if [[ ! -d "${!outdir}" ]]; then
-    mkdir -p "${!outdir}"
-  fi
-done
-
-fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g')
-
-export OMP_NUM_THREADS=1
-export ENSMEM=${ENSMEM:-000}
-
-export IDATE=${PDY}${cyc}
-
-for fhr in ${fhrlst}; do
-  export fhr=${fhr}
-  # Ignore possible spelling error (nothing is misspelled)
-  # shellcheck disable=SC2153
-  VDATE=$(${NDATE} "${fhr}" "${IDATE}")
-  # shellcheck disable=
-  declare -x VDATE
-  cd "${DATA}" || exit 2
-  if (( 10#${fhr} > 0 )); then
-    # TODO: This portion calls NCL scripts that are deprecated (see Issue #923)
-    if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then
-      export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
-      "${MOM6REGRID}/scripts/run_regrid.sh"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-
-      # Convert the netcdf files to grib2
-      export executable=${MOM6REGRID}/exec/reg2grb2.x
-      "${MOM6REGRID}/scripts/run_reg2grb2.sh"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-      ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" "${COM_OCEAN_GRIB_0p25}/"
-      ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" "${COM_OCEAN_GRIB_0p50}/"
-    fi
-
-    #break up ocn netcdf into multiple files:
-    if [[ -f "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
-      echo "File ${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
-    else
-      ncks -x -v vo,uo,so,temp \
-        "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
-        "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-    fi
-    if [[ -f "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
-       echo "File ${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
-    else
-      ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v \
-        "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
-        "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-    fi
-    if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
-      echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
-    else
-      ncks -v temp -d yh,0.0 \
-        "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
-        "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-    fi
-    if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
-      echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
-    else
-      ncks -v uo -d yh,0.0 \
-        "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
-        "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
-      status=$?
-      [[ ${status} -ne 0 ]] && exit "${status}"
-    fi
-  fi
-done
-
-# clean up working folder
-if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi
-###############################################################
-# Exit out cleanly
-
-
-exit 0
diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh
index dfb541abb6..bbde68377d 100755
--- a/jobs/rocoto/prep.sh
+++ b/jobs/rocoto/prep.sh
@@ -13,7 +13,8 @@ export job="prep"
 export jobid="${job}.$$"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep"
 
-export CDUMP="${RUN/enkf}"
+# Strip 'enkf' from RUN for pulling data
+RUN_local="${RUN/enkf}"
 
 ###############################################################
 # Set script and dependency variables
@@ -25,11 +26,11 @@ gPDY=${GDATE:0:8}
 gcyc=${GDATE:8:2}
 GDUMP="gdas"
 
-export OPREFIX="${CDUMP}.t${cyc}z."
+export OPREFIX="${RUN_local}.t${cyc}z."
 
-YMD=${PDY} HH=${cyc} DUMP=${CDUMP} generate_com -rx COM_OBS COM_OBSDMP
+YMD=${PDY} HH=${cyc} DUMP=${RUN_local} declare_from_tmpl -rx COM_OBS COM_OBSDMP
 
-RUN=${GDUMP} DUMP=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+RUN=${GDUMP} DUMP=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
     COM_OBS_PREV:COM_OBS_TMPL \
     COM_OBSDMP_PREV:COM_OBSDMP_TMPL
 
@@ -39,7 +40,7 @@ if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi
 ###############################################################
 # If ROTDIR_DUMP=YES, copy dump files to rotdir
 if [[ ${ROTDIR_DUMP} = "YES" ]]; then
-   "${HOMEgfs}/ush/getdump.sh" "${PDY}${cyc}" "${CDUMP}" "${COM_OBSDMP}" "${COM_OBS}"
+   "${HOMEgfs}/ush/getdump.sh" "${PDY}${cyc}" "${RUN_local}" "${COM_OBSDMP}" "${COM_OBS}"
    status=$?
    [[ ${status} -ne 0 ]] && exit ${status}
 
@@ -73,14 +74,14 @@ if [[ ${PROCESS_TROPCY} = "YES" ]]; then
         done
     fi
 
-    if [[ ${ROTDIR_DUMP} = "YES" ]]; then rm "${COM_OBS}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00"; fi
+    if [[ ${ROTDIR_DUMP} = "YES" ]]; then rm "${COM_OBS}/${RUN_local}.t${cyc}z.syndata.tcvitals.tm00"; fi
 
     "${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC"
     status=$?
     [[ ${status} -ne 0 ]] && exit ${status}
 
 else
-    if [[ ${ROTDIR_DUMP} = "NO" ]]; then cp "${COM_OBSDMP}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00" "${COM_OBS}/"; fi
+    if [[ ${ROTDIR_DUMP} = "NO" ]]; then cp "${COM_OBSDMP}/${RUN_local}.t${cyc}z.syndata.tcvitals.tm00" "${COM_OBS}/"; fi
 fi
 
 
@@ -93,17 +94,17 @@ if [[ ${MAKE_PREPBUFR} = "YES" ]]; then
         rm -f "${COM_OBS}/${OPREFIX}nsstbufr"
     fi
 
-    export job="j${CDUMP}_prep_${cyc}"
+    export job="j${RUN_local}_prep_${cyc}"
     export COMIN=${COM_OBS}
     export COMOUT=${COM_OBS}
-    RUN="gdas" YMD=${PDY} HH=${cyc} generate_com -rx COMINgdas:COM_ATMOS_HISTORY_TMPL
-    RUN="gfs" YMD=${PDY} HH=${cyc} generate_com -rx COMINgfs:COM_ATMOS_HISTORY_TMPL
+    RUN="gdas" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMINgdas:COM_ATMOS_HISTORY_TMPL
+    RUN="gfs" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMINgfs:COM_ATMOS_HISTORY_TMPL
     if [[ ${ROTDIR_DUMP} = "NO" ]]; then
-        export COMSP=${COMSP:-"${COM_OBSDMP}/${CDUMP}.t${cyc}z."}
+        export COMSP=${COMSP:-"${COM_OBSDMP}/${RUN_local}.t${cyc}z."}
     else
-        export COMSP=${COMSP:-"${COM_OBS}/${CDUMP}.t${cyc}z."}
+        export COMSP=${COMSP:-"${COM_OBS}/${RUN_local}.t${cyc}z."}
     fi
-    export COMSP=${COMSP:-${COMIN_OBS}/${CDUMP}.t${cyc}z.}
+    export COMSP=${COMSP:-${COMIN_OBS}/${RUN_local}.t${cyc}z.}
 
     # Disable creating NSSTBUFR if desired, copy from DMPDIR instead
     if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then
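
prep.sh now derives RUN_local by deleting the "enkf" prefix from RUN with a pattern substitution, so ensemble runs pull the same observation dumps as their deterministic counterparts ("Strip 'enkf' from RUN for pulling data" in the added comment). A minimal demonstration of the expansion:

    RUN="enkfgdas"
    RUN_local="${RUN/enkf}"    # first "enkf" removed -> "gdas"
    echo "${RUN_local}"

    RUN="gfs"
    echo "${RUN/enkf}"         # no match, value unchanged -> "gfs"
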
diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/prep_emissions.sh
similarity index 61%
rename from jobs/rocoto/atmensanlrun.sh
rename to jobs/rocoto/prep_emissions.sh
index d991e3eb82..0677073947 100755
--- a/jobs/rocoto/atmensanlrun.sh
+++ b/jobs/rocoto/prep_emissions.sh
@@ -4,21 +4,20 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source UFSDA workflow modules
-. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
 status=$?
-[[ ${status} -ne 0 ]] && exit "${status}"
+(( status != 0 )) && exit "${status}"
 
-export job="atmensanlrun"
+export job="prep_emissions"
 export jobid="${job}.$$"
 
 ###############################################################
 # setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python"
 export PYTHONPATH
 
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN"
+"${HOMEgfs}/jobs/JGLOBAL_PREP_EMISSIONS"
 status=$?
 exit "${status}"
diff --git a/jobs/rocoto/prepatmiodaobs.sh b/jobs/rocoto/prepatmiodaobs.sh
index d424df9261..26629a514f 100755
--- a/jobs/rocoto/prepatmiodaobs.sh
+++ b/jobs/rocoto/prepatmiodaobs.sh
@@ -12,10 +12,10 @@ export job="prepatmobs"
 export jobid="${job}.$$"
 
 ###############################################################
-# setup python path for workflow and ioda utilities
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYIODALIB="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7"
-PYTHONPATH="${PYIODALIB}:${wxflowPATH}:${PYTHONPATH}"
+# setup python path for ioda utilities
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
+PYTHONPATH="${pyiodaPATH}:${PYTHONPATH}"
 export PYTHONPATH
 
 ###############################################################
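
prepatmiodaobs.sh (and prepsnowobs.sh below) now build the GDASApp library path from $(detect_py_ver) instead of hard-coding python3.7. The helper itself is not shown in this diff; presumably it is provided by the sourced preamble and prints the active interpreter's major.minor version. A hypothetical stand-in, purely for illustration:

    # Hypothetical stand-in; the real detect_py_ver presumably comes with the workflow's ush scripts.
    detect_py_ver() {
        python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")'
    }

    # e.g. "${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" -> .../python3.10/
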
diff --git a/jobs/rocoto/prepobsaero.sh b/jobs/rocoto/prepobsaero.sh
new file mode 100755
index 0000000000..5d65ff8a02
--- /dev/null
+++ b/jobs/rocoto/prepobsaero.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="prepobsaero"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_PREP_OBS_AERO"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/prepoceanobs.sh b/jobs/rocoto/prepoceanobs.sh
index d7ae87851f..d8626f5518 100755
--- a/jobs/rocoto/prepoceanobs.sh
+++ b/jobs/rocoto/prepoceanobs.sh
@@ -12,6 +12,11 @@ status=$?
 export job="prepoceanobs"
 export jobid="${job}.$$"
 
+###############################################################
+# setup python path for class defs and utils
+
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush"
+
 ###############################################################
 # Execute the JJOB
 "${HOMEgfs}"/jobs/JGLOBAL_PREP_OCEAN_OBS
diff --git a/jobs/rocoto/preplandobs.sh b/jobs/rocoto/prepsnowobs.sh
similarity index 57%
rename from jobs/rocoto/preplandobs.sh
rename to jobs/rocoto/prepsnowobs.sh
index 6304dd611b..3f23bc16a5 100755
--- a/jobs/rocoto/preplandobs.sh
+++ b/jobs/rocoto/prepsnowobs.sh
@@ -8,18 +8,19 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
 
-export job="preplandobs"
+export job="prepsnowobs"
 export jobid="${job}.$$"
 
 ###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}"
+# setup python path for ioda utilities
+# shellcheck disable=SC2311
+pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
+gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}:${gdasappPATH}"
 export PYTHONPATH
 
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_PREP_LAND_OBS"
+"${HOMEgfs}/jobs/JGLOBAL_PREP_SNOW_OBS"
 status=$?
 exit "${status}"
diff --git a/jobs/rocoto/snowanl.sh b/jobs/rocoto/snowanl.sh
new file mode 100755
index 0000000000..97df7a46c7
--- /dev/null
+++ b/jobs/rocoto/snowanl.sh
@@ -0,0 +1,18 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="snowanl"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh
index 18d5c12cea..c3f128ab02 100755
--- a/jobs/rocoto/upp.sh
+++ b/jobs/rocoto/upp.sh
@@ -29,31 +29,26 @@ if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
   module load python/3.8.6
   module load crtm/2.4.0  # TODO: This is only needed when UPP_RUN=goes.  Is there a better way to handle this?
   set_trace
+
+  # Add wxflow to PYTHONPATH
+  wxflowPATH="${HOMEgfs}/ush/python"
+  PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+  export PYTHONPATH
+
 else
   . "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
   status=$?
   if (( status != 0 )); then exit "${status}"; fi
 fi
 
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
 export job="upp"
 export jobid="${job}.$$"
 
-###############################################################
-# shellcheck disable=SC2153,SC2001
-IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's convert to array
+export FORECAST_HOUR=$(( 10#${FHR3} ))
 
+###############################################################
 # Execute the JJOB
-for fhr in "${fhrs[@]}"; do
-    export FORECAST_HOUR=$(( 10#${fhr} ))
-    "${HOMEgfs}/jobs/JGLOBAL_ATMOS_UPP"
-    status=$?
-    if (( status != 0 )); then exit "${status}"; fi
-done
-
-exit 0
+###############################################################
+"${HOMEgfs}/jobs/JGLOBAL_ATMOS_UPP"
+
+exit $?
diff --git a/modulefiles/module-setup.csh.inc b/modulefiles/module-setup.csh.inc
deleted file mode 100644
index 7086326627..0000000000
--- a/modulefiles/module-setup.csh.inc
+++ /dev/null
@@ -1,87 +0,0 @@
-set __ms_shell=csh
-
-eval "if ( -d / ) set __ms_shell=tcsh"
-
-if ( { test -d /lfs/f1 } ) then
-    # We are on NOAA Cactus or Dogwood
-    if ( ! { module help >& /dev/null } ) then
-        source /usr/share/lmod/lmod/init/$__ms_shell
-    fi
-    module reset
-else if ( { test -d /lfs3 } ) then
-    if ( ! { module help >& /dev/null } ) then
-            source /apps/lmod/lmod/init/$__ms_shell
-    endif
-    module purge
-else if ( { test -d /scratch1 } ) then
-    # We are on NOAA Hera
-    if ( ! { module help >& /dev/null } ) then
-        source /apps/lmod/lmod/init/$__ms_shell
-    endif
-    module purge
-elif [[ -d /work ]] ; then
-    # We are on MSU Orion or Hercules
-    if [[ -d /apps/other ]] ; then
-        # Hercules
-        init_path="/apps/other/lmod/lmod/init/$__ms_shell"
-    else
-        # Orion
-        init_path="/apps/lmod/lmod/init/$__ms_shell"
-    fi
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source "${init_path}"
-    fi
-    module purge
-else if ( { test -d /data/prod } ) then
-    # We are on SSEC S4
-    if ( ! { module help >& /dev/null } ) then
-        source /usr/share/lmod/lmod/init/$__ms_shell
-    endif
-    source /etc/profile
-    module purge
-else if ( { test -d /glade } ) then
-    # We are on NCAR Yellowstone
-    if ( ! { module help >& /dev/null } ) then
-        source /usr/share/Modules/init/$__ms_shell
-    endif
-    module purge
-else if ( { test -d /lustre -a -d /ncrc } ) then
-    # We are on GAEA.
-    if ( ! { module help >& /dev/null } ) then
-        # We cannot simply load the module command.  The GAEA
-        # /etc/csh.login modifies a number of module-related variables
-        # before loading the module command.  Without those variables,
-        # the module command fails.  Hence we actually have to source
-        # /etc/csh.login here.
-        source /etc/csh.login
-        set __ms_source_etc_csh_login=yes
-    else
-        set __ms_source_etc_csh_login=no
-    endif
-    module purge
-    unsetenv _LMFILES_
-    unsetenv _LMFILES_000
-    unsetenv _LMFILES_001
-    unsetenv LOADEDMODULES
-    module load modules
-    if ( { test -d /opt/cray/ari/modulefiles } ) then
-        module use -a /opt/cray/ari/modulefiles
-    endif
-    if ( { test -d /opt/cray/pe/ari/modulefiles } ) then
-        module use -a /opt/cray/pe/ari/modulefiles
-    endif
-    if ( { test -d /opt/cray/pe/craype/default/modulefiles } ) then
-        module use -a /opt/cray/pe/craype/default/modulefiles
-    endif
-    setenv NCEPLIBS /lustre/f1/pdata/ncep_shared/NCEPLIBS/lib
-    if ( { test -d /lustre/f1/pdata/ncep_shared/NCEPLIBS/lib } ) then
-      module use $NCEPLIBS/modulefiles
-    endif
-    if ( "$__ms_source_etc_csh_login" == yes ) then
-      source /etc/csh.login
-      unset __ms_source_etc_csh_login
-    endif
-else
-    # Workaround for csh limitation.  Use sh to print to stderr.
-    sh -c 'echo WARNING: UNKNOWN PLATFORM 1>&2'
-endif
diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc
deleted file mode 100644
index db9dabffe1..0000000000
--- a/modulefiles/module-setup.sh.inc
+++ /dev/null
@@ -1,110 +0,0 @@
-# Create a test function for sh vs. bash detection.  The name is
-# randomly generated to reduce the chances of name collision.
-__ms_function_name="setup__test_function__$$"
-eval "$__ms_function_name() { /bin/true ; }"
-
-# Determine which shell we are using
-__ms_ksh_test=$( eval '__text="text" ; if [[ $__text =~ ^(t).* ]] ; then printf "%s" ${.sh.match[1]} ; fi' 2> /dev/null | cat )
-__ms_bash_test=$( eval 'if ( set | grep '$__ms_function_name' | grep -v name > /dev/null 2>&1 ) ; then echo t ; fi ' 2> /dev/null | cat )
-
-if [[ ! -z "$__ms_ksh_test" ]] ; then
-    __ms_shell=ksh
-elif [[ ! -z "$__ms_bash_test" ]] ; then
-    __ms_shell=bash
-else
-    # Not bash or ksh, so assume sh.
-    __ms_shell=sh
-fi
-
-if [[ -d /lfs/f1 ]] ; then
-    # We are on NOAA Cactus or Dogwood
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /usr/share/lmod/lmod/init/$__ms_shell
-    fi
-    module reset
-elif [[ -d /mnt/lfs1 ]] ; then
-    # We are on NOAA Jet
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /apps/lmod/lmod/init/$__ms_shell
-    fi
-    module purge
-elif [[ -d /scratch1 ]] ; then
-    # We are on NOAA Hera
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /apps/lmod/lmod/init/$__ms_shell
-    fi
-    module purge
-elif [[ -d /work ]] ; then
-    # We are on MSU Orion or Hercules
-    if [[ -d /apps/other ]] ; then
-        # Hercules
-        init_path="/apps/other/lmod/lmod/init/$__ms_shell"
-    else
-        # Orion
-        init_path="/apps/lmod/lmod/init/$__ms_shell"
-    fi
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source "${init_path}"
-    fi
-    module purge
-elif [[ -d /glade ]] ; then
-    # We are on NCAR Yellowstone
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        . /usr/share/Modules/init/$__ms_shell
-    fi
-    module purge
-elif [[ -d /lustre && -d /ncrc ]] ; then
-    # We are on GAEA.
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        # We cannot simply load the module command.  The GAEA
-        # /etc/profile modifies a number of module-related variables
-        # before loading the module command.  Without those variables,
-        # the module command fails.  Hence we actually have to source
-        # /etc/profile here.
-        source /etc/profile
-        __ms_source_etc_profile=yes
-    else
-        __ms_source_etc_profile=no
-    fi
-    module purge
-    # clean up after purge
-    unset _LMFILES_
-    unset _LMFILES_000
-    unset _LMFILES_001
-    unset LOADEDMODULES
-    module load modules
-    if [[ -d /opt/cray/ari/modulefiles ]] ; then
-        module use -a /opt/cray/ari/modulefiles
-    fi
-    if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then
-        module use -a /opt/cray/pe/ari/modulefiles
-    fi
-    if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then
-        module use -a /opt/cray/pe/craype/default/modulefiles
-    fi
-    if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then
-        source /etc/opt/cray/pe/admin-pe/site-config
-    fi
-    export NCEPLIBS=/lustre/f1/pdata/ncep_shared/NCEPLIBS/lib
-    if [[ -d "$NCEPLIBS" ]] ; then
-        module use $NCEPLIBS/modulefiles
-    fi
-    if [[ "$__ms_source_etc_profile" == yes ]] ; then
-      source /etc/profile
-      unset __ms_source_etc_profile
-    fi
-elif [[ -d /data/prod ]] ; then
-    # We are on SSEC's S4
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /usr/share/lmod/lmod/init/$__ms_shell
-    fi
-    module purge
-else
-    echo WARNING: UNKNOWN PLATFORM 1>&2
-fi
-
-unset __ms_shell
-unset __ms_ksh_test
-unset __ms_bash_test
-unset $__ms_function_name
-unset __ms_function_name
diff --git a/modulefiles/module_base.gaea.lua b/modulefiles/module_base.gaea.lua
new file mode 100644
index 0000000000..55ad6b0c34
--- /dev/null
+++ b/modulefiles/module_base.gaea.lua
@@ -0,0 +1,39 @@
+help([[
+Load environment to run GFS on Gaea
+]])
+
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
+
+load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
+load(pathJoin("stack-cray-mpich", (os.getenv("stack_cray_mpich_ver") or "None")))
+load(pathJoin("python", (os.getenv("python_ver") or "None")))
+
+load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
+load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
+load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
+load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None")))
+load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None")))
+load(pathJoin("netcdf-fortran", (os.getenv("netcdf_fortran_ver") or "None")))
+
+load(pathJoin("nco", (os.getenv("nco_ver") or "None")))
+load(pathJoin("prod_util", (os.getenv("prod_util_ver") or "None")))
+load(pathJoin("grib-util", (os.getenv("grib_util_ver") or "None")))
+load(pathJoin("g2tmpl", (os.getenv("g2tmpl_ver") or "None")))
+load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
+load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
+load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
+load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
+load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
+load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
+
+setenv("WGRIB2","wgrib2")
+setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
+
+whatis("Description: GFS run setup environment")
diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua
index bf970bbaee..701cc4667d 100644
--- a/modulefiles/module_base.hera.lua
+++ b/modulefiles/module_base.hera.lua
@@ -16,6 +16,7 @@ load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
 load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
 load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
 load(pathJoin("R", (os.getenv("R_ver") or "None")))
+load(pathJoin("perl", (os.getenv("perl_ver") or "None")))
 
 load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None")))
 load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None")))
@@ -29,19 +30,24 @@ load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
 load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
 load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
 load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
-
--- MET/METplus are not available for use with spack-stack, yet
---load(pathJoin("met", (os.getenv("met_ver") or "None")))
---load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
 
 setenv("WGRIB2","wgrib2")
+
+-- Stop gap fix for wgrib with spack-stack 1.6.0
+-- TODO Remove this when spack-stack issue #1097 is resolved
+setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
---prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/dev-gfsv17/modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
 prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
diff --git a/modulefiles/module_base.hercules.lua b/modulefiles/module_base.hercules.lua
index d9c8f5ed0b..fdc5f58698 100644
--- a/modulefiles/module_base.hercules.lua
+++ b/modulefiles/module_base.hercules.lua
@@ -2,20 +2,14 @@ help([[
 Load environment to run GFS on Hercules
 ]])
 
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
 
 load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
 load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
 load(pathJoin("intel-oneapi-mkl", (os.getenv("intel_mkl_ver") or "None")))
 load(pathJoin("python", (os.getenv("python_ver") or "None")))
-load(pathJoin("perl", (os.getenv("perl_ver") or "None")))
 
--- TODO load NCL once the SAs remove the 'depends_on' statements within it
---      NCL is a static installation and does not depend on any libraries
---      but as is will load, among others, the system netcdf-c/4.9.0 module
---load(pathJoin("ncl", (os.getenv("ncl_ver") or "None")))
 load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
 load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
 load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
@@ -32,17 +26,29 @@ load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
 load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
 load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
 load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
 
 setenv("WGRIB2","wgrib2")
+
+-- Stopgap fix for wgrib with spack-stack 1.6.0
+-- TODO Remove this when spack-stack issue #1097 is resolved
+setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+--prepend_path("MODULEPATH", pathJoin"/work/noaa/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+--prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
 load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None")))
 
 whatis("Description: GFS run environment")
diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua
index 64d35da57a..76320688b0 100644
--- a/modulefiles/module_base.jet.lua
+++ b/modulefiles/module_base.jet.lua
@@ -2,9 +2,8 @@ help([[
 Load environment to run GFS on Jet
 ]])
 
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
 
 load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
 load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -30,15 +29,27 @@ load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
 load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
 load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
 load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
+
+-- Load perl as a module; with Rocky8, Perl packages will no longer come from the OS
+load(pathJoin("perl", (os.getenv("perl_ver") or "None")))
 
 setenv("WGRIB2","wgrib2")
+
+-- Stopgap fix for wgrib with spack-stack 1.6.0
+-- TODO Remove this when spack-stack issue #1097 is resolved
+setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
---prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
 prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua
index 65486855d0..5cee9e5e31 100644
--- a/modulefiles/module_base.orion.lua
+++ b/modulefiles/module_base.orion.lua
@@ -2,16 +2,13 @@ help([[
 Load environment to run GFS on Orion
 ]])
 
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
 
 load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
 load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
 load(pathJoin("python", (os.getenv("python_ver") or "None")))
 
-load(pathJoin("gempak", (os.getenv("gempak_ver") or "None")))
-load(pathJoin("ncl", (os.getenv("ncl_ver") or "None")))
 load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
 load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
 load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
@@ -28,22 +25,29 @@ load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
 load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
 load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
 load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
-
--- MET/METplus are not yet supported with spack-stack
---load(pathJoin("met", (os.getenv("met_ver") or "None")))
---load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
 
 setenv("WGRIB2","wgrib2")
+
+-- Stopgap fix for wgrib with spack-stack 1.6.0
+-- TODO Remove this when spack-stack issue #1097 is resolved
+setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
---prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+--prepend_path("MODULEPATH", pathJoin"/work/noaa/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+--prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
 load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None")))
 
 whatis("Description: GFS run environment")
diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua
index d99a93c3f4..835249fb85 100644
--- a/modulefiles/module_base.s4.lua
+++ b/modulefiles/module_base.s4.lua
@@ -2,9 +2,8 @@ help([[
 Load environment to run GFS on S4
 ]])
 
-spack_stack_ver=(os.getenv("spack_stack_ver") or "None")
-spack_env=(os.getenv("spack_env") or "None")
-prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core")
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
 
 load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
 load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
@@ -27,11 +26,21 @@ load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
 load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
 load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
 load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
 
 setenv("WGRIB2","wgrib2")
+
+-- Stopgap fix for wgrib with spack-stack 1.6.0
+-- TODO Remove this when spack-stack issue #1097 is resolved
+setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
 --prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua
index ee4ee6a5fb..49d5abc678 100644
--- a/modulefiles/module_base.wcoss2.lua
+++ b/modulefiles/module_base.wcoss2.lua
@@ -31,8 +31,12 @@ load(pathJoin("ncdiag", (os.getenv("ncdiag_ver") or "None")))
 load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
 load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
 
---prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+prepend_path("MODULEPATH", "/apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304")
+setenv("HPC_OPT", "/apps/ops/para/libs")
+load(pathJoin("met", (os.getenv("met_ver") or "None")))
+load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+
+prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
 prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
diff --git a/modulefiles/module_gwci.hercules.lua b/modulefiles/module_gwci.hercules.lua
index 9c60aed467..179bbef114 100644
--- a/modulefiles/module_gwci.hercules.lua
+++ b/modulefiles/module_gwci.hercules.lua
@@ -2,7 +2,7 @@ help([[
 Load environment to run GFS workflow ci scripts on Hercules
 ]])
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
 
 load(pathJoin("stack-intel", os.getenv("2021.9.0")))
 load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.9.0")))
diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua
index 18851ba7d4..e2213932d7 100644
--- a/modulefiles/module_gwci.orion.lua
+++ b/modulefiles/module_gwci.orion.lua
@@ -2,13 +2,13 @@ help([[
 Load environment to run GFS workflow ci scripts on Orion
 ]])
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env-rocky9/install/modulefiles/Core")
 
-load(pathJoin("stack-intel", os.getenv("2022.0.2")))
-load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1")))
+load(pathJoin("stack-intel", os.getenv("2021.9.0")))
+load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.9.0")))
 
 load(pathJoin("netcdf-c", os.getenv("4.9.2")))
-load(pathJoin("netcdf-fortran", os.getenv("4.6.0")))
+load(pathJoin("netcdf-fortran", os.getenv("4.6.1")))
 load(pathJoin("nccmp","1.9.0.1"))
 load(pathJoin("wgrib2", "2.0.8"))
 
diff --git a/modulefiles/module_gwci.wcoss2.lua b/modulefiles/module_gwci.wcoss2.lua
new file mode 100644
index 0000000000..50eaeefee0
--- /dev/null
+++ b/modulefiles/module_gwci.wcoss2.lua
@@ -0,0 +1,8 @@
+help([[
+Load environment to run GFS workflow ci scripts on WCOSS2
+]])
+
+prepend_path("MODULEPATH", "/apps/ops/para/nco/modulefiles/core")
+load(pathJoin("gh","2.28.0"))
+
+whatis("Description: GFS run setup ci environment")
diff --git a/modulefiles/module_gwsetup.gaea.lua b/modulefiles/module_gwsetup.gaea.lua
new file mode 100644
index 0000000000..8b9f70e4a0
--- /dev/null
+++ b/modulefiles/module_gwsetup.gaea.lua
@@ -0,0 +1,19 @@
+help([[
+Load environment to run GFS workflow setup scripts on Gaea
+]])
+
+prepend_path("MODULEPATH", "/ncrc/proj/epic/rocoto/modulefiles")
+load(pathJoin("rocoto"))
+
+prepend_path("MODULEPATH", "/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+
+local stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0"
+local python_ver=os.getenv("python_ver") or "3.10.13"
+
+load(pathJoin("stack-intel", stack_intel_ver))
+load(pathJoin("python", python_ver))
+load("py-jinja2")
+load("py-pyyaml")
+load("py-numpy")
+
+whatis("Description: GFS run setup environment")
diff --git a/modulefiles/module_gwsetup.hercules.lua b/modulefiles/module_gwsetup.hercules.lua
index 673928605c..9d845fb71d 100644
--- a/modulefiles/module_gwsetup.hercules.lua
+++ b/modulefiles/module_gwsetup.hercules.lua
@@ -3,12 +3,12 @@ Load environment to run GFS workflow ci scripts on Hercules
 ]])
 
 load(pathJoin("contrib","0.1"))
-load(pathJoin("rocoto","1.3.5"))
+load(pathJoin("rocoto","1.3.7"))
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
 
 local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
 
 load(pathJoin("stack-intel", stack_intel_ver))
 load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.jet.lua b/modulefiles/module_gwsetup.jet.lua
index d08389c711..bc14b19a79 100644
--- a/modulefiles/module_gwsetup.jet.lua
+++ b/modulefiles/module_gwsetup.jet.lua
@@ -2,12 +2,12 @@ help([[
 Load environment to run GFS workflow setup scripts on Jet
 ]])
 
-load(pathJoin("rocoto", "1.3.3"))
+load(pathJoin("rocoto"))
 
-prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev-rocky8/install/modulefiles/Core")
 
 local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
 
 load(pathJoin("stack-intel", stack_intel_ver))
 load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua
index 93a59c8e50..b8e2fc8a9f 100644
--- a/modulefiles/module_gwsetup.orion.lua
+++ b/modulefiles/module_gwsetup.orion.lua
@@ -4,13 +4,12 @@ Load environment to run GFS workflow ci scripts on Orion
 
 prepend_path("MODULEPATH", "/apps/modulefiles/core")
 load(pathJoin("contrib","0.1"))
-load(pathJoin("rocoto","1.3.3"))
-load(pathJoin("git","2.28.0"))
+load(pathJoin("rocoto","1.3.7"))
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env-rocky9/install/modulefiles/Core")
 
-local stack_intel_ver=os.getenv("stack_intel_ver") or "2022.0.2"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
+local python_ver=os.getenv("python_ver") or "3.11.6"
 
 load(pathJoin("stack-intel", stack_intel_ver))
 load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.s4.lua b/modulefiles/module_gwsetup.s4.lua
index 291c654bb3..77a647006f 100644
--- a/modulefiles/module_gwsetup.s4.lua
+++ b/modulefiles/module_gwsetup.s4.lua
@@ -5,10 +5,10 @@ Load environment to run GFS workflow setup scripts on S4
 load(pathJoin("rocoto","1.3.5"))
 load(pathJoin("git","2.30.0"))
 
-prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
 
 local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
-local python_ver=os.getenv("python_ver") or "3.10.8"
+local python_ver=os.getenv("python_ver") or "3.11.6"
 
 load(pathJoin("stack-intel", stack_intel_ver))
 load(pathJoin("python", python_ver))
diff --git a/modulefiles/module_gwsetup.wcoss2.lua b/modulefiles/module_gwsetup.wcoss2.lua
index d4e64548b0..a2440569db 100644
--- a/modulefiles/module_gwsetup.wcoss2.lua
+++ b/modulefiles/module_gwsetup.wcoss2.lua
@@ -2,8 +2,6 @@ help([[
 Load environment to run GFS workflow ci scripts on WCOSS2
 ]])
 
-load(pathJoin("git","2.29.0"))
-
 prepend_path("MODULEPATH", "/apps/ops/test/nco/modulefiles/core")
 load(pathJoin("rocoto","1.3.5"))
 
diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/arcdir.yaml.j2
new file mode 100644
index 0000000000..57dbc78885
--- /dev/null
+++ b/parm/archive/arcdir.yaml.j2
@@ -0,0 +1,156 @@
+{% set cycle_HH = current_cycle | strftime("%H") %}
+{% set cycle_YMDH = current_cycle | to_YMDH %}
+{% set cycle_YMD = current_cycle | to_YMD %}
+{% set head = RUN + ".t" + cycle_HH + "z." %}
+
+# Select data to store in the ARCDIR and VFYARC from deterministic runs
+# This file set will contain all source-destination pairs to send to the FileHandler for copying
+{% set file_set = [] %}
+
+# Declare the VFYARC where Fit2Obs data will be sent
+{% set VFYARC = ROTDIR ~ "/vrfyarch" %}
+
+# Deterministic files
+{% if "enkf" not in RUN %}
+    # Common files to be added to both the gfs and gdas keys below
+    {% set det_files = [] %}
+    # Cyclone forecasts, produced for both gdas and gfs cycles
+    ## Only created if tracking is on and there were systems to track
+    {% if path_exists(COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH) %}
+        {% do det_files.append([COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH,
+                                ARCDIR ~"/atcfunix." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+        {% do det_files.append([COMIN_ATMOS_TRACK ~ "/atcfunixp." ~ RUN ~ "." ~ cycle_YMDH,
+                                ARCDIR ~ "/atcfunixp." ~ RUN ~ "." ~ cycle_YMDH]) %}
+    {% endif %}
+
+    # Cyclone tracking data
+    {% for basin in ["epac", "natl"] %}
+        {% if path_exists(COMIN_ATMOS_TRACK + "/" + basin) %}
+            {% do det_files.append([COMIN_ATMOS_TRACK ~ "/" ~ basin,
+                                    ARCDIR ~ "/" ~ basin ]) %}
+        {% endif %}
+    {% endfor %}
+
+    # Deterministic analysis files (generated for cycled experiments)
+    {% set det_anl_files = [] %}
+    # Analysis data (if we are running in cycled mode)
+    {% do det_anl_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.anl",
+                                ARCDIR ~ "/pgbanl." ~ RUN ~ "." ~ cycle_YMDH ~ ".grib2"]) %}
+
+    {% if DO_JEDIATMVAR == True %}
+        {% do det_anl_files.append([COMIN_ATMOS_ANALYSIS ~ "/" ~ head ~ "atmstat",
+                                    ARCDIR ~ "/atmstat." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+    {% else %}
+        {% do det_anl_files.append([COMIN_ATMOS_ANALYSIS ~ "/" ~ head ~ "gsistat",
+                                    ARCDIR ~ "/gsistat." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+    {% endif %}
+
+    {% if DO_JEDISNOWDA == True %}
+        {% do det_anl_files.append([COMIN_SNOW_ANALYSIS ~ "/" ~ head ~ "snowstat.tgz",
+                                    ARCDIR ~ "/snowstat." ~ RUN ~ "." ~ cycle_YMDH ~ ".tgz"]) %}
+    {% endif %}
+
+    {% if AERO_ANL_RUN == RUN or AERO_ANL_RUN == "both" %}
+        {% do det_anl_files.append([COMIN_CHEM_ANALYSIS ~ "/" ~ head ~ "aerostat",
+                                    ARCDIR ~ "/aerostat." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+    {% endif %}
+
+    {% if DO_PREP_OBS_AERO == True %}
+        {% do det_anl_files.append([COMIN_OBS ~ "/" ~ head ~ "aeroobs",
+                                    ARCDIR ~ "/aeroobs." ~ RUN ~ "." ~ cycle_YMDH]) %}
+        {% do det_anl_files.append([COMIN_OBS ~ "/" ~ head ~ "aeroawobs",
+                                    ARCDIR ~ "/aeroawobs." ~ RUN ~ "." ~ cycle_YMDH]) %}
+    {% endif %}
+
+    # GFS-specific files
+    {% set gfs_files = [] %}
+    {% for fhr in range(0, FHMAX_GFS + 1, FHOUT_GFS) %}
+        {% do gfs_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.f" ~ '%03d'|format(fhr),
+                                ARCDIR ~ "/pgbf" ~ '%02d'|format(fhr) ~ "." ~ RUN ~ "." ~ cycle_YMDH ~ ".grib2"]) %}
+    {% endfor %}
+
+    # Cyclone genesis data (only present if there are storms)
+    {% if path_exists(COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %}
+        {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH,
+                                ARCDIR ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH ]) %}
+        {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen.altg." ~ cycle_YMDH,
+                                ARCDIR ~ "/storms.gfso.atcf_gen.altg." ~ cycle_YMDH ]) %}
+    {% endif %}
+
+    {% if path_exists(COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH) %}
+        {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH,
+                                ARCDIR ~ "/trak.gfso.atcfunix." ~ cycle_YMDH ]) %}
+        {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix.altg." ~ cycle_YMDH,
+                                ARCDIR ~ "/trak.gfso.atcfunix.altg." ~ cycle_YMDH ]) %}
+    {% endif %}
+
+    # GFS Fit2Obs data
+    {% set fit2obs_files = [] %}
+    {% for fhr in range(0, FHMAX_FITS + 1, 6) %}
+        {% set sfcfile = "/" + head + "sfcf" + '%03d'|format(fhr) + ".nc" %}
+        {% set sigfile = "/" + head + "atmf" + '%03d'|format(fhr) + ".nc" %}
+        {% do fit2obs_files.append([COMIN_ATMOS_HISTORY ~ "/" ~ sfcfile,
+                                    VFYARC ~ "/" ~ RUN ~ "." ~ cycle_YMD ~ "/" ~ cycle_HH ~ "/" ~ sfcfile ]) %}
+        {% do fit2obs_files.append([COMIN_ATMOS_HISTORY ~ "/" ~ sigfile,
+                                    VFYARC ~ "/" ~ RUN ~ "." ~ cycle_YMD ~ "/" ~ cycle_HH ~ "/" ~ sigfile ]) %}
+    {% endfor %}
+
+    # GDAS-specific files
+    {% set gdas_files = [] %}
+    {% for fhr in range(0, FHMAX + 1, FHOUT) %}
+        {% do gdas_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.f" ~ '%03d'|format(fhr),
+                                 ARCDIR ~ "/pgbf" ~ '%02d'|format(fhr) ~ "." ~ RUN ~ "." ~ cycle_YMDH ~ ".grib2"]) %}
+    {% endfor %}
+
+    # Now append the necessary file pairs to file_set
+    # Common deterministic files
+    {% set file_set = file_set + det_files %}
+    {% if MODE == "cycled" %}
+        {% set file_set = file_set + det_anl_files %}
+    {% endif %}
+
+    # Run-specific deterministic files
+    {% if RUN == "gfs" %}
+        {% set file_set = file_set + gfs_files %}
+        # Fit2Obs files
+        {% if DO_FIT2OBS == True %}
+            {% set file_set = file_set + fit2obs_files %}
+        {% endif %}
+    {% elif RUN == "gdas" %}
+        {% set file_set = file_set + gdas_files %}
+    {% endif %}
+
+{% else %}  # End of deterministic files
+
+    # Ensemble analysis files
+    {% set enkf_files = [] %}
+    {% if DO_JEDIATMENS == True %}
+        {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "atmensstat",
+                                 ARCDIR ~ "/atmensstat." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+        {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "atminc.ensmean.nc",
+                                 ARCDIR ~ "/atmensstat." ~ RUN ~ "." ~ cycle_YMDH ~ ".ensmean.nc"]) %}
+    {% else %}
+        {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "enkfstat",
+                                 ARCDIR ~ "/enkfstat." ~ RUN ~ "." ~ cycle_YMDH ]) %}
+        {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "gsistat.ensmean",
+                                 ARCDIR ~ "/gsistat." ~ RUN ~ "." ~ cycle_YMDH ~ ".ensmean"]) %}
+    {% endif %}
+
+    # Construct the final file set
+    {% set file_set = file_set + enkf_files %}
+
+{% endif %}
+
+
+# Actually write the yaml
+mkdir:
+    - "{{ ARCDIR }}"
+
+    {% if DO_FIT2OBS == True %}
+    - "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}"
+    {% endif %}
+
+copy:
+   {% for source_dest_pair in file_set %}
+   - {{ source_dest_pair }}
+   {% endfor %}
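
The template above assembles file_set as a list of [source, destination] pairs and emits a plain YAML mapping with mkdir and copy keys for the copy step to consume. Below is a minimal, illustrative sketch of the rendering it assumes, using plain jinja2 and PyYAML rather than the workflow's own wxflow-based helpers; the filter and test names (to_YMDH, to_YMD, strftime, path_exists) are inferred from their usage above.

```python
# Editorial sketch only: shows the kind of Jinja2 environment these archive
# templates assume (the "do" extension plus date filters and a path_exists
# test), and that the rendered text is ordinary YAML with mkdir/copy keys.
# The real workflow renders these files with its own (wxflow-based) helpers.
import os
from datetime import datetime

import jinja2
import yaml

env = jinja2.Environment(extensions=["jinja2.ext.do"])   # {% do %} is used throughout
env.filters["to_YMDH"] = lambda dt: dt.strftime("%Y%m%d%H")
env.filters["to_YMD"] = lambda dt: dt.strftime("%Y%m%d")
env.filters["strftime"] = lambda dt, fmt: dt.strftime(fmt)
env.globals["path_exists"] = os.path.exists

snippet = (
    "{% set file_set = [] %}\n"
    "{% do file_set.append([COMIN ~ '/pgbanl.' ~ (current_cycle | to_YMDH),\n"
    "                       ARCDIR ~ '/pgbanl.' ~ (current_cycle | to_YMDH)]) %}\n"
    "mkdir:\n"
    "  - \"{{ ARCDIR }}\"\n"
    "copy:\n"
    "  {% for pair in file_set %}\n"
    "  - {{ pair }}\n"
    "  {% endfor %}\n"
)
plan = yaml.safe_load(env.from_string(snippet).render(
    COMIN="/com/gdas", ARCDIR="/arc", current_cycle=datetime(2024, 1, 1, 0)))
print(plan["copy"])   # [['/com/gdas/pgbanl.2024010100', '/arc/pgbanl.2024010100']]
```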
diff --git a/parm/archive/chem.yaml.j2 b/parm/archive/chem.yaml.j2
new file mode 100644
index 0000000000..33c6dcef57
--- /dev/null
+++ b/parm/archive/chem.yaml.j2
@@ -0,0 +1,7 @@
+chem:
+    {% set head = "gocart" %}
+    name: "CHEM"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/chem.tar"
+    required:
+        # TODO: explicitly list the files in this set
+        - "{{ COMIN_CHEM_HISTORY | relpath(ROTDIR) }}/{{ head }}*"
diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2
new file mode 100644
index 0000000000..bc5ef03cb8
--- /dev/null
+++ b/parm/archive/enkf.yaml.j2
@@ -0,0 +1,82 @@
+enkf:
+    name: "ENKF"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}.tar"
+    required:
+        # Logs
+        {% for mem in range(1, nmem_ens + 1) %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst_mem{{ '%03d' % mem }}.log"
+        {% endfor %}
+        {% for fhr in range(fhmin, fhmax + 1, fhout) %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}epos{{ '%03d' % (fhr - fhmin) }}.log"
+        {% endfor %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}echgres.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}esfc.log"
+        {% for grp in range(iaufhrs | length) %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ecen{{ '%03d' % grp }}.log"
+        {% endfor %}
+
+        {% if DO_JEDIATMENS %}
+        {% set steps = ["atmensanlinit", "atmensanlletkf", "atmensanlfv3inc", "atmensanlfinal"] %}
+        {% else %}
+        {% set steps = ["eobs", "eupd"] %}
+        {% if lobsdiag_forenkf %}
+        {% do steps.append("ediag") %}
+        {% else %}
+        {% for mem in range(1, nmem_ens + 1) %}
+        {% do steps.append("eomg_mem{{ '%03d' % mem }}") %}
+        {% endfor %}
+        {% endif %}
+        {% endif %}
+
+        {% for step in steps %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}{{ step }}.log"
+        {% endfor %}
+
+        # Ensemble mean and spread
+        {% for fhr in range(3, fhmax + 1, 3) %}
+        - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensmean.nc"
+        - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.ensmean.nc"
+        {% if ENKF_SPREAD %}
+        - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensspread.nc"
+        {% endif %}
+        {% endfor %}
+
+        # Ensemble mean state
+        {% if not DO_JEDIATMENS %}
+        {% set da_files = ["enkfstat",
+                           "gsistat.ensmean",
+                           "cnvstat.ensmean",
+                           "oznstat.ensmean",
+                           "radstat.ensmean"] %}
+        {% else %}
+        {% set da_files = ["atmens.yaml",
+                           "atminc.ensmean.nc",
+                           "atmensstat"] %}
+        {% endif %}
+        {% for file in da_files %}
+        - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}{{ file }}"
+        {% endfor %}
+
+        # Ensemble mean analyses/increments
+        # 6-hr analysis/increment
+        {% if do_calc_increment %}
+        - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmanl.ensmean.nc"
+        {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atminc.ensmean.nc"
+        {% endif %}
+
+        {% if DOIAU %}
+        # IAU increments/analyses
+
+        {% for fhr in iaufhrs if fhr != 6 %}
+        {% if do_calc_increment %}
+        # Store analyses instead of increments
+        - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensmean.nc"
+        {% else %}
+        # Store increments
+        - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmi{{ '%03d' % fhr }}.ensmean.nc"
+        {% endif %}
+        {% endfor %}
+
+        {% endif %}  # End IAU
+        # End of analysis mean increments/analyses
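
One rendering detail in the block above: inside a {% do %} statement a quoted string is a literal, so {{ ... }} placeholders are not expanded there, which is why the member log names are built with ~ concatenation. A quick illustrative check with plain jinja2 (not the workflow's rendering code):

```python
# Minimal check (illustrative): '{{ ... }}' inside a Jinja2 string literal stays
# verbatim, while '~' concatenation formats the member number as intended.
import jinja2

env = jinja2.Environment(extensions=["jinja2.ext.do"])

broken = env.from_string(
    """{% set steps = [] %}{% do steps.append("eomg_mem{{ '%03d' % mem }}") %}{{ steps[0] }}"""
).render(mem=7)

fixed = env.from_string(
    """{% set steps = [] %}{% do steps.append("eomg_mem" ~ '%03d' % mem) %}{{ steps[0] }}"""
).render(mem=7)

print(broken)  # eomg_mem{{ '%03d' % mem }}   <- placeholder kept as literal text
print(fixed)   # eomg_mem007
```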
diff --git a/parm/archive/enkf_grp.yaml.j2 b/parm/archive/enkf_grp.yaml.j2
new file mode 100644
index 0000000000..933ca45caf
--- /dev/null
+++ b/parm/archive/enkf_grp.yaml.j2
@@ -0,0 +1,29 @@
+enkf_grp:
+    name: "ENKF_GRP"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_grp{{ ENSGRP }}.tar"
+    required:
+        {% for mem in range(first_group_mem, last_group_mem + 1) %}
+        {% set imem = mem - first_group_mem %}
+        # Construct member COM directories
+        {% set COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_MEM_list[imem] %}
+        {% set COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_MEM_list[imem] %}
+        {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] %}
+
+        # Forecast data
+        {% for fhr in range(3, 10, 3) %}
+        - "{{ COMIN_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}atmf{{ "%03d" % fhr }}.nc"
+        {% endfor %}
+
+        # Only store the 6-hour surface forecast
+        - "{{ COMIN_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}sfcf006.nc"
+
+        # Store the individual member analysis data
+        {% if not lobsdiag_forenkf %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}gsistat"
+        {% endif %}
+        {% if do_calc_increment %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
+        {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc"
+        {% endif %}
+        {% endfor %}  # first_group_mem to last_group_mem
diff --git a/parm/archive/enkf_restarta_grp.yaml.j2 b/parm/archive/enkf_restarta_grp.yaml.j2
new file mode 100644
index 0000000000..41e03edc92
--- /dev/null
+++ b/parm/archive/enkf_restarta_grp.yaml.j2
@@ -0,0 +1,53 @@
+enkf_restarta_grp:
+    name: "ENKF_RESTARTA_GRP"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_restarta_grp{{ ENSGRP }}.tar"
+    required:
+        {% for mem in range(first_group_mem, last_group_mem + 1) %}
+        {% set imem = mem - first_group_mem %}
+        # Construct the pertinent member COM directories
+        {% set COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_MEM_list[imem] %}
+        {% set COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_MEM_list[imem] %}
+        {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] %}
+
+        # Store bias data
+        {% if not lobsdiag_forenkf %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias"
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_air"
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_int"
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_pc"
+        {% endif %}
+
+        # Member surface analysis data
+        {% if DOIAU_ENKF %}
+        {% set anl_delta = "-3H" | to_timedelta %}
+        {% else %}
+        {% set anl_delta = "0H" | to_timedelta %}
+        {% endif %}
+        {% set anl_time = current_cycle | add_to_datetime(anl_delta) %}
+        {% for itile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc"
+        {% endfor %}
+
+        # Member atmospheric analysis data
+        {% if do_calc_increment %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
+        {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc"
+        {% endif %}
+
+        # Member increments
+        {% for iaufhr in iaufhrs if iaufhr != 6 %}
+        {% set iaufhr = iaufhr %}
+        {% if do_calc_increment %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc"
+        {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc"
+        {% endif %}
+        {% endfor %}  # iaufhr in iaufhrs
+
+        # Conventional data
+        {% if not lobsdiag_forenkf and not DO_JEDIATMENS %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}cnvstat"
+        {% endif %}
+
+        {% endfor %}  # first_group_mem to last_group_mem
diff --git a/parm/archive/enkf_restartb_grp.yaml.j2 b/parm/archive/enkf_restartb_grp.yaml.j2
new file mode 100644
index 0000000000..7cd799f0a9
--- /dev/null
+++ b/parm/archive/enkf_restartb_grp.yaml.j2
@@ -0,0 +1,37 @@
+enkf_restartb_grp:
+    name: "ENKF_RESTARTB_GRP"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_restartb_grp{{ ENSGRP }}.tar"
+    required:
+        {% for mem in range(first_group_mem, last_group_mem + 1) %}
+        {% set imem = mem - first_group_mem %}
+        {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] %}
+
+        # Grab surface analysis data.
+        # If IAU is on, grab the beginning of the window, otherwise grab the center.
+        {% if DOIAU_ENKF %}
+        {% set offset_td = "-3H" | to_timedelta %}
+        {% else %}
+        {% set offset_td = "0H" | to_timedelta %}
+        {% endif %}
+        {% set offset_dt = current_cycle | add_to_datetime(offset_td) %}
+        {% set offset_YMD = offset_dt | to_YMD %}
+        {% set offset_HH = offset_dt | strftime("%H") %}
+        {% set prefix = offset_YMD + "." + offset_HH + "0000" %}
+        {% for itile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc"
+        {% endfor %}
+
+        # Now get the restart files.
+        {% for r_time in range(restart_interval, fhmax + 1, restart_interval) %}
+        {% set r_timedelta = (r_time | string + "H") | to_timedelta %}
+        {% set r_dt = current_cycle | add_to_datetime(r_timedelta) %}
+        {% set r_prefix = r_dt | to_YMD + "." + r_dt | strftime("%H") + "0000" %}
+        {% for itile in range(1, 7) %}
+        {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc"
+        {% endfor %}
+        {% endfor %}
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res"
+        - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc"
+        {% endfor %}
+        {% endfor %}
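
The restart templates above derive their file prefixes from the cycle time: the surface-analysis prefix shifts to the start of the IAU window when IAU is on, and the restart prefixes step through the forecast at the restart interval. A small sketch of that arithmetic, assuming the to_timedelta and add_to_datetime filters behave like ordinary datetime math (all values below are hypothetical):

```python
# Editorial sketch: the timestamp prefixes built above, assuming the
# to_timedelta / add_to_datetime filters behave like plain datetime arithmetic.
from datetime import datetime, timedelta

current_cycle = datetime(2024, 1, 1, 6)          # hypothetical 06z cycle
DOIAU_ENKF = True
restart_interval, fhmax = 6, 9

# Surface-analysis prefix: beginning of the IAU window if IAU is on
offset = timedelta(hours=-3) if DOIAU_ENKF else timedelta(0)
anl = current_cycle + offset
print(f"{anl:%Y%m%d}.{anl:%H}0000")              # 20240101.030000

# Restart prefixes at each restart interval through fhmax
for r_time in range(restart_interval, fhmax + 1, restart_interval):
    r_dt = current_cycle + timedelta(hours=r_time)
    print(f"{r_dt:%Y%m%d}.{r_dt:%H}0000")        # 20240101.120000
```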
diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2
new file mode 100644
index 0000000000..ce5054a82f
--- /dev/null
+++ b/parm/archive/gdas.yaml.j2
@@ -0,0 +1,163 @@
+gdas:
+    {% set head = "gdas.t" + cycle_HH + "z." %}
+    name: "GDAS"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas.tar"
+    required:
+        {% if MODE == "cycled" %}
+        # Cycled logs
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}prep.log"
+            {% if DO_JEDIATMVAR %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}prepatmiodaobs.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlinit.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfinal.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfv3inc.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlvar.log"
+            {% else %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}anal.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}analdiag.log"
+            {% endif %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlupp.log"
+            {% if DO_JEDIOCNVAR %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}prepoceanobs.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalprep.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}marinebmat.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalrun.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalpost.log"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalchkpt.log"
+                {% if DOHYBVAR %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalecen.log"
+                {% endif %}
+            {% endif %}
+            {% if DO_VRFY_OCEANDA %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalvrfy.log"
+            {% endif %}
+
+        # Analysis GRIB2 (sub-sampled) data
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx"
+
+        # Analysis netCDF (raw) data
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc"
+
+            {% if DOHYBVAR %}
+        # Ensemble analysis residual
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc"
+                {% if DOIAU %}
+        # Ensemble IAU analysis residuals
+                    {% for fhr in iaufhrs if fhr != 6 %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc"
+                    {% endfor %}
+                {% endif %}
+        # End of ensemble analysis mean residuals
+            {% endif %}
+
+        # Analysis state
+            {% if DO_JEDIATMVAR %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat"
+            {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}oznstat"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat"
+            {% endif %}
+            {% if AERO_ANL_RUN == "gdas" or AERO_ANL_RUN == "both" %}
+        - "{{ COMIN_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat"
+            {% endif %}
+            {% if DO_PREP_OBS_AERO %}
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs"
+            {% endif %}
+            {% if DO_JEDISNOWDA %}
+        - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz"
+            {% endif %}
+
+        # Ozone verification
+            {% if DO_VERFOZN %}
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_cnt.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_diag.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_pen.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/stdout.time.tar.gz"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/horiz/stdout.horiz.tar.gz"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}verfozn.log"
+            {% endif %}
+
+        # Radiance verification
+            {% if DO_VERFRAD %}
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_angle.tar.gz"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcoef.tar.gz"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcor.tar.gz"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_time.tar.gz"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}verfrad.log"
+            {% endif %}
+
+        # Minimization monitor
+            {% if DO_VMINMON %}
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt"
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}vminmon.log"
+            {% endif %}
+        {% endif %}  # End of cycled data
+
+        # Forecast and post logs
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst.log"
+
+        {% for fhr in range(0, FHMAX + 1, 3) %}
+            {% set fhr3 = '%03d' % fhr %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_prod_f{{ fhr3 }}.log"
+            {% if not WRITE_DOPOST %}
+        - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_upp_f{{ fhr3 }}.log"
+            {% endif %}  ## not WRITE_DOPOST
+        # Forecast GRIB2 data
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ fhr3 }}"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ fhr3 }}.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ fhr3 }}"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ fhr3 }}.idx"
+        # Forecast GRIB2 fluxes
+        - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ fhr3 }}.grib2"
+        - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ fhr3 }}.grib2.idx"
+        # FV3 log
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ fhr3 }}.txt"
+        # Raw netCDF forecasts
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ fhr3 }}.nc"
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ fhr3 }}.nc"
+        {% endfor %}
+    optional:
+        {% if MODE == "cycled" %}
+            # Radiance verification (only created if there are problems)
+            {% if DO_VERFRAD %}
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_diag.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_pen.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/low_count.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/warning.{{ cycle_YMDH }}"
+            {% endif %}
+
+            {% if DO_VERFOZN %}
+                # Not all of these ozone instruments always produce data
+                {% set oznmon_types = [
+                   "gome_metop-b", "omi_aura", "ompslp_npp", "ompsnp_n20",
+                   "ompsnp_npp", "ompstc8_n20", "ompstc8_npp", "sbuv2_n19"
+                ] %}
+                {% for group in [ "horiz", "time" ] %}
+                    {% if group == "horiz" %}
+                        {% set suffix = ".gz" %}
+                    {% else %}
+                        {% set suffix = "" %}
+                    {% endif %}
+                    {% for type in oznmon_types %}
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.ctl"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.{{ cycle_YMDH }}.ieee_d{{ suffix }}"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.ctl"
+        - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.{{ cycle_YMDH }}.ieee_d{{ suffix }}"
+                    {% endfor %}
+                {% endfor %}
+            {% endif %}
+        {% endif %}
diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2
new file mode 100644
index 0000000000..4c0522fed7
--- /dev/null
+++ b/parm/archive/gdas_restarta.yaml.j2
@@ -0,0 +1,54 @@
+gdas_restarta:
+    {% set head = "gdas.t" + cycle_HH + "z." %}
+    name: "GDAS_RESTARTA"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas_restarta.tar"
+    required:
+        # Deterministic analysis increments
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc"
+        # IAU increments
+        {% for iaufhr in iaufhrs if iaufhr != 6 %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc"
+        {% endfor %}
+
+        # Surface analysis tiles
+        {% if DOHYBVAR and DOIAU %}
+        {% set anl_offset = "-3H" %}
+        {% else %}
+        {% set anl_offset = "0H" %}
+        {% endif %}
+        {% set anl_timedelta = anl_offset | to_timedelta %}
+        {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %}
+        {% for itile in range(1,7) %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc"
+        {% endfor %}
+
+        # Initial biases
+        {% if not DO_JEDIATMVAR %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_air"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_pc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_int"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt"
+        {% endif %}
+
+        # Snow surface data
+        {% if DO_JEDISNOWDA %}
+        {% for itile in range(1,7) %}
+        # Snow analysis is 3dvar
+        - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/snowinc.{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc"
+        - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc"
+        {% endfor %}
+        {% endif %}
+
+        # Snow configuration yaml
+        {% if DO_JEDISNOWDA %}
+        - "{{ COMIN_CONF | relpath(ROTDIR) }}/{{ head }}letkfoi.yaml"
+        {% endif %}
+
+        # Input BUFR files
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles"
diff --git a/parm/archive/gdas_restartb.yaml.j2 b/parm/archive/gdas_restartb.yaml.j2
new file mode 100644
index 0000000000..0bbf517fb2
--- /dev/null
+++ b/parm/archive/gdas_restartb.yaml.j2
@@ -0,0 +1,39 @@
+gdas_restartb:
+    {% set head = "gdas.t" + cycle_HH + "z." %}
+    name: "GDAS_RESTARTB"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas_restartb.tar"
+    required:
+        # Grab the surface analysis data.
+        # If IAU is on, grab the beginning of the window.
+        {% if DOIAU %}
+        {% set offset_td = "-3H" | to_timedelta %}
+        {% set offset_dt = current_cycle | add_to_datetime(offset_td) %}
+        {% set offset_YMD = offset_dt | to_YMD %}
+        {% set offset_HH = offset_dt | strftime("%H") %}
+        {% set prefix = offset_YMD + "." + offset_HH + "0000" %}
+        {% for itile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc"
+        {% endfor %}
+        {% endif %}
+
+        # Regardless, always grab the center surface analysis data.
+        {% set prefix = cycle_YMD + "." + cycle_HH + "0000" %}
+        {% for itile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc"
+        {% endfor %}
+
+        # Now get the restart files.
+        {% for r_time in range(restart_interval_gdas, FHMAX + 1, restart_interval_gdas) %}
+        {% set r_timedelta = (r_time | string + "H") | to_timedelta %}
+        {% set r_dt = current_cycle | add_to_datetime(r_timedelta) %}
+        {% set r_YMD = r_dt | to_YMD %}
+        {% set r_HH = r_dt | strftime("%H") %}
+        {% set r_prefix = r_YMD + "." + r_HH + "0000" %}
+        {% for itile in range(1, 7) %}
+        {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc"
+        {% endfor %}
+        {% endfor %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res"
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc"
+        {% endfor %}
diff --git a/parm/archive/gdasice.yaml.j2 b/parm/archive/gdasice.yaml.j2
new file mode 100644
index 0000000000..da02decf83
--- /dev/null
+++ b/parm/archive/gdasice.yaml.j2
@@ -0,0 +1,10 @@
+gdasice:
+    {% set head = "gdas.ice.t" + cycle_HH + "z." %}
+    name: "GDASICE"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice.tar"
+    required:
+        - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc"
+        {% for fhr in range(FHOUT_ICE, FHMAX+1, FHOUT_ICE) %}
+        - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc"
+        {% endfor %}
+        - '{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.ice_in'
diff --git a/parm/archive/gdasice_restart.yaml.j2 b/parm/archive/gdasice_restart.yaml.j2
new file mode 100644
index 0000000000..15c8ba627d
--- /dev/null
+++ b/parm/archive/gdasice_restart.yaml.j2
@@ -0,0 +1,7 @@
+gdasice_restart:
+    {% set head = "gdas.ice.t" + cycle_HH + "z." %}
+    name: "GDASICE_RESTART"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice_restart.tar"
+    required:
+        # TODO explicitly name the restart files to archive
+        - '{{ COMIN_ICE_RESTART | relpath(ROTDIR) }}/*'
diff --git a/parm/archive/gdasocean.yaml.j2 b/parm/archive/gdasocean.yaml.j2
new file mode 100644
index 0000000000..9e6ca38851
--- /dev/null
+++ b/parm/archive/gdasocean.yaml.j2
@@ -0,0 +1,9 @@
+gdasocean:
+    {% set head = "gdas.ocean.t" + cycle_HH + "z." %}
+    name: "GDASOCEAN"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean.tar"
+    required:
+        {% for fhr in range(FHMIN, FHMAX + 1, FHOUT_OCN) %}
+        - "{{ COMIN_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc"
+        {% endfor %}
+        - '{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.MOM_input'
diff --git a/parm/archive/gdasocean_analysis.yaml.j2 b/parm/archive/gdasocean_analysis.yaml.j2
new file mode 100644
index 0000000000..b7c057eacf
--- /dev/null
+++ b/parm/archive/gdasocean_analysis.yaml.j2
@@ -0,0 +1,32 @@
+gdasocean_analysis:
+    {% set head = "gdas.t" + cycle_HH + "z." %}
+    name: "GDASOCEAN_ANALYSIS"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_analysis.tar"
+    required:
+        # analysis and analysis increments
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocninc.nc'
+        {% for domain in ["ocn", "ice"] %}
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.incr.nc'
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}ana.nc'
+        {% endfor %}
+
+        # static background error
+        - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.bkgerr_stddev.nc'
+        - '{{ COMIN_ICE_BMATRIX | relpath(ROTDIR) }}/{{ head }}ice.bkgerr_stddev.nc'
+
+        # ensemble background error
+        {% if NMEM_ENS > 2 %}
+        - '{{ COMIN_ICE_BMATRIX | relpath(ROTDIR) }}/{{ head }}ice.ens_weights.nc'
+        - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.ens_weights.nc'
+        - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.recentering_error.nc'
+        {% for diag_type in ["ssh_steric_stddev", "ssh_unbal_stddev", "ssh_total_stddev", "steric_explained_variance"] %}
+        - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.{{ diag_type }}.nc'
+        {% endfor %}
+        {% endif %}
+
+        # obs space diags
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.*.stats.csv'
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/diags/*.nc4'
+
+        # runtime configs
+        - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/yaml/*.yaml'
diff --git a/parm/archive/gdasocean_restart.yaml.j2 b/parm/archive/gdasocean_restart.yaml.j2
new file mode 100644
index 0000000000..f2b6bfb875
--- /dev/null
+++ b/parm/archive/gdasocean_restart.yaml.j2
@@ -0,0 +1,8 @@
+gdasocean_restart:
+    {% set head = "gdas.ocean.t" + cycle_HH + "z." %}
+    name: "GDASOCEAN_RESTART"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_restart.tar"
+    required:
+        # TODO explicitly name the restart files to archive
+        - '{{ COMIN_OCEAN_RESTART | relpath(ROTDIR) }}/*'
+        - '{{ COMIN_MED_RESTART | relpath(ROTDIR) }}/*'
diff --git a/parm/archive/gdaswave.yaml.j2 b/parm/archive/gdaswave.yaml.j2
new file mode 100644
index 0000000000..220770b38d
--- /dev/null
+++ b/parm/archive/gdaswave.yaml.j2
@@ -0,0 +1,8 @@
+gdaswave:
+    {% set head = "gdas.wave.t" + cycle_HH + "z." %}
+    name: "GDASWAVE"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave.tar"
+    required:
+        # TODO explicitly name the wave grid/station files to archive
+        - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*"
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}*"
diff --git a/parm/archive/gdaswave_restart.yaml.j2 b/parm/archive/gdaswave_restart.yaml.j2
new file mode 100644
index 0000000000..7ada504e2a
--- /dev/null
+++ b/parm/archive/gdaswave_restart.yaml.j2
@@ -0,0 +1,6 @@
+gdaswave_restart:
+    name: "GDASWAVE_RESTART"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave_restart.tar"
+    required:
+        # TODO explicitly name the wave restart files to archive
+        - "{{ COMIN_WAVE_RESTART | relpath(ROTDIR) }}/*"
diff --git a/parm/archive/gfs_downstream.yaml.j2 b/parm/archive/gfs_downstream.yaml.j2
new file mode 100644
index 0000000000..ed5317b42c
--- /dev/null
+++ b/parm/archive/gfs_downstream.yaml.j2
@@ -0,0 +1,12 @@
+gfs_downstream:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_DOWNSTREAM"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_downstream.tar"
+    required:
+        - "{{ COMIN_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.sfc"
+        - "{{ COMIN_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.snd"
+        {% for i in range(1, NUM_SND_COLLECTIVES) %}
+        - "{{ COMIN_ATMOS_WMO | relpath(ROTDIR) }}/gfs_collective{{ i }}.postsnd_{{ cycle_HH }}"
+        {% endfor %}
+        - "{{ COMIN_ATMOS_BUFR | relpath(ROTDIR) }}/bufr.t{{ cycle_HH }}z"
+        - "{{ COMIN_ATMOS_BUFR | relpath(ROTDIR) }}/gfs.t{{ cycle_HH }}z.bufrsnd.tar.gz"
diff --git a/parm/archive/gfs_flux.yaml.j2 b/parm/archive/gfs_flux.yaml.j2
new file mode 100644
index 0000000000..46bd0624b6
--- /dev/null
+++ b/parm/archive/gfs_flux.yaml.j2
@@ -0,0 +1,9 @@
+gfs_flux:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_FLUX"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux.tar"
+    required:
+        {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %}
+        - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2"
+        - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx"
+        {% endfor %}
diff --git a/parm/archive/gfs_flux_1p00.yaml.j2 b/parm/archive/gfs_flux_1p00.yaml.j2
new file mode 100644
index 0000000000..97fcd6e4d2
--- /dev/null
+++ b/parm/archive/gfs_flux_1p00.yaml.j2
@@ -0,0 +1,9 @@
+gfs_flux_1p00:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_FLUX_1P00"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux_1p00.tar"
+    required:
+        {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %}
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}.idx"
+        {% endfor %}
diff --git a/parm/archive/gfs_netcdfa.yaml.j2 b/parm/archive/gfs_netcdfa.yaml.j2
new file mode 100644
index 0000000000..8c0d4a813f
--- /dev/null
+++ b/parm/archive/gfs_netcdfa.yaml.j2
@@ -0,0 +1,16 @@
+gfs_netcdfa:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_NETCDFA"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfa.tar"
+    required:
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc"
+        {% for iauhr in iaufhrs if iauhr != 6 %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc"
+        {% endfor %}
+    optional:
+        {% if not DO_JEDIATMVAR %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt"
+        {% endif %}
diff --git a/parm/archive/gfs_netcdfb.yaml.j2 b/parm/archive/gfs_netcdfb.yaml.j2
new file mode 100644
index 0000000000..727f054715
--- /dev/null
+++ b/parm/archive/gfs_netcdfb.yaml.j2
@@ -0,0 +1,9 @@
+gfs_netcdfb:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_NETCDFB"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfb.tar"
+    required:
+        {% for fhr in range(0, ARCH_GAUSSIAN_FHMAX + ARCH_GAUSSIAN_FHINC, ARCH_GAUSSIAN_FHINC) %}
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc"
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc"
+        {% endfor %}
diff --git a/parm/archive/gfs_pgrb2b.yaml.j2 b/parm/archive/gfs_pgrb2b.yaml.j2
new file mode 100644
index 0000000000..ca20d1a3d8
--- /dev/null
+++ b/parm/archive/gfs_pgrb2b.yaml.j2
@@ -0,0 +1,19 @@
+gfs_pgrb2b:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_PGRB2B"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_pgrb2b.tar"
+    required:
+        {% if MODE == "cycled" %}
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl.idx"
+        {% endif %}
+        {% if ARCH_GAUSSIAN %}
+        {% for fhr in range(0, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %}
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}.idx"
+        {% endfor %}
+        {% endif %}
diff --git a/parm/archive/gfs_restarta.yaml.j2 b/parm/archive/gfs_restarta.yaml.j2
new file mode 100644
index 0000000000..8f6a0b6c10
--- /dev/null
+++ b/parm/archive/gfs_restarta.yaml.j2
@@ -0,0 +1,23 @@
+gfs_restarta:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFS_RESTARTA"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_restarta.tar"
+    required:
+        {% if MODE == "cycled" %}
+        {% if DOHYBVAR and DOIAU %}
+        {% set anl_offset = "-3H" %}
+        {% else %}
+        {% set anl_offset = "0H" %}
+        {% endif %}
+        {% set anl_timedelta = anl_offset | to_timedelta %}
+        {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %}
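+        # For example (assumed values): with DOHYBVAR=YES, DOIAU=YES, and a 2021032418 cycle,
+        # anl_offset is "-3H" and anl_time becomes 2021032415, so the 15z sfcanl tiles are archived.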
+        {% for i_tile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ i_tile }}.nc"
+        {% endfor %}
+        {% elif MODE == "forecast-only" %}
+        - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_ctrl.nc"
+        {% for i_tile in range(1, 7) %}
+        - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_data.tile{{ i_tile }}.nc"
+        - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/sfc_data.tile{{ i_tile }}.nc"
+        {% endfor %}
+        {% endif %}
diff --git a/parm/archive/gfsa.yaml.j2 b/parm/archive/gfsa.yaml.j2
new file mode 100644
index 0000000000..4a86778e2e
--- /dev/null
+++ b/parm/archive/gfsa.yaml.j2
@@ -0,0 +1,68 @@
+gfsa:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFSA"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfsa.tar"
+    required:
+        # Logs
+        # TODO explicitly name all logs to include
+        {% for log in glob("logs/" ~ cycle_YMDH ~ "/gfs*.log") %}
+        {% if not "gfsarch.log" in log %}
+        - "{{ log }}"
+        {% endif %}
+        {% endfor %}
+
+        # UFS configuration
+        - "{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.input.nml"
+
+        {% if MODE == "cycled" %}
+        # Analysis GRIB2 (gridded) data
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx"
+
+        {% if DO_VMINMON %}
+        # Minimization monitor
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d"
+        - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt"
+        {% endif %}
+
+        # State data
+        {% if DO_JEDIATMVAR %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml"
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat"
+        {% else %}
+        - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat"
+        {% endif %}
+        {% if AERO_ANL_RUN == "gfs" or AERO_ANL_RUN == "both" %}
+        - "{{ COMIN_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat"
+        {% endif %}
+        {% if DO_PREP_OBS_AERO %}
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs"
+        {% endif %}
+
+        # BUFR inputs
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr"
+        - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles"
+        {% endif %}  # Full cycle
+
+        # Forecast GRIB2 products
+        {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %}
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx"
+        - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt"
+        {% endfor %}
+
+    optional:
+        # Cyclone tracking data; only present if there's something to track.
+        - "{{ COMIN_ATMOS_TRACK | relpath(ROTDIR) }}/avno.t{{ cycle_HH }}z.cyclone.trackatcfunix"
+        - "{{ COMIN_ATMOS_TRACK | relpath(ROTDIR) }}/avnop.t{{ cycle_HH }}z.cyclone.trackatcfunix"
+        - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}"
+        - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}"
diff --git a/parm/archive/gfsb.yaml.j2 b/parm/archive/gfsb.yaml.j2
new file mode 100644
index 0000000000..e6ffa05766
--- /dev/null
+++ b/parm/archive/gfsb.yaml.j2
@@ -0,0 +1,20 @@
+gfsb:
+    {% set head = "gfs.t" + cycle_HH + "z." %}
+    name: "GFSB"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfsb.tar"
+    required:
+        {% if MODE == "cycled" %}
+        # GRIB2 (subsampled) analysis data
+        - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl"
+        - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx"
+        {% endif %}
+
+        # GRIB2 forecast data
+        {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %}
+        - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}.idx"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}"
+        - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx"
+        {% endfor %}
diff --git a/parm/archive/gfswave.yaml.j2 b/parm/archive/gfswave.yaml.j2
new file mode 100644
index 0000000000..6909421757
--- /dev/null
+++ b/parm/archive/gfswave.yaml.j2
@@ -0,0 +1,30 @@
+gfswave:
+    {% set head = "gfswave.t" + cycle_HH + "z." %}
+    name: "GFSWAVE"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfswave.tar"
+    required:
+        # Wave GRIB2 regional forecast products
+        {% for fh in range(0, FHMAX_HF_WAV + FHOUT_HF_WAV, FHOUT_HF_WAV) %}
+        # NOTE This is as explicit as possible without major logic to parse wavepostGRD.
+        #      Matches files of the form "gfswave.tCCz.<region>.<res>.fHHH.grib2".
+        - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2"
+        - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx"
+        {% endfor %}
+
+        # Global wave GRIB2 forecast products
+        {% for fh in range(FHMAX_HF_WAV + FHOUT_WAV, FHMAX_WAV_GFS + FHOUT_WAV, FHOUT_WAV) %}
+        - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2"
+        - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx"
+        {% endfor %}
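+        # For example (assumed values): with FHMAX_HF_WAV=120, FHOUT_HF_WAV=1, FHOUT_WAV=3, and
+        # FHMAX_WAV_GFS=384, the first loop covers f000-f120 hourly and the second f123-f384 every 3 hours.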
+
+        # Wave bulletins
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}bull_tar"
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}cbull_tar"
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}spec_tar.gz"
+
+        # Wave IBP bulletins
+        {% if DOIBP_WAV %}
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpbull_tar"
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpcbull_tar"
+        - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibp_tar"
+        {% endif %}
diff --git a/parm/archive/ice_6hravg.yaml.j2 b/parm/archive/ice_6hravg.yaml.j2
new file mode 100644
index 0000000000..6eb64ae70d
--- /dev/null
+++ b/parm/archive/ice_6hravg.yaml.j2
@@ -0,0 +1,9 @@
+ice_6hravg:
+    {% set head = "gfs.ice.t" + cycle_HH + "z." %}
+    name: "ICE_6HRAVG"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ice_6hravg.tar"
+    required:
+        - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc"
+        {% for fhr in range(6, FHMAX_GFS + 6, 6) %}
+        - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc"
+        {% endfor %}
diff --git a/parm/archive/ice_grib2.yaml.j2 b/parm/archive/ice_grib2.yaml.j2
new file mode 100644
index 0000000000..04bc2f5873
--- /dev/null
+++ b/parm/archive/ice_grib2.yaml.j2
@@ -0,0 +1,19 @@
+ice_grib2:
+    {% set head = "gfs.ice.t" + cycle_HH + "z." %}
+    name: "ICE_GRIB2"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ice_grib2.tar"
+    required:
+        # Ice forecast GRIB2 products
+        {% for fhr in range(FHOUT_ICE_GFS, FHMAX_GFS + FHOUT_ICE_GFS, FHOUT_ICE_GFS) %}
+        {% set fhr3 = '%03d' % fhr %}
+        {% if ICERES == 500 %}
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx"
+        {% elif ICERES == 100 %}
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx"
+        {% elif ICERES == 25 or ICERES == "025" %}
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx"
+        {% endif %}
+        {% endfor %}
diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2
new file mode 100644
index 0000000000..3ebd52dbad
--- /dev/null
+++ b/parm/archive/master_enkf.yaml.j2
@@ -0,0 +1,102 @@
+# Set variables/lists needed to parse the enkf templates
+{% set cycle_HH = current_cycle | strftime("%H") %}
+{% set cycle_YMD = current_cycle | to_YMD %}
+{% set cycle_YMDH = current_cycle | to_YMDH %}
+{% set head = RUN + ".t" + cycle_HH + "z." %}
+
+# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
+{% if IAUFHRS is string %}
+    # "3,6,9"
+    {% set iaufhrs = [] %}
+    {% for iaufhr in IAUFHRS.split(",") %}
+        {% do iaufhrs.append(iaufhr | int) %}
+    {% endfor %}
+{% else %}
+    # 6 (integer)
+    {% set iaufhrs = [IAUFHRS] %}
+{% endif %}
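+# For example (assumed values): IAUFHRS="3,6,9" yields iaufhrs = [3, 6, 9], while IAUFHRS=6 yields iaufhrs = [6]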
+
+# Repeat for IAUFHRS_ENKF
+{% if IAUFHRS_ENKF is string %}
+    {% set iaufhrs_enkf = [] %}
+    {% for iaufhr in IAUFHRS_ENKF.split(",") %}
+        {% do iaufhrs_enkf.append(iaufhr | int) %}
+    {% endfor %}
+{% else %}
+    {% set iaufhrs_enkf = [IAUFHRS_ENKF] %}
+{% endif %}
+
+# Determine which data to archive
+datasets:
+{% if ENSGRP == 0 %}
+    {% filter indent(width=4) %}
+        # Archive the ensemble means and spreads
+{% include "enkf.yaml.j2" %}
+    {% endfilter %}
+{% else %}
+
+    # Archive individual member data
+    # First, construct individual member directories from templates
+    #    COMIN_ATMOS_ANALYSIS_MEM, COMIN_ATMOS_HISTORY_MEM, and COMIN_ATMOS_RESTART_MEM
+
+    # Declare to-be-filled lists of member COM directories
+    {% set COMIN_ATMOS_ANALYSIS_MEM_list = [] %}
+    {% set COMIN_ATMOS_RESTART_MEM_list = [] %}
+    {% set COMIN_ATMOS_HISTORY_MEM_list = [] %}
+
+    # Determine which ensemble members belong to this group
+    {% set first_group_mem = (ENSGRP - 1) * NMEM_EARCGRP + 1 %}
+    {% set last_group_mem = [ ENSGRP * NMEM_EARCGRP, nmem_ens ] | min %}
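+    # For example (assumed values): with ENSGRP=2, NMEM_EARCGRP=10, and nmem_ens=30,
+    # this group archives members 11 through 20 (mem011-mem020).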
+
+    # Construct member COM directories for the group
+    {% for mem in range(first_group_mem, last_group_mem + 1) %}
+
+        # Declare a dict of search and replace terms to run on each template
+        {% set mem_char = 'mem%03d' | format(mem) %}
+        {% set tmpl_dict = ({ '${ROTDIR}':ROTDIR,
+                              '${RUN}':RUN,
+                              '${YMD}':cycle_YMD,
+                              '${HH}':cycle_HH,
+                              '${MEMDIR}': mem_char }) %}
+
+        {% set COMIN_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) %}
+        {% set COMIN_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_TMPL | replace_tmpl(tmpl_dict) %}
+        {% set COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) %}
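+        # For example (assumed value): for mem=7, mem_char is "mem007", so every "${MEMDIR}" in the
+        # COM templates is replaced with "mem007" (and similarly for ROTDIR, RUN, YMD, and HH).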
+
+        # Append the member COM directories
+        {% do COMIN_ATMOS_ANALYSIS_MEM_list.append(COMIN_ATMOS_ANALYSIS_MEM) %}
+        {% do COMIN_ATMOS_HISTORY_MEM_list.append(COMIN_ATMOS_HISTORY_MEM) %}
+        {% do COMIN_ATMOS_RESTART_MEM_list.append(COMIN_ATMOS_RESTART_MEM) %}
+
+    {% endfor %}
+
+    # Archive member data
+    {% filter indent(width=4) %}
+{% include "enkf_grp.yaml.j2" %}
+    {% endfilter %}
+
+    # Determine if restarts should be saved
+    {% set save_warm_start_forecast, save_warm_start_cycled = ( False, False ) %}
+
+    # Save the increments and restarts every ARCH_WARMICFREQ days
+    # The ensemble increments (group a) should be saved on the ARCH_CYC
+    {% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %}
+        {% if ARCH_CYC == cycle_HH | int %}
+            {% filter indent(width=4) %}
+{% include "enkf_restarta_grp.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+    {% endif %}
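+    # For example (assumed values): with ARCH_WARMICFREQ=4 and ARCH_CYC=0, the group-a increments
+    # are archived at the 00z cycle every fourth day after SDATE.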
+
+    # The ensemble ICs (group b) are restarts and always lag increments by assim_freq
+    {% set ics_offset = (assim_freq | string + "H") | to_timedelta %}
+    {% if (current_cycle | add_to_datetime(ics_offset) - SDATE).days % ARCH_WARMICFREQ == 0 %}
+        {% if (ARCH_CYC - assim_freq) % 24 == cycle_HH | int %}
+            {% filter indent(width=4) %}
+{% include "enkf_restartb_grp.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+    {% endif %}
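+    # For example (assumed values): with ARCH_CYC=0 and assim_freq=6, (0 - 6) % 24 is 18, so the
+    # group-b restarts are archived at the 18z cycle, one assimilation cycle before the 00z increments.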
+
+    # End of individual member archiving
+{% endif %}
diff --git a/parm/archive/master_enkfgdas.yaml.j2 b/parm/archive/master_enkfgdas.yaml.j2
new file mode 100644
index 0000000000..e21f6a381b
--- /dev/null
+++ b/parm/archive/master_enkfgdas.yaml.j2
@@ -0,0 +1,6 @@
+# Set variables specific to enkfgdas runs, then parse the master_enkf template
+{% set (fhmin, fhmax, fhout) = (FHMIN_ENKF, FHMAX_ENKF, FHOUT_ENKF) %}
+{% set do_calc_increment = DO_CALC_INCREMENT %}
+{% set nmem_ens = NMEM_ENS %}
+{% set restart_interval = restart_interval_enkfgdas %}
+{% include "master_enkf.yaml.j2" %}
diff --git a/parm/archive/master_enkfgfs.yaml.j2 b/parm/archive/master_enkfgfs.yaml.j2
new file mode 100644
index 0000000000..93ec38b660
--- /dev/null
+++ b/parm/archive/master_enkfgfs.yaml.j2
@@ -0,0 +1,6 @@
+# Set variables specific to enkfgfs runs, then parse the master_enkf template
+{% set (fhmin, fhmax, fhout) = (FHMIN_ENKF, FHMAX_ENKF_GFS, FHOUT_ENKF_GFS) %}
+{% set do_calc_increment = DO_CALC_INCREMENT_ENKF_GFS %}
+{% set nmem_ens = NMEM_ENS_GFS %}
+{% set restart_interval = restart_interval_enkfgfs %}
+{% include "master_enkf.yaml.j2" %}
diff --git a/parm/archive/master_gdas.yaml.j2 b/parm/archive/master_gdas.yaml.j2
new file mode 100644
index 0000000000..30a2175653
--- /dev/null
+++ b/parm/archive/master_gdas.yaml.j2
@@ -0,0 +1,109 @@
+{% set cycle_HH = current_cycle | strftime("%H") %}
+{% set cycle_YMD = current_cycle | to_YMD %}
+{% set cycle_YMDH = current_cycle | to_YMDH %}
+{% set head = "gdas.t" + cycle_HH + "z." %}
+
+# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
+{% if IAUFHRS is string %}
+    {% set iaufhrs = [] %}
+    {% for iaufhr in IAUFHRS.split(",") %}
+        {% do iaufhrs.append(iaufhr | int) %}
+    {% endfor %}
+{% else %}
+    {% set iaufhrs = [IAUFHRS] %}
+{% endif %}
+
+datasets:
+# Always archive atmosphere forecast/analysis data
+{% filter indent(width=4) %}
+{% include "gdas.yaml.j2" %}
+{% endfilter %}
+
+{% if DO_ICE %}
+    # Ice data
+    {% filter indent(width=4) %}
+{% include "gdasice.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if DO_OCN %}
+    # Ocean forecast products
+    {% filter indent(width=4) %}
+{% include "gdasocean.yaml.j2" %}
+    {% endfilter %}
+    {% if DO_JEDIOCNVAR and MODE == "cycled" %}
+        # Ocean analysis products
+        {% filter indent(width=4) %}
+{% include "gdasocean_analysis.yaml.j2" %}
+        {% endfilter %}
+    {% endif %}
+{% endif %}
+
+{% if DO_WAVE %}
+    # Wave products
+    {% filter indent(width=4) %}
+{% include "gdaswave.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if MODE == "cycled" %}
+    # Determine if we will save restart ICs or not (only valid for cycled)
+    {% set save_warm_start_forecast, save_warm_start_cycled = ( False, False ) %}
+
+    {% if ARCH_CYC == cycle_HH | int %}
+        # Save the forecast-only cycle ICs every ARCH_WARMICFREQ or ARCH_FCSTICFREQ days
+        {% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %}
+            {% set save_warm_start_forecast = True %}
+        {% elif (current_cycle - SDATE).days % ARCH_FCSTICFREQ == 0 %}
+            {% set save_warm_start_forecast = True %}
+        {% endif %}
+    {% endif %}
+
+    # The GDAS ICs (group b) are restarts and always lag increments by assim_freq
+    {% if (ARCH_CYC - assim_freq) % 24 == cycle_HH | int %}
+        {% set ics_offset = (assim_freq | string + "H") | to_timedelta %}
+        {% if (current_cycle | add_to_datetime(ics_offset) - SDATE).days % ARCH_WARMICFREQ == 0 %}
+            {% set save_warm_start_cycled = True %}
+        {% endif %}
+    {% endif %}
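+    # For example (assumed values): with ARCH_CYC=0 and assim_freq=6, forecast-only ICs are flagged
+    # at qualifying 00z cycles and cycled (warm start) ICs at the preceding 18z cycles.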
+
+    {% if save_warm_start_forecast %}
+        # Save warm start forecast-only data
+        # Atmosphere restarts
+        {% filter indent(width=4) %}
+{% include "gdas_restarta.yaml.j2" %}
+        {% endfilter %}
+
+        {% if DO_WAVE %}
+            # Wave restarts
+            {% filter indent(width=4) %}
+{% include "gdaswave_restart.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+
+        {% if DO_OCN %}
+            # Ocean restarts
+            {% filter indent(width=4) %}
+{% include "gdasocean_restart.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+
+        {% if DO_ICE %}
+            # Ice restarts
+            {% filter indent(width=4) %}
+{% include "gdasice_restart.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+
+        # End of forecast-only restarts
+    {% endif %}
+
+    {% if save_warm_start_cycled %}
+        # Save warm start cycled restarts
+        {% filter indent(width=4) %}
+{% include "gdas_restartb.yaml.j2" %}
+        {% endfilter %}
+    {% endif %}
+
+    # End of restart checking
+{% endif %}
diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2
new file mode 100644
index 0000000000..6ce27a536a
--- /dev/null
+++ b/parm/archive/master_gfs.yaml.j2
@@ -0,0 +1,113 @@
+# Set variables/lists needed to parse the gfs templates
+{% set cycle_HH = current_cycle | strftime("%H") %}
+{% set cycle_YMD = current_cycle | to_YMD %}
+{% set cycle_YMDH = current_cycle | to_YMDH %}
+
+# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
+{% if IAUFHRS is string %}
+    # "3,6,9"
+    {% set iaufhrs = [] %}
+    {% for iaufhr in IAUFHRS.split(",") %}
+        {% do iaufhrs.append(iaufhr | int) %}
+    {% endfor %}
+{% else %}
+    # 6 (integer)
+    {% set iaufhrs = [IAUFHRS] %}
+{% endif %}
+
+# Determine which data to archive
+datasets:
+# Always archive atmosphere forecast/analysis data
+{% filter indent(width=4) %}
+{% include "gfsa.yaml.j2" %}
+{% include "gfsb.yaml.j2" %}
+{% include "gfs_netcdfb.yaml.j2" %}
+{% endfilter %}
+
+{% if ARCH_GAUSSIAN %}
+    # Archive Gaussian data
+    {% filter indent(width=4) %}
+{% include "gfs_flux.yaml.j2" %}
+{% include "gfs_netcdfb.yaml.j2" %}
+{% include "gfs_pgrb2b.yaml.j2" %}
+    {% endfilter %}
+    {% if MODE == "cycled" %}
+        # Archive Gaussian analysis data
+        {% filter indent(width=4) %}
+{% include "gfs_netcdfa.yaml.j2" %}
+        {% endfilter %}
+    {% endif %}
+{% endif %}
+
+{% if DO_WAVE %}
+    # Wave forecasts
+    {% filter indent(width=4) %}
+{% include "gfswave.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if AERO_FCST_RUN == "gfs" or AERO_FCST_RUN == "both" %}
+    # Aerosol forecasts
+    {% filter indent(width=4) %}
+{% include "chem.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if DO_OCN %}
+    # Ocean forecasts
+    {% filter indent(width=4) %}
+{% include "ocean_6hravg.yaml.j2" %}
+{% include "ocean_grib2.yaml.j2" %}
+{% include "gfs_flux_1p00.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if DO_ICE %}
+    # Ice forecasts
+    {% filter indent(width=4) %}
+{% include "ice_6hravg.yaml.j2" %}
+{% include "ice_grib2.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+{% if DO_BUFRSND %}
+    # Downstream BUFR soundings
+    {% filter indent(width=4) %}
+{% include "gfs_downstream.yaml.j2" %}
+    {% endfilter %}
+{% endif %}
+
+# Determine whether to save the MOS tarball
+{% if DO_MOS and cycle_HH == "18" %}
+
+    {% if not REALTIME %}
+        {% filter indent(width=4) %}
+{% include "gfsmos.yaml.j2" %}
+        {% endfilter %}
+
+    {% else %}
+
+        {% set td_from_sdate = current_cycle - SDATE %}
+        {% set td_one_day = "+1D" | to_timedelta %}
+        {% if td_from_sdate > td_one_day %}
+            {% filter indent(width=4) %}
+{% include "gfsmos.yaml.j2" %}
+            {% endfilter %}
+        {% endif %}
+
+    {% endif %}
+{% endif %}
+
+# Determine if we will save restart ICs or not
+{% if ARCH_CYC == cycle_HH | int %}
+    # Save the forecast-only cycle ICs every ARCH_WARMICFREQ or ARCH_FCSTICFREQ days
+    {% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %}
+        {% filter indent(width=4) %}
+{% include "gfs_restarta.yaml.j2" %}
+        {% endfilter %}
+    {% elif (current_cycle - SDATE).days % ARCH_FCSTICFREQ == 0 %}
+        {% filter indent(width=4) %}
+{% include "gfs_restarta.yaml.j2" %}
+        {% endfilter %}
+    {% endif %}
+{% endif %}
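+# For example (assumed values): with ARCH_CYC=0, ARCH_WARMICFREQ=4, and ARCH_FCSTICFREQ=1, gfs_restarta
+# is archived at every 00z cycle, since the ARCH_FCSTICFREQ check is satisfied every day.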
diff --git a/parm/archive/ocean_6hravg.yaml.j2 b/parm/archive/ocean_6hravg.yaml.j2
new file mode 100644
index 0000000000..58db08538f
--- /dev/null
+++ b/parm/archive/ocean_6hravg.yaml.j2
@@ -0,0 +1,8 @@
+ocean_6hravg:
+    {% set head = "gfs.ocean.t" + cycle_HH + "z." %}
+    name: "OCEAN_6HRAVG"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_6hravg.tar"
+    required:
+        {% for fhr in range(6, FHMAX_GFS + 6, 6) %}
+        - "{{ COMIN_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc"
+        {% endfor %}
diff --git a/parm/archive/ocean_grib2.yaml.j2 b/parm/archive/ocean_grib2.yaml.j2
new file mode 100644
index 0000000000..e8f2e3170b
--- /dev/null
+++ b/parm/archive/ocean_grib2.yaml.j2
@@ -0,0 +1,18 @@
+ocean_grib2:
+    {% set head = "gfs.ocean.t" + cycle_HH + "z." %}
+    name: "OCEAN_GRIB2"
+    target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_grib2.tar"
+    required:
+        {% for fhr in range(FHOUT_OCN_GFS, FHMAX_GFS + FHOUT_OCN_GFS, FHOUT_OCN_GFS) %}
+        {% set fhr3 = '%03d' % fhr %}
+        {% if OCNRES == 500 %}
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx"
+        {% elif OCNRES == 100 %}
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx"
+        {% elif OCNRES == 25 or OCNRES == "025" %}
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2"
+        - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx"
+        {% endif %}
+        {% endfor %}
diff --git a/parm/config/gefs/config.atmos_ensstat b/parm/config/gefs/config.atmos_ensstat
new file mode 100644
index 0000000000..d371f75887
--- /dev/null
+++ b/parm/config/gefs/config.atmos_ensstat
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmos_ensstat ##########
+# atmosphere grib2 ensstat specific
+
+echo "BEGIN: config.atmos_ensstat"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmos_ensstat
+
+echo "END: config.atmos_ensstat"
diff --git a/parm/config/gefs/config.atmos_products b/parm/config/gefs/config.atmos_products
new file mode 100644
index 0000000000..4a0fb8b49f
--- /dev/null
+++ b/parm/config/gefs/config.atmos_products
@@ -0,0 +1,28 @@
+#! /usr/bin/env bash
+
+########## config.atmos_products ##########
+# atmosphere grib2 products specific
+
+echo "BEGIN: config.atmos_products"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmos_products
+
+# No. of forecast hours to process in a single job
+export NFHRS_PER_GROUP=3
+
+# Scripts used by this job
+export INTERP_ATMOS_MASTERSH="${USHgfs}/interp_atmos_master.sh"
+export INTERP_ATMOS_SFLUXSH="${USHgfs}/interp_atmos_sflux.sh"
+
+export downset=2
+export FHOUT_PGBS=${FHOUT_GFS:-3}  # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg
+export FLXGF="NO"  # Create interpolated sflux.1p00 file
+
+# paramlist files for the different forecast hours and downsets
+export paramlista="${PARMgfs}/product/gefs.0p25.fFFF.paramlist.a.txt"
+export paramlista_anl="${PARMgfs}/product/gefs.0p25.anl.paramlist.a.txt"
+export paramlista_f000="${PARMgfs}/product/gefs.0p25.f000.paramlist.a.txt"
+export paramlistb="${PARMgfs}/product/gefs.0p25.fFFF.paramlist.b.txt"
+
+echo "END: config.atmos_products"
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base
similarity index 76%
rename from parm/config/gefs/config.base.emc.dyn
rename to parm/config/gefs/config.base
index 5358a37768..735743b568 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base
@@ -17,28 +17,20 @@ export QUEUE="@QUEUE@"
 export QUEUE_SERVICE="@QUEUE_SERVICE@"
 export PARTITION_BATCH="@PARTITION_BATCH@"
 export PARTITION_SERVICE="@PARTITION_SERVICE@"
+export RESERVATION="@RESERVATION@"
 
 # Project to use in mass store:
 export HPSS_PROJECT="@HPSS_PROJECT@"
 
 # Directories relative to installation areas:
 export HOMEgfs=@HOMEgfs@
-export PARMgfs=${HOMEgfs}/parm
-export FIXgfs=${HOMEgfs}/fix
-export USHgfs=${HOMEgfs}/ush
-export UTILgfs=${HOMEgfs}/util
 export EXECgfs=${HOMEgfs}/exec
+export FIXgfs=${HOMEgfs}/fix
+export PARMgfs=${HOMEgfs}/parm
 export SCRgfs=${HOMEgfs}/scripts
-
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
+export USHgfs=${HOMEgfs}/ush
+export FIXorog=${FIXgfs}/orog
+export FIXugwd=${FIXgfs}/ugwd
 
 ########################################################################
 
@@ -58,9 +50,9 @@ export NOSCRUB="@NOSCRUB@"
 export BASE_GIT="@BASE_GIT@"
 
 # Toggle to turn on/off GFS downstream processing.
-export DO_BUFRSND="NO"     # BUFR sounding products
-export DO_GEMPAK="NO"      # GEMPAK products
-export DO_AWIPS="NO"       # AWIPS products
+export DO_BUFRSND="@DO_BUFRSND@"     # BUFR sounding products
+export DO_GEMPAK="@DO_GEMPAK@"       # GEMPAK products
+export DO_AWIPS="@DO_AWIPS@"         # AWIPS products
 
 # NO for retrospective parallel; YES for real-time parallel
 #  arch.sh uses REALTIME for MOS.  Need to set REALTIME=YES
@@ -85,7 +77,8 @@ export NCP="/bin/cp -p"
 export NMV="/bin/mv"
 export NLN="/bin/ln -sf"
 export VERBOSE="YES"
-export KEEPDATA="NO"
+export KEEPDATA="@KEEPDATA@"
+export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true
 export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
 export CHGRP_CMD="@CHGRP_CMD@"
 export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
@@ -104,22 +97,18 @@ export PSLOT="@PSLOT@"
 export EXPDIR="@EXPDIR@/${PSLOT}"
 export ROTDIR="@COMROOT@/${PSLOT}"
 
-export DATAROOT="${STMP}/RUNDIRS/${PSLOT}"  # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}"  # TODO: Should be removed; use DATAROOT instead
 export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
 export ATARDIR="@ATARDIR@"
 
 # Commonly defined parameters in JJOBS
 export envir=${envir:-"prod"}
 export NET="gefs"  # NET is defined in the job-card (ecf)
-export RUN="gefs"  # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy
-# TODO: determine where is RUN actually used in the workflow other than here
-# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be
-#       consistent w/ EE2?
+export RUN="gefs"  # RUN is defined in the job-card (ecf)
 
 # Get all the COM path templates
 source "${EXPDIR}/config.com"
 
+# shellcheck disable=SC2016
 export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
 export LOGSCRIPT=${LOGSCRIPT:-""}
 #export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
@@ -143,9 +132,20 @@ export DO_WAVE="NO"
 export DO_OCN="NO"
 export DO_ICE="NO"
 export DO_AERO="NO"
-export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
+export DO_EXTRACTVARS="@DO_EXTRACTVARS@" # Option to process and extract a subset of products to save on disk
+export AERO_FCST_RUN="" # When to run aerosol forecast: gdas, gfs, or both
+export AERO_ANL_RUN="" # When to run aerosol analysis: gdas, gfs, or both
+export WAVE_RUN="" # When to include wave suite: gdas, gfs, or both
+export DOBNDPNT_WAVE="NO" # The GEFS buoys file does not currently have any boundary points
+export DOIBP_WAV="NO" # Option to create point outputs from input boundary points
 export FRAC_GRID=".true."
+export DO_NEST="NO" # Whether to run a global-nested domain
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export ntiles=7
+  export NEST_OUTPUT_GRID="regional_latlon"
+else
+  export ntiles=6
+fi
 
 # Set operational resolution
 export OPS_RES="C768" # Do not change
@@ -162,7 +162,7 @@ case "${CASE}" in
         export waveGRD='glo_500'
         ;;
     "C96" | "C192")
-        export waveGRD='glo_200'
+        export waveGRD='glo_100'
         ;;
     "C384")
         export waveGRD='glo_025'
@@ -181,11 +181,13 @@ case "${APP}" in
     ;;
   ATMA)
     export DO_AERO="YES"
+    export AERO_ANL_RUN="both"
+    export AERO_FCST_RUN="gdas"
     ;;
   ATMW)
     export DO_COUPLED="YES"
     export DO_WAVE="YES"
-    export WAVE_CDUMP="both"
+    export WAVE_RUN="both"
     ;;
   NG-GODAS)
     export DO_ATM="NO"
@@ -199,11 +201,13 @@ case "${APP}" in
 
     if [[ "${APP}" =~ A$ ]]; then
         export DO_AERO="YES"
+        export AERO_ANL_RUN="both"
+        export AERO_FCST_RUN="gdas"
     fi
 
     if [[ "${APP}" =~ ^S2SW ]]; then
         export DO_WAVE="YES"
-        export WAVE_CDUMP="both"
+        export WAVE_RUN="both"
         export cplwav2atm=".true."
     fi
     ;;
@@ -213,21 +217,30 @@ case "${APP}" in
     ;;
 esac
 
+# Output frequency of the forecast model (for cycling)
+export FHMIN=0
+export FHMAX=9
+export FHOUT=3           # Will be changed to 1 in config.base if DOHYBVAR is set to NO and l4densvar is set to false
+export FHOUT_OCN=3
+export FHOUT_ICE=3
+
 # GFS cycle info
 export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
 
 # GFS output and frequency
 export FHMIN_GFS=0
-
-export FHMAX_GFS_00=120
-export FHMAX_GFS_06=120
-export FHMAX_GFS_12=120
-export FHMAX_GFS_18=120
-current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var}
-
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
-export FHMAX_HF_GFS=0
+export FHMIN=${FHMIN_GFS}
+export FHMAX_GFS=@FHMAX_GFS@
+export FHOUT_GFS=6
+export FHMAX_HF_GFS=@FHMAX_HF_GFS@
 export FHOUT_HF_GFS=1
+export FHOUT_OCN_GFS=6
+export FHOUT_ICE_GFS=6
+export FHMIN_WAV=0
+export FHOUT_WAV=3
+export FHMAX_HF_WAV=120
+export FHOUT_HF_WAV=1
+export FHMAX_WAV=${FHMAX_GFS}
 if (( gfs_cyc != 0 )); then
     export STEP_GFS=$(( 24 / gfs_cyc ))
 else
@@ -238,9 +251,12 @@ export ILPOST=1           # gempak output frequency up to F120
 export FHMIN_ENKF=${FHMIN_GFS}
 export FHMAX_ENKF=${FHMAX_GFS}
 export FHOUT_ENKF=${FHOUT_GFS}
+export FHOUT_OCN=${FHOUT_OCN_GFS}
+export FHOUT_ICE=${FHOUT_ICE_GFS}
 
 # GFS restart interval in hours
 export restart_interval_gfs=12
+export restart_interval_enkfgfs=12
 # NOTE: Do not set this to zero.  Instead set it to $FHMAX_GFS
 # TODO: Remove this variable from config.base and reference from config.fcst
 # TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used
@@ -262,7 +278,7 @@ export imp_physics=8
 export DO_JEDIATMVAR="NO"
 export DO_JEDIATMENS="NO"
 export DO_JEDIOCNVAR="NO"
-export DO_JEDILANDDA="NO"
+export DO_JEDISNOWDA="NO"
 export DO_MERGENSST="NO"
 
 # Hybrid related
@@ -270,13 +286,22 @@ export NMEM_ENS=@NMEM_ENS@
 
 # set default member number memdir for control
 # this will be overwritten for the perturbed members
-export ENSMEM="000"
+export ENSMEM=${ENSMEM:-"000"}
 export MEMDIR="mem${ENSMEM}"
 
+# initialize ocean ensemble members with perturbations
+#   if true, only occurs for members greater than zero
+export REPLAY_ICS=@REPLAY_ICS@
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+    export OFFSET_START_HOUR=$(( assim_freq / 2 ))
+else
+    export OFFSET_START_HOUR=0
+fi
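+# For example (assumed value): with assim_freq=6, OFFSET_START_HOUR evaluates to 3; the same value is
+# used for IAU_FHROT below when the experiment is cold started.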
+
 export DOIAU="NO"  # While we are not doing IAU, we may want to warm start w/ IAU in the future
 # Check if cycle is cold starting
 if [[ "${EXP_WARM_START}" = ".false." ]]; then
-  export IAU_FHROT=0
+  export IAU_FHROT=${OFFSET_START_HOUR}
 else
   if [[ "${DOIAU}" = "YES" ]]; then
     export IAU_FHROT=3
@@ -315,4 +340,7 @@ export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs foreca
 
 export DELETE_COM_IN_ARCHIVE_JOB="YES"   # NO=retain ROTDIR.  YES default in arch.sh and earc.sh.
 
+# Number of regional collectives to create soundings for
+export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9}
+
 echo "END: config.base"
diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs
index 9593408848..807ed66d48 100644
--- a/parm/config/gefs/config.efcs
+++ b/parm/config/gefs/config.efcs
@@ -5,14 +5,16 @@
 
 echo "BEGIN: config.efcs"
 
-# Turn off components in ensemble via _ENKF, or use setting from deterministic
-export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}}
-export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}}
-export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}}
-export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}}
+# Turn off components in ensemble
+# export DO_AERO="NO"
+# export DO_OCN="NO"
+# export DO_ICE="NO"
+# export DO_WAVE="NO"
+
+export CASE="${CASE_ENS}"
 
 # Source model specific information that is resolution dependent
-string="--fv3 ${CASE_ENS}"
+string="--fv3 ${CASE}"
 # Ocean/Ice/Waves ensemble configurations are identical to deterministic member
 [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}"
 [[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}"
@@ -24,23 +26,24 @@ source "${EXPDIR}/config.ufs" ${string}
 # Get task specific resources
 source "${EXPDIR}/config.resources" efcs
 
-# Use serial I/O for ensemble (lustre?)
-export OUTPUT_FILETYPE_ATM="netcdf"
-export OUTPUT_FILETYPE_SFC="netcdf"
-
-# Number of enkf members per fcst job
-export NMEM_EFCSGRP=1
-export RERUN_EFCSGRP="NO"
+# nggps_diag_nml
+export FHOUT=${FHOUT_ENKF:-3}
+if [[ "${RUN}" == "enkfgfs" ]]; then
+    export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}}
+fi
 
-# Turn off inline UPP for EnKF forecast
-export WRITE_DOPOST=".true."
+# model_configure
+export FHMAX=${FHMAX_ENKF:-9}
+if [[ "${RUN}" == "enkfgfs" ]]; then
+    export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}}
+fi
 
 # Stochastic physics parameters (only for ensemble forecasts)
 export DO_SKEB="YES"
-export SKEB=0.3
-export SKEB_TAU=21600.
-export SKEB_LSCALE=250000.
-export SKEBNORM=0
+export SKEB="0.8,-999,-999,-999,-999"
+export SKEB_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export SKEB_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
+export SKEBNORM=1
 export SKEB_NPASS=30
 export SKEB_VDOF=5
 export DO_SHUM="YES"
@@ -48,12 +51,33 @@ export SHUM=0.005
 export SHUM_TAU=21600.
 export SHUM_LSCALE=500000.
 export DO_SPPT="YES"
-export SPPT=0.5
-export SPPT_TAU=21600.
-export SPPT_LSCALE=500000.
+export SPPT="0.56,0.28,0.14,0.056,0.028"
+export SPPT_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export SPPT_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
 export SPPT_LOGIT=".true."
 export SPPT_SFCLIMIT=".true."
+export DO_CA="YES"
+# OCN options
+export DO_OCN_SPPT="YES"
+export OCNSPPT="0.8,0.4,0.2,0.08,0.04"
+export OCNSPPT_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export OCNSPPT_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
+export DO_OCN_PERT_EPBL="YES"
+export EPBL="0.8,0.4,0.2,0.08,0.04"
+export EPBL_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7"
+export EPBL_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3"
 
-export restart_interval=${restart_interval_gfs}
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+    export ODA_INCUPD="True"
+    export ODA_TEMPINC_VAR='t_pert'
+    export ODA_SALTINC_VAR='s_pert'
+    export ODA_THK_VAR='h_anl'
+    export ODA_UINC_VAR='u_pert'
+    export ODA_VINC_VAR='v_pert'
+    export ODA_INCUPD_NHOURS=0.0
+else
+    export ODA_INCUPD="False"
+fi
+export restart_interval="${restart_interval_enkfgfs:-12}"
 
 echo "END: config.efcs"
diff --git a/parm/config/gefs/config.extractvars b/parm/config/gefs/config.extractvars
new file mode 100644
index 0000000000..706fe18450
--- /dev/null
+++ b/parm/config/gefs/config.extractvars
@@ -0,0 +1,41 @@
+#! /usr/bin/env bash
+
+########## config.extractvars ##########
+# Extractvars specific
+
+echo "BEGIN: config.extractvars"
+
+. "${EXPDIR}/config.resources" extractvars
+
+export COMPRSCMD=${COMPRSCMD:-bzip2}
+
+export compress_ocn=0 # 1: Compress extracted ocean product; 0: Do not compress extracted ocean product
+export compress_ice=0 # 1: Compress extracted ice product; 0: Do not compress extracted ice product
+
+export ocnres="5p00" # Resolution of ocean products
+export iceres="5p00" # Resolution of ice products
+export wavres="5p00" # Resolution of wave products
+
+export depthvar_name="z_l" # Name of depth variable in NetCDF ocean products
+export zmin="0." # Minimum depth to extract from NetCDF ocean products
+export zmax="300." # Maximum depth to extract from NetCDF ocean products
+
+export FHOUT_WAV_EXTRACT=6 # Frequency of wave output to be saved on disk
+
+# Parameter tables used
+export varlist_2d="${PARMgfs}/product/gefs_shortparmlist_2d.parm" # Parameter table for surface variables
+export varlist_3d="${PARMgfs}/product/gefs_shortparmlist_3d_h.parm" # Parameter table for upper air instantaneous variables
+export varlist_3d_d="${PARMgfs}/product/gefs_shortparmlist_3d_d.parm" # Parameter table for upper air daily-averaged variables
+export varlist_wav="${PARMgfs}/product/gefs_wav_shortparmlist.parm" # Parameter table for wave variables
+export varlist_ocn_netcdf="${PARMgfs}/product/gefs_ocn_shortparmlist.parm" # Parameter table for ocean netcdf variables
+export varlist_ice_netcdf="${PARMgfs}/product/gefs_ice_shortparmlist.parm" # Parameter table for ice netcdf variables
+
+# Directory to save extracted variables
+export ARC_RFCST_PROD="${ARCDIR}/rfcst/${PDY:0:4}/${PDY:0:6}/${PDY:0:8}/mem${ENSMEM}"
+export ARC_RFCST_PROD_ATMOS_F2D="${ARC_RFCST_PROD}/atmos/f2d"
+export ARC_RFCST_PROD_ATMOS_F3D="${ARC_RFCST_PROD}/atmos/f3d"
+export ARC_RFCST_PROD_OCN="${ARC_RFCST_PROD}/ocn"
+export ARC_RFCST_PROD_ICE="${ARC_RFCST_PROD}/ice"
+export ARC_RFCST_PROD_WAV="${ARC_RFCST_PROD}/wav"
+
+echo "END: config.extractvars"
diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index 4c8d3be99f..e66fc15f87 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -5,12 +5,21 @@
 
 echo "BEGIN: config.fcst"
 
-# Turn off waves if not used for this CDUMP
-case ${WAVE_CDUMP} in
-  both | "${CDUMP/enkf}" ) ;; # Don't change
+export USE_ESMF_THREADING="YES"  # Toggle to use ESMF-managed threading or traditional threading in UFSWM
+export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately)
+
+# Turn off waves if not used for this RUN
+case ${WAVE_RUN} in
+  both | "${RUN/enkf}" ) ;; # Don't change
   *) DO_WAVE="NO" ;; # Turn waves off
 esac
 
+# Turn off aerosols if not used for this RUN
+case ${AERO_FCST_RUN} in
+  both | "${RUN/enkf}" ) ;; # Don't change
+  *) DO_AERO="NO" ;; # Turn aerosols off
+esac
+
 # Source model specific information that is resolution dependent
 string="--fv3 ${CASE}"
 [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}"
@@ -21,6 +30,14 @@ string="--fv3 ${CASE}"
 # shellcheck disable=SC2086
 source "${EXPDIR}/config.ufs" ${string}
 
+# shellcheck disable=SC2153
+export FHMAX=${FHMAX_GFS}
+# shellcheck disable=SC2153
+export FHOUT=${FHOUT_GFS}
+export FHMAX_HF=${FHMAX_HF_GFS}
+export FHOUT_HF=${FHOUT_HF_GFS}
+export FHOUT_OCN=${FHOUT_OCN_GFS}
+export FHOUT_ICE=${FHOUT_ICE_GFS}
 
 # Get task specific resources
 source "${EXPDIR}/config.resources" fcst
@@ -37,9 +54,8 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O
 
 #######################################################################
 
-export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh"
-#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py"  # Temp. while this is worked on
-export FCSTEXECDIR="${HOMEgfs}/exec"
+export FORECASTSH="${SCRgfs}/exglobal_forecast.sh"
+#export FORECASTSH="${SCRgfs}/exglobal_forecast.py"  # Temp. while this is worked on
 export FCSTEXEC="ufs_model.x"
 
 #######################################################################
@@ -92,29 +108,19 @@ if (( gwd_opt == 2 )); then
     export do_ugwp_v0_orog_only=".false."
     export do_ugwp_v0_nst_only=".false."
     export do_gsl_drag_ls_bl=".true."
-    export do_gsl_drag_ss=".true."
+    export do_gsl_drag_ss=".false."
     export do_gsl_drag_tofd=".true."
+    export do_gwd_opt_psl=".true."
     export do_ugwp_v1_orog_only=".false."
     launch_level=$(echo "${LEVS}/2.35" |bc)
     export launch_level
-    if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then
-       export cdmbgwd=${cdmbgwd_gsl}
-    fi
 fi
 
 # Sponge layer settings
-export tau=0.
-export rf_cutoff=10.
 export d2_bg_k1=0.20
 export d2_bg_k2=0.04
 export dz_min=6
 export n_sponge=42
-if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then
-   export tau=5.0
-   export rf_cutoff=1.0e3
-   export d2_bg_k1=0.20
-   export d2_bg_k2=0.0
-fi
 
 # PBL/turbulance schemes
 export hybedmf=".false."
@@ -129,7 +135,11 @@ tbp=""
 if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi
 
 # Radiation options
-export IAER=1011    ; #spectral band mapping method for aerosol optical properties
+if [[ "${DO_AERO}" == "YES" ]]; then
+    export IAER=2011  # spectral band mapping method for aerosol optical properties
+else
+    export IAER=1011
+fi
 export iovr_lw=3    ; #de-correlation length cloud overlap method (Barker, 2008)
 export iovr_sw=3    ; #de-correlation length cloud overlap method (Barker, 2008)
 export iovr=3       ; #de-correlation length cloud overlap method (Barker, 2008)
@@ -156,17 +166,17 @@ export random_clds=".true."
 case ${imp_physics} in
     99) # ZhaoCarr
         export ncld=1
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}"
         export nwat=2
         ;;
     6)  # WSM6
         export ncld=2
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}"
         export nwat=6
         ;;
     8)  # Thompson
         export ncld=2
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}"
         export nwat=6
 
         export cal_pre=".false."
@@ -189,7 +199,7 @@ case ${imp_physics} in
         ;;
     11) # GFDL
         export ncld=5
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}"
         export nwat=6
         export dnats=1
         export cal_pre=".false."
@@ -231,22 +241,8 @@ export FSICL="0"
 export FSICS="0"
 
 #---------------------------------------------------------------------
-
-# ideflate: netcdf zlib lossless compression (0-9): 0 no compression
-# nbits: netcdf lossy compression level (0-32): 0 lossless
-export ideflate=1
-export nbits=14
-export ishuffle=0
-# compression for RESTART files written by FMS
-export shuffle=1
-export deflate_level=1
-
-#---------------------------------------------------------------------
-# Disable the use of coupler.res; get model start time from model_configure
-export USE_COUPLER_RES="NO"
-
 # Write more variables to output
-export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table"
+export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
 
 # Write gfs restart files to rerun fcst from any break point
 export restart_interval=${restart_interval_gfs:-12}
@@ -261,6 +257,9 @@ else
   export io_layout="1,1"
 fi
 
+if (( OFFSET_START_HOUR != 0 )); then
+    export reforecast="YES"
+fi
 # Remember config.efcs will over-ride these values for ensemble forecasts
 # if these variables are re-defined there.
 # Otherwise, the ensemble forecast will inherit from config.fcst
diff --git a/parm/config/gefs/config.oceanice_products b/parm/config/gefs/config.oceanice_products
new file mode 100644
index 0000000000..3b8b064947
--- /dev/null
+++ b/parm/config/gefs/config.oceanice_products
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.oceanice_products ##########
+
+echo "BEGIN: config.oceanice_products"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" oceanice_products
+
+export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products_gefs.yaml"
+
+# No. of forecast hours to process in a single job
+export NFHRS_PER_GROUP=3
+
+echo "END: config.oceanice_products"
diff --git a/parm/config/gefs/config.prep_emissions b/parm/config/gefs/config.prep_emissions
new file mode 100644
index 0000000000..fa411c27ad
--- /dev/null
+++ b/parm/config/gefs/config.prep_emissions
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.prep_emissions ##########
+# aerosol emissions preprocessing specific
+
+echo "BEGIN: config.prep_emissions"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" prep_emissions
+
+echo "END: config.prep_emissions"
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index a50418d23a..81d2a20635 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -4,17 +4,9 @@
 # Set resource information for job tasks
 # e.g. walltime, node, cores per node, memory etc.
 
-if [[ $# -ne 1 ]]; then
+if (( $# != 1 )); then
 
     echo "Must specify an input task argument to set resource variables!"
-    echo "argument can be any one of the following:"
-    echo "stage_ic aerosol_init"
-    echo "sfcanl analcalc analdiag fcst fit2obs metp arch echgres"
-    echo "ecen esfc efcs epos earc"
-    echo "init_chem mom6ic ocnpost"
-    echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
-    echo "wavegempak waveawipsbulls waveawipsgridded"
-    echo "postsnd awips gempak"
     exit 1
 
 fi
@@ -23,455 +15,291 @@ step=$1
 
 echo "BEGIN: config.resources"
 
-if [[ "${machine}" = "WCOSS2" ]]; then
-   export npe_node_max=128
-elif [[ "${machine}" = "JET" ]]; then
-   if [[ ${PARTITION_BATCH} = "xjet" ]]; then
-     export npe_node_max=24
-   elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then
-     export npe_node_max=16
-   elif [[ ${PARTITION_BATCH} = "kjet" ]]; then
-     export npe_node_max=40
-   fi
-elif [[ ${machine} = "HERA" ]]; then
-   export npe_node_max=40
-elif [[ ${machine} = "S4" ]]; then
-   if [[ ${PARTITION_BATCH} = "s4" ]]; then
-      export npe_node_max=32
-   elif [[ ${PARTITION_BATCH} = "ivy" ]]; then
-      export npe_node_max=20
-   fi
-elif [[ ${machine} = "ORION" ]]; then
-   export npe_node_max=40
-elif [[ ${machine} = "HERCULES" ]]; then
-   export npe_node_max=40
-fi
-
-if [[ ${step} = "prep" ]]; then
-    export wtime_prep='00:30:00'
-    export npe_prep=4
-    export npe_node_prep=2
-    export nth_prep=1
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export is_exclusive=True
-    else
-      export memory_prep="40G"
-    fi
-
-elif [[ "${step}" = "aerosol_init" ]]; then
-    export wtime_aerosol_init="00:05:00"
-    export npe_aerosol_init=1
-    export nth_aerosol_init=1
-    npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc)
-    export npe_node_aerosol_init
-    export NTASKS=${npe_aerosol_init}
-    export memory_aerosol_init="6G"
-
-elif [[ ${step} = "waveinit" ]]; then
-
-    export wtime_waveinit="00:10:00"
-    export npe_waveinit=12
-    export nth_waveinit=1
-    npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc)
-    export npe_node_waveinit
-    export NTASKS=${npe_waveinit}
-    export memory_waveinit="2GB"
-
-elif [[ ${step} = "waveprep" ]]; then
-
-    export wtime_waveprep="00:10:00"
-    export npe_waveprep=5
-    export npe_waveprep_gfs=65
-    export nth_waveprep=1
-    export nth_waveprep_gfs=1
-    npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc)
-    export npe_node_waveprep
-    npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc)
-    export npe_node_waveprep_gfs
-    export NTASKS=${npe_waveprep}
-    export NTASKS_gfs=${npe_waveprep_gfs}
-    export memory_waveprep="100GB"
-    export memory_waveprep_gfs="150GB"
-
-elif [[ ${step} = "wavepostsbs" ]]; then
-
-    export wtime_wavepostsbs="00:20:00"
-    export wtime_wavepostsbs_gfs="03:00:00"
-    export npe_wavepostsbs=8
-    export nth_wavepostsbs=1
-    npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc)
-    export npe_node_wavepostsbs
-    export NTASKS=${npe_wavepostsbs}
-    export memory_wavepostsbs="10GB"
-    export memory_wavepostsbs_gfs="10GB"
-
-elif [[ ${step} = "wavepostbndpnt" ]]; then
-
-    export wtime_wavepostbndpnt="01:00:00"
-    export npe_wavepostbndpnt=240
-    export nth_wavepostbndpnt=1
-    npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc)
-    export npe_node_wavepostbndpnt
-    export NTASKS=${npe_wavepostbndpnt}
-    export is_exclusive=True
-
-elif [[ ${step} = "wavepostbndpntbll" ]]; then
-
-    export wtime_wavepostbndpntbll="01:00:00"
-    export npe_wavepostbndpntbll=448
-    export nth_wavepostbndpntbll=1
-    npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc)
-    export npe_node_wavepostbndpntbll
-    export NTASKS=${npe_wavepostbndpntbll}
-    export is_exclusive=True
-
-elif [[ ${step} = "wavepostpnt" ]]; then
-
-    export wtime_wavepostpnt="01:30:00"
-    export npe_wavepostpnt=200
-    export nth_wavepostpnt=1
-    npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc)
-    export npe_node_wavepostpnt
-    export NTASKS=${npe_wavepostpnt}
-    export is_exclusive=True
-
-elif [[ ${step} = "wavegempak" ]]; then
-
-    export wtime_wavegempak="02:00:00"
-    export npe_wavegempak=1
-    export nth_wavegempak=1
-    npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc)
-    export npe_node_wavegempak
-    export NTASKS=${npe_wavegempak}
-    export memory_wavegempak="1GB"
-
-elif [[ ${step} = "waveawipsbulls" ]]; then
+case ${machine} in
+  "WCOSS2")   max_tasks_per_node=128;;
+  "HERA")     max_tasks_per_node=40;;
+  "ORION")    max_tasks_per_node=40;;
+  "HERCULES") max_tasks_per_node=80;;
+  "JET")
+    case ${PARTITION_BATCH} in
+      "xjet")          max_tasks_per_node=24;;
+      "vjet" | "sjet") max_tasks_per_node=16;;
+      "kjet")          max_tasks_per_node=40;;
+      *)
+        echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}"
+        exit 3
+    esac
+    ;;
+  "S4")
+    case ${PARTITION_BATCH} in
+      "s4")  max_tasks_per_node=32;;
+      "ivy") max_tasks_per_node=20;;
+      *)
+        echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}"
+        exit 3
+    esac
+    ;;
+  "AWSPW")
+    export PARTITION_BATCH="compute"
+    max_tasks_per_node=40
+    ;;
+  *)
+    echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}"
+    exit 2
+    ;;
+esac
+export max_tasks_per_node
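+# max_tasks_per_node is the maximum number of MPI tasks that can be placed on a single node of this machine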
 
-    export wtime_waveawipsbulls="00:20:00"
-    export npe_waveawipsbulls=1
-    export nth_waveawipsbulls=1
-    npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc)
-    export npe_node_waveawipsbulls
-    export NTASKS=${npe_waveawipsbulls}
-    export is_exclusive=True
+case ${step} in
 
-elif [[ ${step} = "waveawipsgridded" ]]; then
-
-    export wtime_waveawipsgridded="02:00:00"
-    export npe_waveawipsgridded=1
-    export nth_waveawipsgridded=1
-    npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc)
-    export npe_node_waveawipsgridded
-    export NTASKS=${npe_waveawipsgridded}
-    export memory_waveawipsgridded_gfs="1GB"
-
-elif [[ ${step} = "analcalc" ]]; then
-
-    export wtime_analcalc="00:10:00"
-    export npe_analcalc=127
-    export ntasks="${npe_analcalc}"
-    export nth_analcalc=1
-    export nth_echgres=4
-    export nth_echgres_gfs=12
-    npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc)
-    export npe_node_analcalc
+  "stage_ic")
+    export walltime="00:15:00"
+    export ntasks=1
+    export tasks_per_node=1
+    export threads_per_task=1
     export is_exclusive=True
-
-elif [[ ${step} = "analdiag" ]]; then
-
-    export wtime_analdiag="00:15:00"
-    export npe_analdiag=96             # Should be at least twice npe_ediag
-    export nth_analdiag=1
-    npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc)
-    export npe_node_analdiag
-    export memory_analdiag="48GB"
-
-elif [[ ${step} = "sfcanl" ]]; then
-
-    export wtime_sfcanl="00:10:00"
-    export npe_sfcanl=6
-    export nth_sfcanl=1
-    npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc)
-    export npe_node_sfcanl
+    ;;
+
+  "waveinit")
+    export walltime="00:10:00"
+    export ntasks=12
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export NTASKS=${ntasks}
+    export memory="2GB"
+    ;;
+
+  "prep_emissions")
+    export walltime="00:10:00"
+    export ntasks=1
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export memory="1GB"
+    ;;
+
+  "fcst" | "efcs")
     export is_exclusive=True
 
-elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then
+    export layout_x=${layout_x_gfs}
+    export layout_y=${layout_y_gfs}
+    export WRITE_GROUP=${WRITE_GROUP_GFS}
+    export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
+    ntasks_fv3=${ntasks_fv3_gfs}
+    ntasks_quilt=${ntasks_quilt_gfs}
+    nthreads_fv3=${nthreads_fv3_gfs}
+    nthreads_ufs=${nthreads_ufs_gfs}
+
+    # Determine whether ESMF-managed threading or traditional threading is being used
+    # If using traditional threading, set the per-component thread counts to 1
+    if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+      export UFS_THREADS=1
+    else  # traditional threading
+      export UFS_THREADS=${nthreads_ufs:-1}
+      nthreads_fv3=1
+      nthreads_mediator=1
+      [[ "${DO_WAVE}" == "YES" ]] && nthreads_ww3=1
+      [[ "${DO_OCN}" == "YES" ]] && nthreads_mom6=1
+      [[ "${DO_ICE}" == "YES" ]] && nthreads_cice6=1
+    fi
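+    # With ESMF-managed threading, UFS_THREADS stays 1 and the per-component thread counts feed the PETS math below;
+    # with traditional threading, UFS_THREADS carries nthreads_ufs and the component thread counts collapse to 1.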
 
-    export is_exclusive=True
+    # PETS for the atmosphere dycore
+    (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+    echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
 
-    if [[ "${step}" = "fcst" ]]; then
-        _CDUMP_LIST=${CDUMP:-"gdas gfs"}
-    elif [[ "${step}" = "efcs" ]]; then
-        _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"}
+    # PETS for quilting
+    if [[ "${QUILTING:-}" == ".true." ]]; then
+      (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+      (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+      export WRTTASK_PER_GROUP
+    else
+      QUILTPETS=0
+    fi
+    echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+    # Total PETS for the atmosphere component
+    ATMTHREADS=${nthreads_fv3}
+    (( ATMPETS = FV3PETS + QUILTPETS ))
+    export ATMPETS ATMTHREADS
+    echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+    # Total PETS for the coupled model (starting w/ the atmosphere)
+    NTASKS_TOT=${ATMPETS}
+
+    # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks.
+    # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance.
+    # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+    # TODO: Update reference when moved to ufs-weather-model RTD
+    MEDTHREADS=${nthreads_mediator:-1}
+    MEDPETS=${MEDPETS:-${FV3PETS}}
+    (( "${MEDPETS}" > 300 )) && MEDPETS=300
+    export MEDPETS MEDTHREADS
+    echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+    CHMPETS=0; CHMTHREADS=0
+    if [[ "${DO_AERO}" == "YES" ]]; then
+      # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+      (( CHMTHREADS = ATMTHREADS ))
+      (( CHMPETS = FV3PETS ))
+      # Do not add to NTASKS_TOT
+      echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
+    fi
+    export CHMPETS CHMTHREADS
+
+    WAVPETS=0; WAVTHREADS=0
+    if [[ "${DO_WAVE}" == "YES" ]]; then
+      (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+      (( WAVTHREADS = nthreads_ww3 ))
+      echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
     fi
+    export WAVPETS WAVTHREADS
+
+    OCNPETS=0; OCNTHREADS=0
+    if [[ "${DO_OCN}" == "YES" ]]; then
+      (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+      (( OCNTHREADS = nthreads_mom6 ))
+      echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+    fi
+    export OCNPETS OCNTHREADS
+
+    ICEPETS=0; ICETHREADS=0
+    if [[ "${DO_ICE}" == "YES" ]]; then
+      (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+      (( ICETHREADS = nthreads_cice6 ))
+      echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+    fi
+    export ICEPETS ICETHREADS
 
-    # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined
-    for _CDUMP in ${_CDUMP_LIST}; do
-        if [[ "${_CDUMP}" =~ "gfs" ]]; then
-          export layout_x=${layout_x_gfs}
-          export layout_y=${layout_y_gfs}
-          export WRITE_GROUP=${WRITE_GROUP_GFS}
-          export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
-          ntasks_fv3=${ntasks_fv3_gfs}
-          ntasks_quilt=${ntasks_quilt_gfs}
-          nthreads_fv3=${nthreads_fv3_gfs}
-        fi
-
-        # PETS for the atmosphere dycore
-        (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
-        echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
-
-        # PETS for quilting
-        if [[ "${QUILTING:-}" = ".true." ]]; then
-          (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
-          (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
-          export WRTTASK_PER_GROUP
-        else
-          QUILTPETS=0
-        fi
-        echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
-
-        # Total PETS for the atmosphere component
-        ATMTHREADS=${nthreads_fv3}
-        (( ATMPETS = FV3PETS + QUILTPETS ))
-        export ATMPETS ATMTHREADS
-        echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
-
-        # Total PETS for the coupled model (starting w/ the atmosphere)
-        NTASKS_TOT=${ATMPETS}
-
-        # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks.
-        # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance.
-        # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
-        # TODO: Update reference when moved to ufs-weather-model RTD
-        MEDTHREADS=${nthreads_mediator:-1}
-        MEDPETS=${MEDPETS:-ATMPETS}
-        [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
-        export MEDPETS MEDTHREADS
-        echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
-
-        CHMPETS=0; CHMTHREADS=0
-        if [[ "${DO_AERO}" = "YES" ]]; then
-          # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
-          (( CHMTHREADS = ATMTHREADS ))
-          (( CHMPETS = FV3PETS ))
-          # Do not add to NTASKS_TOT
-          echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
-        fi
-        export CHMPETS CHMTHREADS
-
-        WAVPETS=0; WAVTHREADS=0
-        if [[ "${DO_WAVE}" = "YES" ]]; then
-          (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
-          (( WAVTHREADS = nthreads_ww3 ))
-          echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
-        fi
-        export WAVPETS WAVTHREADS
-
-        OCNPETS=0; OCNTHREADS=0
-        if [[ "${DO_OCN}" = "YES" ]]; then
-          (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
-          (( OCNTHREADS = nthreads_mom6 ))
-          echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
-        fi
-        export OCNPETS OCNTHREADS
-
-        ICEPETS=0; ICETHREADS=0
-        if [[ "${DO_ICE}" = "YES" ]]; then
-          (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
-          (( ICETHREADS = nthreads_cice6 ))
-          echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
-        fi
-        export ICEPETS ICETHREADS
-
-        echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
-
-        if [[ "${_CDUMP}" =~ "gfs" ]]; then
-          declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
-          declare -x "nth_${step}_gfs"=1  # ESMF handles threading for the UFS-weather-model
-          declare -x "npe_node_${step}_gfs"="${npe_node_max}"
-        else
-          declare -x "npe_${step}"="${NTASKS_TOT}"
-          declare -x "nth_${step}"=1  # ESMF handles threading for the UFS-weather-model
-          declare -x "npe_node_${step}"="${npe_node_max}"
-        fi
-
-    done
+    echo "Total PETS = ${NTASKS_TOT}"
+
+    declare -x "ntasks"="${NTASKS_TOT}"
+    declare -x "threads_per_task"="${UFS_THREADS}"
+    declare -x "tasks_per_node"="${max_tasks_per_node}"
 
     case "${CASE}" in
       "C48" | "C96" | "C192")
-        declare -x "wtime_${step}"="03:00:00"
-        declare -x "wtime_${step}_gfs"="03:00:00"
+        declare -x "walltime"="03:00:00"
         ;;
       "C384" | "C768" | "C1152")
-        declare -x "wtime_${step}"="06:00:00"
-        declare -x "wtime_${step}_gfs"="06:00:00"
+        declare -x "walltime"="06:00:00"
         ;;
       *)
-        echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
-        exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
         ;;
     esac
 
-    unset _CDUMP _CDUMP_LIST
     unset NTASKS_TOT
+    ;;
 
-elif [[ ${step} = "ocnpost" ]]; then
-
-    export wtime_ocnpost="00:30:00"
-    export npe_ocnpost=1
-    export npe_node_ocnpost=1
-    export nth_ocnpost=1
-    export memory_ocnpost="96G"
-    if [[ ${machine} == "JET" ]]; then
-       # JET only has 88GB of requestable memory per node
-       # so a second node is required to meet the requiremtn
-       npe_ocnpost=2
-    fi
-
-elif [[ "${step}" = "fit2obs" ]]; then
-
-    export wtime_fit2obs="00:20:00"
-    export npe_fit2obs=3
-    export nth_fit2obs=1
-    export npe_node_fit2obs=1
-    export memory_fit2obs="20G"
-    if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
-
-elif [[ "${step}" = "metp" ]]; then
-
-    export nth_metp=1
-    export wtime_metp="03:00:00"
-    export npe_metp=4
-    export npe_node_metp=4
-    export wtime_metp_gfs="06:00:00"
-    export npe_metp_gfs=4
-    export npe_node_metp_gfs=4
+  "atmos_products")
+    export walltime="00:15:00"
+    export ntasks=24
+    export threads_per_task=1
+    export tasks_per_node="${ntasks}"
     export is_exclusive=True
+    ;;
 
-elif [[ ${step} = "echgres" ]]; then
-
-    export wtime_echgres="00:10:00"
-    export npe_echgres=3
-    export nth_echgres=${npe_node_max}
-    export npe_node_echgres=1
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export memory_echgres="200GB"
-    fi
-
-elif [[ ${step} = "init_chem" ]]; then
-
-    export wtime_init_chem="00:30:00"
-    export npe_init_chem=1
-    export npe_node_init_chem=1
+  "atmos_ensstat")
+    export walltime="00:30:00"
+    export ntasks=6
+    export threads_per_task=1
+    export tasks_per_node="${ntasks}"
     export is_exclusive=True
-
-elif [[ ${step} = "mom6ic" ]]; then
-
-    export wtime_mom6ic="00:30:00"
-    export npe_mom6ic=24
-    export npe_node_mom6ic=24
+    ;;
+
+  "oceanice_products")
+    export walltime="00:15:00"
+    export ntasks=1
+    export tasks_per_node=1
+    export threads_per_task=1
+    export memory="96GB"
+    ;;
+
+  "wavepostsbs")
+    export walltime="03:00:00"
+    export ntasks=1
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export NTASKS=${ntasks}
+    export memory="10GB"
+    ;;
+
+  # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes.
+  # Limit the number of tasks/node to 40.
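+  # (when the cap is applied, is_exclusive is also set to False since the job no longer fills a node)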
+  "wavepostbndpnt")
+    export walltime="03:00:00"
+    export ntasks=240
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ ${step} = "arch" || ${step} = "earc" ]]; then
-
-    eval "export wtime_${step}='06:00:00'"
-    eval "export npe_${step}=1"
-    eval "export npe_node_${step}=1"
-    eval "export nth_${step}=1"
-    eval "export memory_${step}=4096M"
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      eval "export memory_${step}=50GB"
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        export tasks_per_node=40
+        export is_exclusive=False
     fi
-
-elif [[ ${step} = "stage_ic" ]]; then
-
-    export wtime_stage_ic="00:15:00"
-    export npe_stage_ic=1
-    export npe_node_stage_ic=1
-    export nth_stage_ic=1
-    export is_exclusive=True
-
-elif [[ ${step} = "ecen" ]]; then
-
-    export wtime_ecen="00:10:00"
-    export npe_ecen=80
-    export nth_ecen=4
-    if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi
-    if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi
-    npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc)
-    export npe_node_ecen
-    export nth_cycle=${nth_ecen}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
+    export NTASKS=${ntasks}
+    ;;
+
+  "wavepostbndpntbll")
+    export walltime="01:00:00"
+    export ntasks=448
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ ${step} = "esfc" ]]; then
-
-    export wtime_esfc="00:06:00"
-    export npe_esfc=80
-    export nth_esfc=1
-    npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc)
-    export npe_node_esfc
-    export nth_cycle=${nth_esfc}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
-    export memory_esfc="80GB"
-
-elif [[ ${step} = "epos" ]]; then
-
-    export wtime_epos="00:15:00"
-    export npe_epos=80
-    export nth_epos=4
-    if [[ "${machine}" == "HERA" ]]; then
-      export nth_epos=6
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        export tasks_per_node=40
+        export is_exclusive=False
     fi
-    npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc)
-    export npe_node_epos
+    export NTASKS=${ntasks}
+    ;;
+
+  "wavepostpnt")
+    export walltime="04:00:00"
+    export ntasks=200
+    export threads_per_task=1
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ ${step} = "postsnd" ]]; then
-
-    export wtime_postsnd="02:00:00"
-    export npe_postsnd=40
-    export nth_postsnd=8
-    export npe_node_postsnd=10
-    export npe_postsndcfp=9
-    export npe_node_postsndcfp=1
-    postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc)
-    if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then
-        npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc)
-        export npe_node_postsnd
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        export tasks_per_node=40
+        export is_exclusive=False
     fi
-    export is_exclusive=True
-
-elif [[ ${step} = "awips" ]]; then
-
-    export wtime_awips="03:30:00"
-    export npe_awips=1
-    export npe_node_awips=1
-    export nth_awips=1
-    export memory_awips="3GB"
-
-elif [[ ${step} = "gempak" ]]; then
-
-    export wtime_gempak="03:00:00"
-    export npe_gempak=2
-    export npe_gempak_gfs=28
-    export npe_node_gempak=2
-    export npe_node_gempak_gfs=28
-    export nth_gempak=1
-    export memory_gempak="4GB"
-    export memory_gempak_gfs="2GB"
-
-else
+    export NTASKS=${ntasks}
+    ;;
+
+  "extractvars")
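+    # Resources here carry explicit _gefs/_gfs suffixes; the RUN-suffix loop at the end of this file exports the set matching ${RUN}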
+    export walltime_gefs="00:30:00"
+    export ntasks_gefs=1
+    export threads_per_task_gefs=1
+    export tasks_per_node_gefs="${ntasks_gefs}"
+    export walltime_gfs="${walltime_gefs}"
+    export ntasks_gfs="${ntasks_gefs}"
+    export threads_per_task_gfs="${threads_per_task_gefs}"
+    export tasks_per_node_gfs="${tasks_per_node_gefs}"
+    export is_exclusive=False
+    ;;
+
+  *)
+    echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}"
+    exit 1
+    ;;
 
-    echo "Invalid step = ${step}, ABORT!"
-    exit 2
+esac
 
+# Get machine-specific resources, overriding/extending the above assignments
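+# (e.g. an optional ${EXPDIR}/config.resources.HERCULES, sourced only if it exists, may adjust memory or task counts for that machine)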
+if [[ -f "${EXPDIR}/config.resources.${machine}" ]]; then
+   source "${EXPDIR}/config.resources.${machine}"
 fi
 
+# Check for RUN-specific variables and export them
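+# (e.g. when RUN="gfs", a defined walltime_gfs takes precedence over a plain walltime for this step)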
+for resource_var in threads_per_task ntasks tasks_per_node NTASKS memory walltime; do
+   run_resource_var="${resource_var}_${RUN}"
+   if [[ -n "${!run_resource_var+0}" ]]; then
+      declare -x "${resource_var}"="${!run_resource_var}"
+   elif [[ -n "${!resource_var+0}" ]]; then
+      export "${resource_var?}"
+   fi
+done
+
 echo "END: config.resources"
diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic
index e2bb0af2b8..f0b5dfa609 100644
--- a/parm/config/gefs/config.stage_ic
+++ b/parm/config/gefs/config.stage_ic
@@ -8,11 +8,26 @@ echo "BEGIN: config.stage_ic"
 source "${EXPDIR}/config.resources" stage_ic
 
 case "${CASE}" in
+  "C384")
+    export CPL_ATMIC=""
+    export CPL_ICEIC=""
+    export CPL_OCNIC=""
+    export CPL_WAVIC=""
+    export CPL_MEDIC=""
+    ;;
+  "C96")
+    export CPL_ATMIC=""
+    export CPL_ICEIC=""
+    export CPL_OCNIC=""
+    export CPL_WAVIC=""
+    export CPL_MEDIC=""
+    ;;
   "C48")
     export CPL_ATMIC="gefs_test"
     export CPL_ICEIC="gefs_test"
     export CPL_OCNIC="gefs_test"
     export CPL_WAVIC="gefs_test"
+    export CPL_MEDIC="gefs_test"
     ;;
   *)
     echo "FATAL ERROR Unrecognized resolution: ${CASE}"
diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index 68b364529e..584e4769a8 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -15,7 +15,7 @@ if (( $# <= 1 )); then
     echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072"
     echo "--mom6 500|100|025"
     echo "--cice6 500|100|025"
-    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025"
+    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|glo_100"
     echo "--gocart"
 
     exit 1
@@ -68,51 +68,6 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${
   skip_mediator=false
 fi
 
-case "${machine}" in
-  "WCOSS2")
-    npe_node_max=128
-    ;;
-  "HERA" | "ORION" | "HERCULES" )
-    npe_node_max=40
-    ;;
-  "JET")
-    case "${PARTITION_BATCH}" in
-      "xjet")
-        npe_node_max=24
-        ;;
-      "vjet" | "sjet")
-        npe_node_max=16
-        ;;
-      "kjet")
-        npe_node_max=40
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  "S4")
-    case "${PARTITION_BATCH}" in
-      "s4")
-        npe_node_max=32
-        ;;
-      "ivy")
-        npe_node_max=20
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  *)
-    echo "FATAL ERROR: Unrecognized machine ${machine}"
-    exit 14
-    ;;
-esac
-export npe_node_max
-
 # (Standard) Model resolution dependent variables
 case "${fv3_res}" in
     "C48")
@@ -123,8 +78,16 @@ case "${fv3_res}" in
         export layout_y_gfs=1
         export nthreads_fv3=1
         export nthreads_fv3_gfs=1
-        export cdmbgwd="0.071,2.1,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+        export nthreads_ufs=1
+        export nthreads_ufs_gfs=1
+        export xr_cnvcld=".false."  # Do not pass conv. clouds to Xu-Randall cloud fraction
+        export cdmbgwd="0.071,2.1,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="40.0,1.77,1.0,1.0"   # settings for GSL drag suite
+        export k_split=1
+        export n_split=4
+        export tau=10.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=3600
         export knob_ugwp_tauamp=6.0e-3      # setting for UGWPv1 non-stationary GWD
         export WRITE_GROUP=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
@@ -139,25 +102,40 @@ case "${fv3_res}" in
         export layout_y_gfs=2
         export nthreads_fv3=1
         export nthreads_fv3_gfs=1
-        export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+        export nthreads_ufs=1
+        export nthreads_ufs_gfs=1
+        export xr_cnvcld=".false."  # Do not pass conv. clouds to Xu-Randall cloud fraction
+        export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=3.0e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=1
+        export n_split=4
+        export tau=8.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=1800
         export WRITE_GROUP=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
         export WRITE_GROUP_GFS=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
         ;;
     "C192")
-        export DELTIM=450
+        export DELTIM=600
         export layout_x=4
         export layout_y=6
         export layout_x_gfs=4
         export layout_y_gfs=6
         export nthreads_fv3=1
         export nthreads_fv3_gfs=2
+        export nthreads_ufs=1
+        export nthreads_ufs_gfs=2
         export cdmbgwd="0.23,1.5,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=1.5e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=2
+        export n_split=4
+        export tau=6.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=1800
         export WRITE_GROUP=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
         export WRITE_GROUP_GFS=2
@@ -171,9 +149,16 @@ case "${fv3_res}" in
         export layout_y_gfs=8
         export nthreads_fv3=1
         export nthreads_fv3_gfs=2
+        export nthreads_ufs=1
+        export nthreads_ufs_gfs=2
         export cdmbgwd="1.1,0.72,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="5.0,5.0,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.8e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=2
+        export n_split=4
+        export tau=4.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=900
         export WRITE_GROUP=2
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8
         export WRITE_GROUP_GFS=2
@@ -187,29 +172,43 @@ case "${fv3_res}" in
         export layout_y_gfs=16
         export nthreads_fv3=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.15,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="2.5,7.5,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.5e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=2
+        export n_split=4
+        export tau=3.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=450
         export WRITE_GROUP=2
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
         export WRITE_GROUP_GFS=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20  # Note: this should be 10 for WCOSS2
         ;;
     "C1152")
-        export DELTIM=120
+        export DELTIM=150
         export layout_x=8
         export layout_y=16
         export layout_x_gfs=8
         export layout_y_gfs=16
         export nthreads_fv3=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.10,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="1.67,8.8,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.35e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=2
+        export n_split=6
+        export tau=2.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=450
         export WRITE_GROUP=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10  # TODO: refine these numbers when a case is available
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20  # TODO: refine these numbers when a case is available
         ;;
     "C3072")
         export DELTIM=90
@@ -219,9 +218,16 @@ case "${fv3_res}" in
         export layout_y_gfs=32
         export nthreads_fv3=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.05,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="0.625,14.1,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.13e-3      # setting for UGWPv1 non-stationary GWD
+        export k_split=4
+        export n_split=5
+        export tau=0.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=300
         export WRITE_GROUP=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
@@ -258,6 +264,10 @@ case ${fv3_res} in
     OUTPUT_FILETYPE_ATM="netcdf_parallel"
     OUTPUT_FILETYPE_SFC="netcdf_parallel"
     ;;
+  *)
+    echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
+    exit 15
+    ;;
 esac
 export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC
 
@@ -296,9 +306,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
       CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
       MOM6_RESTART_SETTING='r'
       MOM6_RIVER_RUNOFF='False'
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+        MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_MISVAL="0.0"
+      fi
       eps_imesh="4.0e-1"
       MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc"
-      MOM6_DIAG_MISVAL="0.0"
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
       ;;
@@ -309,18 +323,20 @@ if [[ "${skip_mom6}" == "false" ]]; then
       NY_GLB=320
       DT_DYNAM_MOM6='1800'
       DT_THERM_MOM6='3600'
-      FRUNOFF=""
+      FRUNOFF="runoff.daitren.clim.1deg.nc"
       CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
-      MOM6_RESTART_SETTING='n'
+      MOM6_RESTART_SETTING='r'
       MOM6_RIVER_RUNOFF='False'
       eps_imesh="2.5e-1"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      TOPOEDITS="ufs.topo_edits_011818.nc"
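+      # Forecast-only runs (gfs/gefs) use the interpolated 40-level diagnostic grid with the netCDF missing value;
+      # other runs (presumably DA, e.g. gdas) use the native 75-level oceanda grid with MISVAL=0.0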
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
+      MOM6_ALLOW_LANDMASK_CHANGES='True'
       ;;
     "050")
       ntasks_mom6=60
@@ -334,15 +350,15 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RESTART_SETTING='n'
       MOM6_RIVER_RUNOFF='True'
       eps_imesh="1.0e-1"
-      TOPOEDITS="ufs.topo_edits_011818.nc"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
-      MOM6_ALLOW_LANDMASK_CHANGES='True'
+      MOM6_ALLOW_LANDMASK_CHANGES='False'
+      TOPOEDITS=""
       ;;
     "025")
       ntasks_mom6=220
@@ -356,15 +372,15 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RIVER_RUNOFF='True'
       MOM6_RESTART_SETTING="r"
       eps_imesh="1.0e-1"
-      TOPOEDITS=""
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
-      MOM6_ALLOW_LANDMASK_CHANGES='True'
+      MOM6_ALLOW_LANDMASK_CHANGES='False'
+      TOPOEDITS=""
       ;;
     *)
       echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!"
@@ -378,10 +394,10 @@ if [[ "${skip_mom6}" == "false" ]]; then
   export DT_DYNAM_MOM6 DT_THERM_MOM6
   export FRUNOFF
   export CHLCLIM
+  export TOPOEDITS
   export MOM6_RIVER_RUNOFF
   export MOM6_RESTART_SETTING
   export eps_imesh
-  export TOPOEDITS
   export MOM6_DIAG_COORD_DEF_Z_FILE
   export MOM6_DIAG_MISVAL
   export MOM6_ALLOW_LANDMASK_CHANGES
@@ -397,6 +413,7 @@ if [[ "${skip_cice6}" == "false" ]]; then
     echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!"
     exit 1
   fi
+
   nthreads_cice6=${nthreads_mom6}  # CICE6 needs to run on same threads as MOM6
   case "${cice6_res}" in
     "500")
@@ -443,6 +460,10 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "glo_025")
       ntasks_ww3=262
       ;;
+    "glo_100")
+      ntasks_ww3=20
+      nthreads_ww3=1
+      ;;
     "glo_200")
       ntasks_ww3=30
       nthreads_ww3=1
@@ -470,39 +491,45 @@ if [[ "${skip_gocart}" == "false" ]]; then
 fi
 
 # Set the name of the UFS (previously nems) configure template to use
+# Default ufs.configure templates for supported model configurations
+if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+  tmpl_suffix="_esmf"
+fi
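+# With ESMF-managed threading, tmpl_suffix selects the "_esmf" variants of the templates below
+# (e.g. ufs.configure.s2swa_esmf.IN); otherwise the unsuffixed templates are used.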
 case "${model_list}" in
   atm)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
     ;;
   atm.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN"
     ;;
   atm.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
     ;;
   *)
-    echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}"
+    echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
     exit 16
     ;;
 esac
 
+# Allow user to override the default template
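+# ("/dev/null" is only a placeholder fallback when neither ufs_configure_template nor default_template is set)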
+export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}
+unset model_list default_template
+
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
   exit 17
 fi
 
-unset model_list
-
 echo "END: config.ufs"
diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave
index e04331e533..6a1529274a 100644
--- a/parm/config/gefs/config.wave
+++ b/parm/config/gefs/config.wave
@@ -6,26 +6,14 @@
 echo "BEGIN: config.wave"
 
 # Parameters that are common to all wave model steps
-
-# System and version
-export wave_sys_ver=v1.0.0
-
-export EXECwave="${HOMEgfs}/exec"
-export FIXwave="${HOMEgfs}/fix/wave"
-export PARMwave="${HOMEgfs}/parm/wave"
-export USHwave="${HOMEgfs}/ush"
-
 # This config contains variables/parameters used in the fcst step
 # Some others are also used across the workflow in wave component scripts
 
 # General runtime labels
-export CDUMPwave="${RUN}wave"
+export RUNwave="${RUN}wave"
 
 # In GFS/GDAS, restart files are generated/read from gdas runs
-export CDUMPRSTwave="gdas"
-
-# Grids for wave model
-export waveGRD=${waveGRD:-'mx025'}
+export RUNRSTwave="gdas"
 
 #grid dependent variable defaults
 export waveGRDN='1'                   # grid number for ww3_multi
@@ -68,6 +56,12 @@ case "${waveGRD}" in
     export wavepostGRD='glo_025'
     export waveuoutpGRD=${waveGRD}
     ;;
+  "glo_100")
+    #Global regular lat/lon 1 deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_100'
+    export waveuoutpGRD=${waveGRD}
+    ;;
   "glo_200")
     #Global regular lat/lon 2deg deg grid
     export waveinterpGRD=''
@@ -91,14 +85,9 @@ export WAVEWND_DID=
 export WAVEWND_FID=
 
 # The start time reflects the number of hindcast hours prior to the cycle initial time
-export FHMAX_WAV=${FHMAX_GFS}
 export WAVHINDH=0
-export FHMIN_WAV=0
-export FHOUT_WAV=3
-export FHMAX_HF_WAV=120
-export FHOUT_HF_WAV=1
 export FHMAX_WAV_IBP=180
-if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi
+if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_WAV} ; fi
 
 # gridded and point output rate
 export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 ))
@@ -118,8 +107,8 @@ export RSTTYPE_WAV='T'          # generate second tier of restart files
 rst_dt_gfs=$(( restart_interval_gfs * 3600 ))  # TODO: This calculation needs to move to parsing_namelists_WW3.sh
 if [[ ${rst_dt_gfs} -gt 0 ]]; then
   export DT_1_RST_WAV=0 #${rst_dt_gfs:-0}   # time between restart files, set to DTRST=1 for a single restart file
-                                        #temporarily set to zero to avoid a clash in requested restart times 
-                                        #which makes the wave model crash a fix for the model issue will be coming
+                                            # temporarily set to zero to avoid a clash in requested restart times,
+                                            # which makes the wave model crash; a fix for the model issue is forthcoming
   export DT_2_RST_WAV=${rst_dt_gfs:-0}   # restart stride for checkpointing restart
 else
   rst_dt_fhmax=$(( FHMAX_WAV * 3600 ))
@@ -130,15 +119,15 @@ export RSTIOFF_WAV=0                   # first restart file offset relative to m
 #
 # Set runmember to default value if not GEFS cpl run
 #  (for a GFS coupled run, RUNMEN would be unset, this should default to -1)
-export RUNMEM=${RUNMEM:--1}
+export RUNMEM="-1"
 # Set wave model member tags if ensemble run
 # -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN
-if [[ ${RUNMEM} = -1 ]]; then
+if (( RUNMEM == -1 )); then
 # No suffix added to model ID in case of deterministic run
-  export waveMEMB=
+  export waveMEMB=""
 else
 # Extract member number only
-  export waveMEMB="${RUNMEM: -2}"
+  export waveMEMB="${RUNMEM}"
 fi
 
 # Determine if wave component needs input and/or is coupled
diff --git a/parm/config/gefs/config.wavepostbndpnt b/parm/config/gefs/config.wavepostbndpnt
new file mode 100644
index 0000000000..412c5fb42a
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpnt
+
+echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gefs/config.wavepostbndpntbll b/parm/config/gefs/config.wavepostbndpntbll
new file mode 100644
index 0000000000..6695ab0f84
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpntbll
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpntbll ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpntbll"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpntbll
+
+echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gefs/config.wavepostpnt b/parm/config/gefs/config.wavepostpnt
new file mode 100644
index 0000000000..e87237da82
--- /dev/null
+++ b/parm/config/gefs/config.wavepostpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostpnt
+
+echo "END: config.wavepostpnt"
diff --git a/parm/config/gefs/config.wavepostsbs b/parm/config/gefs/config.wavepostsbs
new file mode 100644
index 0000000000..82cec321da
--- /dev/null
+++ b/parm/config/gefs/config.wavepostsbs
@@ -0,0 +1,27 @@
+#! /usr/bin/env bash
+
+########## config.wavepostsbs ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostsbs"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostsbs
+
+# Subgrid info for grib2 encoding
+export WAV_SUBGRBSRC=""
+export WAV_SUBGRB=""
+
+# Options for point output (switch on/off boundary point output)
+export DOFLD_WAV='YES' # Field data
+export DOPNT_WAV='YES' # Station data
+export DOGRB_WAV='YES' # Create grib2 files
+if [[ -n "${waveinterpGRD}" ]]; then
+    export DOGRI_WAV='YES' # Create interpolated grids
+else
+    export DOGRI_WAV='NO' # Do not create interpolated grids
+fi
+export DOSPC_WAV='YES' # Spectral post
+export DOBLL_WAV='YES' # Bulletin post
+
+echo "END: config.wavepostsbs"
diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml
index ce5d8aeb3d..d2b486e7ca 100644
--- a/parm/config/gefs/yaml/defaults.yaml
+++ b/parm/config/gefs/yaml/defaults.yaml
@@ -2,5 +2,14 @@ base:
   DO_JEDIATMVAR: "NO"
   DO_JEDIATMENS: "NO"
   DO_JEDIOCNVAR: "NO"
-  DO_JEDILANDDA: "NO"
+  DO_JEDISNOWDA: "NO"
   DO_MERGENSST: "NO"
+  DO_BUFRSND: "NO"
+  DO_GEMPAK: "NO"
+  DO_AWIPS: "NO"
+  KEEPDATA: "NO"
+  DO_EXTRACTVARS: "NO"
+  FHMAX_GFS: 120
+  FHMAX_HF_GFS: 0
+  REPLAY_ICS: "NO"
+  USE_OCN_PERTURB_FILES: "false"
diff --git a/parm/config/gfs/config.aero b/parm/config/gfs/config.aero
index 32993554b4..2fae019574 100644
--- a/parm/config/gfs/config.aero
+++ b/parm/config/gfs/config.aero
@@ -20,6 +20,9 @@ case ${machine} in
   "WCOSS2")
     AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions"
     ;;
+  "GAEA")
+    AERO_INPUTS_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/gocart_emissions"
+    ;;
   "JET")
     AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions"
     ;;
@@ -30,12 +33,12 @@ case ${machine} in
 esac
 export AERO_INPUTS_DIR
 
-export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.aero"
-export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.aero"
+export AERO_DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table.aero"
+export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero"
 # Biomass burning emission dataset. Choose from: gbbepx, qfed, none
 export AERO_EMIS_FIRE="qfed"
 # Directory containing GOCART configuration files
-export AERO_CONFIG_DIR="${HOMEgfs}/parm/ufs/gocart"
+export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart"
 
 # Aerosol convective scavenging factors (list of string array elements)
 # Element syntax: '<tracer_name>:<factor>'. Use <tracer_name> = * to set default factor for all aerosol tracers
diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl
index 32ba43b7ba..24a5e92644 100644
--- a/parm/config/gfs/config.aeroanl
+++ b/parm/config/gfs/config.aeroanl
@@ -6,25 +6,26 @@
 echo "BEGIN: config.aeroanl"
 
 export CASE_ANL=${CASE}
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml
+export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2"
 export STATICB_TYPE='identity'
-export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml
-export FIXgdas=${HOMEgfs}/fix/gdas
-export BERROR_DATA_DIR=${FIXgdas}/bump/aero/${CASE_ANL}/
+export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2"
+export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/"
 export BERROR_DATE="20160630.000000"
 
+export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2"
+
 export io_layout_x=@IO_LAYOUT_X@
 export io_layout_y=@IO_LAYOUT_Y@
 
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
+export JEDIEXE="${EXECgfs}/gdas.x"
 
 if [[ "${DOIAU}" == "YES" ]]; then
   export aero_bkg_times="3,6,9"
-  export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml
+  export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2"
 else
   export aero_bkg_times="6"
-  export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml
+  export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2"
 fi
 
 echo "END: config.aeroanl"
diff --git a/parm/config/gfs/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal
index 230ec5205a..34e5d8f116 100644
--- a/parm/config/gfs/config.aeroanlfinal
+++ b/parm/config/gfs/config.aeroanlfinal
@@ -6,5 +6,5 @@
 echo "BEGIN: config.aeroanlfinal"
 
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlfinal
+source "${EXPDIR}/config.resources" aeroanlfinal
 echo "END: config.aeroanlfinal"
diff --git a/parm/config/gfs/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit
index 72175b8d0c..7036d3d27b 100644
--- a/parm/config/gfs/config.aeroanlinit
+++ b/parm/config/gfs/config.aeroanlinit
@@ -6,5 +6,5 @@
 echo "BEGIN: config.aeroanlinit"
 
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlinit
+source "${EXPDIR}/config.resources" aeroanlinit
 echo "END: config.aeroanlinit"
diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun
index da13df2831..012e5b79f3 100644
--- a/parm/config/gfs/config.aeroanlrun
+++ b/parm/config/gfs/config.aeroanlrun
@@ -6,6 +6,6 @@
 echo "BEGIN: config.aeroanlrun"
 
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlrun
+source "${EXPDIR}/config.resources" aeroanlrun
 
 echo "END: config.aeroanlrun"
diff --git a/parm/config/gfs/config.anal b/parm/config/gfs/config.anal
index e3a17f9c6a..27ff8742e4 100644
--- a/parm/config/gfs/config.anal
+++ b/parm/config/gfs/config.anal
@@ -12,20 +12,13 @@ if [[ ${DONST} = "YES" ]]; then
   . ${EXPDIR}/config.nsst
 fi
 
-if [[ "${CDUMP}" = "gfs" ]] ; then
+if [[ "${RUN}" == "gfs" ]] ; then
   export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero.
   export GENDIAG="NO"
   export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,'
   export DIAG_TARBALL="YES"
 fi
 
-export npe_gsi=${npe_anal}
-
-if [[ "${CDUMP}" == "gfs" ]] ; then
-  export npe_gsi=${npe_anal_gfs}
-  export nth_anal=${nth_anal_gfs}
-fi
-
 # Set parameters specific to L127
 if [[ ${LEVS} = "128" ]]; then
   export GRIDOPTS="nlayers(63)=1,nlayers(64)=1,"
@@ -45,51 +38,58 @@ export AMSR2BF=${AMSR2BF:-/dev/null}
 
 # Set default values for info files and observation error
 #  NOTE:  Remember to set PRVT in config.prep as OBERROR is set below
-export CONVINFO=${FIXgsi}/global_convinfo.txt
-export OZINFO=${FIXgsi}/global_ozinfo.txt
-export SATINFO=${FIXgsi}/global_satinfo.txt
-export OBERROR=${FIXgsi}/prepobs_errtable.global
-
+export CONVINFO=${FIXgfs}/gsi/global_convinfo.txt
+export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt
+export SATINFO=${FIXgfs}/gsi/global_satinfo.txt
+export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global
+
+if [[ ${GSI_SOILANAL} = "YES" ]]; then
+    export hofx_2m_sfcfile=".true."
+    export reducedgrid=".false." # not possible for the sfc analysis; Jeff Whitaker says it's not useful anyway
+    export paranc=".false." # temporary until sfc io is coded for paranc (PR being prepared by T. Gichamo)
+    export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt
+    export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt
+fi
 
 # Use experimental dumps in EMC GFS v16 parallels
 if [[ ${RUN_ENVIR} == "emc" ]]; then
   #   Set info files and prepobs.errtable.global for GFS v16 retrospective parallels
   if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900
   fi
 
   #   Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps
   if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706
   fi
 
   #   Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations
   if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
   fi
 
   #   Assimilate COSMIC-2
   if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
   fi
 
   #   Assimilate HDOB
   if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412
   fi
 
   #   Assimilate Metop-C GNSSRO
   if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612
   fi
 
   #   Assimilate DO-2 GeoOptics
   if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712
   fi
 
   #   NOTE:
@@ -98,38 +98,38 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
   #   needed at this time.
   #   Assimilate COSMIC-2 GPS
   #   if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
-  #   export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312
+  #   export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312
   #   fi
 
   #   Turn off assmilation of OMPS during period of bad data
   if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then
-    export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600
+    export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600
   fi
 
 
   #   Set satinfo for start of GFS v16 parallels
   if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900
   fi
 
   #   Turn on assimilation of Metop-C AMSUA and MHS
   if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706
   fi
 
   #   Turn off assimilation of Metop-A MHS
   if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012
   fi
 
   #   Turn off assimilation of S-NPP CrIS
   if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118
   fi
 
   #   Turn off assimilation of MetOp-A IASI
   if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206
   fi
 
   #   NOTE:
@@ -139,8 +139,14 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
   #
   #   Turn off assmilation of all Metop-A MHS
   #   if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
-  #     export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312
+  #     export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312
   #   fi
 fi
 
+# Flag to turn on (.true.) or off (.false.) the infrared cloud and aerosol detection software
+# for AIRS, CrIS, and IASI.  Default is .false.
+export AIRS_CADS=".false."
+export CRIS_CADS=".false."
+export IASI_CADS=".false."
+
 echo "END: config.anal"
diff --git a/parm/config/gfs/config.analcalc b/parm/config/gfs/config.analcalc
index 9405114ecc..d9501503f0 100644
--- a/parm/config/gfs/config.analcalc
+++ b/parm/config/gfs/config.analcalc
@@ -6,10 +6,6 @@
 echo "BEGIN: config.analcalc"
 
 # Get task specific resources
-. $EXPDIR/config.resources analcalc
-
-if [[ "$CDUMP" == "gfs" ]]; then
-   export nth_echgres=$nth_echgres_gfs
-fi
+. ${EXPDIR}/config.resources analcalc
 
 echo "END: config.analcalc"
diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index abfbd80734..9a06088ecc 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -5,17 +5,34 @@
 
 echo "BEGIN: config.atmanl"
 
-export CASE_ANL=${CASE}
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml
-export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml
-export STATICB_TYPE="gsibec"
-export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML=@JCB_ALGO_YAML@
+
+export STATICB_TYPE=@STATICB_TYPE@
+export LOCALIZATION_TYPE="bump"
 export INTERP_METHOD='barycentric'
 
+if [[ ${DOHYBVAR} = "YES" ]]; then
+    # shellcheck disable=SC2153
+    export CASE_ANL=${CASE_ENS}
+    export BERROR_YAML="atmosphere_background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}"
+else
+    export CASE_ANL=${CASE}
+    export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}"
+fi
+
+export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2"
+export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2"
+export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2"
+
+export layout_x_atmanl=@LAYOUT_X_ATMANL@
+export layout_y_atmanl=@LAYOUT_Y_ATMANL@
+
 export io_layout_x=@IO_LAYOUT_X@
 export io_layout_y=@IO_LAYOUT_Y@
 
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
+export JEDIEXE=${EXECgfs}/gdas.x
 
 echo "END: config.atmanl"
diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc
new file mode 100644
index 0000000000..ab7efa3a60
--- /dev/null
+++ b/parm/config/gfs/config.atmanlfv3inc
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.atmanlfv3inc ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlfv3inc"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlfv3inc
+
+export JCB_ALGO=fv3jedi_fv3inc_variational
+export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
+
+echo "END: config.atmanlfv3inc"
diff --git a/parm/config/gfs/config.atmanlinit b/parm/config/gfs/config.atmanlinit
index bc95ef4962..1aec88bcc2 100644
--- a/parm/config/gfs/config.atmanlinit
+++ b/parm/config/gfs/config.atmanlinit
@@ -7,4 +7,5 @@ echo "BEGIN: config.atmanlinit"
 
 # Get task specific resources
 . "${EXPDIR}/config.resources" atmanlinit
+
 echo "END: config.atmanlinit"
diff --git a/parm/config/gfs/config.atmanlrun b/parm/config/gfs/config.atmanlrun
deleted file mode 100644
index 68b7615718..0000000000
--- a/parm/config/gfs/config.atmanlrun
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.atmanlrun ##########
-# Atm Var Analysis specific
-
-echo "BEGIN: config.atmanlrun"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" atmanlrun
-
-echo "END: config.atmanlrun"
diff --git a/parm/config/gfs/config.atmanlvar b/parm/config/gfs/config.atmanlvar
new file mode 100644
index 0000000000..cbc0334a08
--- /dev/null
+++ b/parm/config/gfs/config.atmanlvar
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmanlvar ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlvar"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlvar
+
+echo "END: config.atmanlvar"
diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index 58fd7b6e22..ddd3d88659 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -5,14 +5,21 @@
 
 echo "BEGIN: config.atmensanl"
 
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml
-export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML=@JCB_ALGO_YAML@
+
 export INTERP_METHOD='barycentric'
 
+export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2"
+
+export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@
+export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@
+
 export io_layout_x=@IO_LAYOUT_X@
 export io_layout_y=@IO_LAYOUT_Y@
 
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x
+export JEDIEXE=${EXECgfs}/gdas.x
 
 echo "END: config.atmensanl"
diff --git a/parm/config/gfs/config.atmensanlfv3inc b/parm/config/gfs/config.atmensanlfv3inc
new file mode 100644
index 0000000000..2dc73f3f6e
--- /dev/null
+++ b/parm/config/gfs/config.atmensanlfv3inc
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.atmensanlfv3inc ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmensanlfv3inc"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmensanlfv3inc
+
+export JCB_ALGO=fv3jedi_fv3inc_lgetkf
+export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
+
+echo "END: config.atmensanlfv3inc"
diff --git a/parm/config/gfs/config.atmensanlinit b/parm/config/gfs/config.atmensanlinit
index 34429023bb..0eee2ffa82 100644
--- a/parm/config/gfs/config.atmensanlinit
+++ b/parm/config/gfs/config.atmensanlinit
@@ -7,4 +7,5 @@ echo "BEGIN: config.atmensanlinit"
 
 # Get task specific resources
 . "${EXPDIR}/config.resources" atmensanlinit
+
 echo "END: config.atmensanlinit"
diff --git a/parm/config/gfs/config.atmensanlletkf b/parm/config/gfs/config.atmensanlletkf
new file mode 100644
index 0000000000..1fdc57ae62
--- /dev/null
+++ b/parm/config/gfs/config.atmensanlletkf
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmensanlletkf ##########
+# Atm Ens Analysis specific
+
+echo "BEGIN: config.atmensanlletkf"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmensanlletkf
+
+echo "END: config.atmensanlletkf"
diff --git a/parm/config/gfs/config.atmensanlrun b/parm/config/gfs/config.atmensanlrun
deleted file mode 100644
index 01f211a17a..0000000000
--- a/parm/config/gfs/config.atmensanlrun
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.atmensanlrun ##########
-# Atm Ens Analysis specific
-
-echo "BEGIN: config.atmensanlrun"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" atmensanlrun
-
-echo "END: config.atmensanlrun"
diff --git a/parm/config/gfs/config.atmos_products b/parm/config/gfs/config.atmos_products
index c3e861b281..e2827bc98f 100644
--- a/parm/config/gfs/config.atmos_products
+++ b/parm/config/gfs/config.atmos_products
@@ -12,13 +12,15 @@ echo "BEGIN: config.atmos_products"
 export NFHRS_PER_GROUP=3
 
 # Scripts used by this job
-export INTERP_ATMOS_MASTERSH="${HOMEgfs}/ush/interp_atmos_master.sh"
-export INTERP_ATMOS_SFLUXSH="${HOMEgfs}/ush/interp_atmos_sflux.sh"
+export INTERP_ATMOS_MASTERSH="${USHgfs}/interp_atmos_master.sh"
+export INTERP_ATMOS_SFLUXSH="${USHgfs}/interp_atmos_sflux.sh"
 
 if [[ "${RUN:-}" == "gdas" ]]; then
   export downset=1
   export FHOUT_PGBS=${FHOUT:-1}  # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg
   export FLXGF="NO"  # Create interpolated sflux.1p00 file
+  export WGNE="NO"  # WGNE products are created for first FHMAX_WGNE forecast hours
+  export FHMAX_WGNE=0
 elif [[ "${RUN:-}" == "gfs" ]]; then
   #JKHexport downset=2    ## create pgrb2b files
   export downset=1        ## JKH  
@@ -27,9 +29,9 @@ elif [[ "${RUN:-}" == "gfs" ]]; then
 fi
 
 # paramlist files for the different forecast hours and downsets
-export paramlista="${HOMEgfs}/parm/post/global_1x1_paramlist_g2"
-export paramlista_anl="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.anl"
-export paramlista_f000="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.f000"
-export paramlistb="${HOMEgfs}/parm/post/global_master-catchup_parmlist_g2"
+export paramlista="${PARMgfs}/product/gfs.fFFF.paramlist.a.txt"
+export paramlista_anl="${PARMgfs}/product/gfs.anl.paramlist.a.txt"
+export paramlista_f000="${PARMgfs}/product/gfs.f000.paramlist.a.txt"
+export paramlistb="${PARMgfs}/product/gfs.fFFF.paramlist.b.txt"
 
 echo "END: config.atmos_products"
diff --git a/parm/config/gfs/config.awips b/parm/config/gfs/config.awips
index 3b78d4bb4b..61f0dc5652 100644
--- a/parm/config/gfs/config.awips
+++ b/parm/config/gfs/config.awips
@@ -8,9 +8,6 @@ echo "BEGIN: config.awips"
 # Get task specific resources
 . "${EXPDIR}/config.resources" awips
 
-export AWIPS20KM1P0DEGSH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG"
-export AWIPSG2SH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_G2"
-
 # No. of concurrent awips jobs
 export NAWIPSGRP=42
 
diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base
new file mode 120000
index 0000000000..9a8441d520
--- /dev/null
+++ b/parm/config/gfs/config.base
@@ -0,0 +1 @@
+config.base.hera
\ No newline at end of file
diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn
deleted file mode 120000
index 6e9cfcec1a..0000000000
--- a/parm/config/gfs/config.base.emc.dyn
+++ /dev/null
@@ -1 +0,0 @@
-config.base.emc.dyn_hera
\ No newline at end of file
diff --git a/parm/config/gfs/config.base.emc.dyn_emc b/parm/config/gfs/config.base.emc.dyn_emc
deleted file mode 100644
index 88a9643ab8..0000000000
--- a/parm/config/gfs/config.base.emc.dyn_emc
+++ /dev/null
@@ -1,405 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.base ##########
-# Common to all steps
-
-echo "BEGIN: config.base"
-
-# Machine environment
-export machine="@MACHINE@"
-
-# EMC parallel or NCO production
-export RUN_ENVIR="emc"
-
-# Account, queue, etc.
-export ACCOUNT="@ACCOUNT@"
-export QUEUE="@QUEUE@"
-export QUEUE_SERVICE="@QUEUE_SERVICE@"
-export PARTITION_BATCH="@PARTITION_BATCH@"
-export PARTITION_SERVICE="@PARTITION_SERVICE@"
-
-# Project to use in mass store:
-export HPSS_PROJECT="@HPSS_PROJECT@"
-
-# Directories relative to installation areas:
-export HOMEgfs=@HOMEgfs@
-export PARMgfs="${HOMEgfs}/parm"
-export FIXgfs="${HOMEgfs}/fix"
-export USHgfs="${HOMEgfs}/ush"
-export UTILgfs="${HOMEgfs}/util"
-export EXECgfs="${HOMEgfs}/exec"
-export SCRgfs="${HOMEgfs}/scripts"
-
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
-
-########################################################################
-
-# GLOBAL static environment parameters
-export PACKAGEROOT="@PACKAGEROOT@"    # TODO: set via prod_envir in Ops
-export COMROOT="@COMROOT@"    # TODO: set via prod_envir in Ops
-export COMINsyn="@COMINsyn@"
-export DMPDIR="@DMPDIR@"
-export BASE_CPLIC="@BASE_CPLIC@"
-
-# USER specific paths
-export HOMEDIR="@HOMEDIR@"
-export STMP="@STMP@"
-export PTMP="@PTMP@"
-export NOSCRUB="@NOSCRUB@"
-
-# Base directories for various builds
-export BASE_GIT="@BASE_GIT@"
-
-# Toggle to turn on/off GFS downstream processing.
-export DO_GOES="@DO_GOES@" # GOES products
-export DO_BUFRSND="NO"     # BUFR sounding products
-export DO_GEMPAK="NO"      # GEMPAK products
-export DO_AWIPS="NO"       # AWIPS products
-export DO_NPOESS="NO"      # NPOESS products
-export DO_TRACKER="YES"    # Hurricane track verification
-export DO_GENESIS="YES"    # Cyclone genesis verification
-export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU)
-# The monitor is not yet supported on Hercules
-if [[ "${machine}" == "HERCULES" ]]; then
-   export DO_VERFOZN="NO"     # Ozone data assimilation monitoring
-   export DO_VERFRAD="NO"     # Radiance data assimilation monitoring
-   export DO_VMINMON="NO"     # GSI minimization monitoring
-else
-   export DO_VERFOZN="YES"    # Ozone data assimilation monitoring
-   export DO_VERFRAD="YES"    # Radiance data assimilation monitoring
-   export DO_VMINMON="YES"    # GSI minimization monitoring
-fi
-export DO_MOS="NO"         # GFS Model Output Statistics - Only supported on WCOSS2
-
-# NO for retrospective parallel; YES for real-time parallel
-#  arch.sh uses REALTIME for MOS.  Need to set REALTIME=YES
-#  if want MOS written to HPSS.   Should update arch.sh to
-#  use RUNMOS flag
-export REALTIME="YES"
-
-# Experiment mode (cycled or forecast-only)
-export MODE="@MODE@" # cycled/forecast-only
-
-####################################################
-# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE
-# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW
-# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT
-# CLEAR
-####################################################
-# Build paths relative to $HOMEgfs
-export FIXgsi="${HOMEgfs}/fix/gsi"
-export HOMEpost="${HOMEgfs}"
-export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}"
-
-# CONVENIENT utility scripts and other environment parameters
-export NCP="/bin/cp -p"
-export NMV="/bin/mv"
-export NLN="/bin/ln -sf"
-export VERBOSE="YES"
-export KEEPDATA="NO"
-export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
-export CHGRP_CMD="@CHGRP_CMD@"
-export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
-export NCLEN="${HOMEgfs}/ush/getncdimlen"
-
-# Machine environment, jobs, and other utility scripts
-export BASE_ENV="${HOMEgfs}/env"
-export BASE_JOB="${HOMEgfs}/jobs/rocoto"
-
-# EXPERIMENT specific environment parameters
-export SDATE=@SDATE@
-export EDATE=@EDATE@
-export EXP_WARM_START="@EXP_WARM_START@"
-export assim_freq=6
-export PSLOT="@PSLOT@"
-export EXPDIR="@EXPDIR@/${PSLOT}"
-export ROTDIR="@COMROOT@/${PSLOT}"
-export ROTDIR_DUMP="YES"                #Note: A value of "NO" does not currently work
-export DUMP_SUFFIX=""
-if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
-    export DUMP_SUFFIX="p"              # Use dumps from NCO GFS v15.3 parallel
-fi
-export DATAROOT="${STMP}/RUNDIRS/${PSLOT}"  # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}"  # TODO: Should be removed; use DATAROOT instead
-export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
-export ATARDIR="@ATARDIR@"
-
-# Commonly defined parameters in JJOBS
-export envir=${envir:-"prod"}
-export NET="gfs"  # NET is defined in the job-card (ecf)
-export RUN=${RUN:-${CDUMP:-"gfs"}}  # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy
-# TODO: determine where is RUN actually used in the workflow other than here
-# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be
-#       consistent w/ EE2?
-
-# Get all the COM path templates
-source "${EXPDIR}/config.com"
-
-export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
-export LOGSCRIPT=${LOGSCRIPT:-""}
-#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
-#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"}
-export REDOUT="1>"
-export REDERR="2>"
-
-export SENDECF=${SENDECF:-"NO"}
-export SENDSDM=${SENDSDM:-"NO"}
-export SENDDBN_NTC=${SENDDBN_NTC:-"NO"}
-export SENDDBN=${SENDDBN:-"NO"}
-export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn}
-
-# APP settings
-export APP=@APP@
-
-# Defaults:
-export DO_ATM="YES"
-export DO_COUPLED="NO"
-export DO_WAVE="NO"
-export DO_OCN="NO"
-export DO_ICE="NO"
-export DO_AERO="NO"
-export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
-export FRAC_GRID=".true."
-
-# Set operational resolution
-export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used?
-
-# Resolution specific parameters
-export LEVS=128
-export CASE="@CASECTL@"
-export CASE_ENS="@CASEENS@"
-export OCNRES="@OCNRES@"
-export ICERES="${OCNRES}"
-# These are the currently recommended grid-combinations
-case "${CASE}" in
-    "C48")
-        export waveGRD='glo_500'
-        ;;
-    "C96" | "C192")
-        export waveGRD='glo_200'
-        ;;
-    "C384")
-        export waveGRD='glo_025'
-        ;;
-    "C768" | "C1152")
-        export waveGRD='mx025'
-        ;;
-    *)
-        echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
-        exit 1
-        ;;
-esac
-
-case "${APP}" in
-  ATM)
-    ;;
-  ATMA)
-    export DO_AERO="YES"
-    ;;
-  ATMW)
-    export DO_COUPLED="YES"
-    export DO_WAVE="YES"
-    export WAVE_CDUMP="both"
-    ;;
-  NG-GODAS)
-    export DO_ATM="NO"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-    ;;
-  S2S*)
-    export DO_COUPLED="YES"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-
-    if [[ "${APP}" =~ A$ ]]; then
-        export DO_AERO="YES"
-    fi
-
-    if [[ "${APP}" =~ ^S2SW ]]; then
-        export DO_WAVE="YES"
-        export WAVE_CDUMP="both"
-    fi
-    ;;
-  *)
-    echo "Unrecognized APP: '${APP}'"
-    exit 1
-    ;;
-esac
-
-# Surface cycle update frequency
-if [[ "${CDUMP}" =~ "gdas" ]] ; then
-   export FHCYC=1
-   export FTSFS=10
-elif [[ "${CDUMP}" =~ "gfs" ]] ; then
-   export FHCYC=24
-fi
-
-# Output frequency of the forecast model (for cycling)
-export FHMIN=0
-export FHMAX=9
-export FHOUT=3           # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
-
-# Cycle to run EnKF  (set to BOTH for both gfs and gdas)
-export EUPD_CYC="gdas"
-
-# GFS cycle info
-export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
-
-# GFS output and frequency
-export FHMIN_GFS=0
-
-export FHMAX_GFS_00=120
-export FHMAX_GFS_06=120
-export FHMAX_GFS_12=120
-export FHMAX_GFS_18=120
-current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var}
-
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
-export FHMAX_HF_GFS=0
-export FHOUT_HF_GFS=1
-if (( gfs_cyc != 0 )); then
-    export STEP_GFS=$(( 24 / gfs_cyc ))
-else
-    export STEP_GFS="0"
-fi
-export ILPOST=1           # gempak output frequency up to F120
-
-# GFS restart interval in hours
-#JKHexport restart_interval_gfs=12
-export restart_interval_gfs=-1                        ## JKH
-# NOTE: Do not set this to zero.  Instead set it to $FHMAX_GFS
-# TODO: Remove this variable from config.base and reference from config.fcst
-# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used
-
-export QUILTING=".true."
-export OUTPUT_GRID="gaussian_grid"
-export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST
-export WRITE_NSFLIP=".true."
-
-# IAU related parameters
-export DOIAU="@DOIAU@"        # Enable 4DIAU for control with 3 increments
-export IAUFHRS="3,6,9"
-export IAU_FHROT=${IAUFHRS%%,*}
-export IAU_DELTHRS=6
-export IAU_OFFSET=6
-export DOIAU_ENKF=${DOIAU:-"YES"}   # Enable 4DIAU for EnKF ensemble
-export IAUFHRS_ENKF="3,6,9"
-export IAU_DELTHRS_ENKF=6
-
-# Use Jacobians in eupd and thereby remove need to run eomg
-export lobsdiag_forenkf=".true."
-
-# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA
-#   export DO_WAVE="NO"
-#   echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE"
-# fi
-
-# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL
-export imp_physics=8
-
-# Shared parameters
-# DA engine
-export DO_JEDIATMVAR="@DO_JEDIATMVAR@"
-export DO_JEDIATMENS="@DO_JEDIATMENS@"
-export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@"
-export DO_JEDILANDDA="@DO_JEDILANDDA@"
-export DO_MERGENSST="@DO_MERGENSST@"
-
-# Hybrid related
-export DOHYBVAR="@DOHYBVAR@"
-export NMEM_ENS=@NMEM_ENS@
-export NMEM_ENS_GFS=@NMEM_ENS@
-export SMOOTH_ENKF="NO"
-export l4densvar=".true."
-export lwrite4danl=".true."
-
-# EnKF output frequency
-if [[ ${DOHYBVAR} = "YES" ]]; then
-    export FHMIN_ENKF=3
-    export FHMAX_ENKF=9
-    export FHMAX_ENKF_GFS=120
-    export FHOUT_ENKF_GFS=3
-    if [[ ${l4densvar} = ".true." ]]; then
-        export FHOUT=1
-        export FHOUT_ENKF=1
-    else
-        export FHOUT_ENKF=3
-    fi
-fi
-
-# if 3DVAR and IAU
-if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES"  ]]; then
-    export IAUFHRS="6"
-    export IAU_FHROT="3"
-    export IAU_FILTER_INCREMENTS=".true."
-    export IAUFHRS_ENKF="6"
-fi
-
-# Check if cycle is cold starting, DOIAU off, or free-forecast mode
-if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
-  export IAU_OFFSET=0
-  export IAU_FHROT=0
-  export IAUFHRS="6"
-fi
-
-if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
-
-# turned on nsst in anal and/or fcst steps, and turn off rtgsst
-export DONST="YES"
-if [[ ${DONST} = "YES" ]]; then export FNTSFA="        "; fi
-
-# The switch to apply SST elevation correction or not
-export nst_anl=.true.
-
-# Make the nsstbufr file on the fly or use the GDA version
-export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
-
-# Make the aircraft prepbufr file on the fly or use the GDA version
-export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
-
-# Analysis increments to zero in CALCINCEXEC
-export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
-
-# Write analysis files for early cycle EnKF
-export DO_CALC_INCREMENT_ENKF_GFS="YES"
-
-# Stratospheric increments to zero
-export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
-export INCVARS_EFOLD="5"
-
-# Swith to generate netcdf or binary diagnostic files.  If not specified,
-# script default to binary diagnostic files.   Set diagnostic file
-# variables here since used in DA job
-export netcdf_diag=".true."
-export binary_diag=".false."
-
-# Verification options
-export DO_METP="NO"          # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
-export DO_FIT2OBS="YES"      # Run fit to observations package
-
-# Archiving options
-export HPSSARCH="@HPSSARCH@"        # save data to HPSS archive
-export LOCALARCH="@LOCALARCH@"        # save data to local archive
-if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
-   echo "Both HPSS and local archiving selected.  Please choose one or the other."
-   exit 2
-fi
-export ARCH_CYC=00           # Archive data at this cycle for warm_start capability
-export ARCH_WARMICFREQ=4     # Archive frequency in days for warm_start capability
-export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs forecast-only capability
-
-#--online archive of nemsio files for fit2obs verification
-export FITSARC="YES"
-export FHMAX_FITS=132
-[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
-
-echo "END: config.base"
diff --git a/parm/config/gfs/config.base.emc.dyn_hera b/parm/config/gfs/config.base.emc.dyn_hera
deleted file mode 100644
index 231b48b0b2..0000000000
--- a/parm/config/gfs/config.base.emc.dyn_hera
+++ /dev/null
@@ -1,405 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.base ##########
-# Common to all steps
-
-echo "BEGIN: config.base"
-
-# Machine environment
-export machine="@MACHINE@"
-
-# EMC parallel or NCO production
-export RUN_ENVIR="emc"
-
-# Account, queue, etc.
-export ACCOUNT="@ACCOUNT@"
-export QUEUE="@QUEUE@"
-export QUEUE_SERVICE="@QUEUE_SERVICE@"
-export PARTITION_BATCH="@PARTITION_BATCH@"
-export PARTITION_SERVICE="@PARTITION_SERVICE@"
-
-# Project to use in mass store:
-export HPSS_PROJECT="@HPSS_PROJECT@"
-
-# Directories relative to installation areas:
-export HOMEgfs=@HOMEgfs@
-export PARMgfs="${HOMEgfs}/parm"
-export FIXgfs="${HOMEgfs}/fix"
-export USHgfs="${HOMEgfs}/ush"
-export UTILgfs="${HOMEgfs}/util"
-export EXECgfs="${HOMEgfs}/exec"
-export SCRgfs="${HOMEgfs}/scripts"
-
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
-
-########################################################################
-
-# GLOBAL static environment parameters
-export PACKAGEROOT="@PACKAGEROOT@"    # TODO: set via prod_envir in Ops
-export COMROOT="@COMROOT@"    # TODO: set via prod_envir in Ops
-export COMINsyn="@COMINsyn@"
-export DMPDIR="@DMPDIR@"
-export BASE_CPLIC="@BASE_CPLIC@"
-
-# USER specific paths
-export HOMEDIR="@HOMEDIR@"
-export STMP="@STMP@"
-export PTMP="@PTMP@"
-export NOSCRUB="@NOSCRUB@"
-
-# Base directories for various builds
-export BASE_GIT="@BASE_GIT@"
-
-# Toggle to turn on/off GFS downstream processing.
-export DO_GOES="@DO_GOES@" # GOES products
-export DO_BUFRSND="NO"     # BUFR sounding products
-export DO_GEMPAK="NO"      # GEMPAK products
-export DO_AWIPS="NO"       # AWIPS products
-export DO_NPOESS="NO"       # NPOESS products
-export DO_TRACKER="NO"    # Hurricane track verification                 ## JKH
-export DO_GENESIS="NO"    # Cyclone genesis verification                 ## JKH
-export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU)
-# The monitor is not yet supported on Hercules
-if [[ "${machine}" == "HERCULES" ]]; then
-   export DO_VERFOZN="NO"     # Ozone data assimilation monitoring
-   export DO_VERFRAD="NO"     # Radiance data assimilation monitoring
-   export DO_VMINMON="NO"     # GSI minimization monitoring
-else
-   export DO_VERFOZN="YES"    # Ozone data assimilation monitoring
-   export DO_VERFRAD="YES"    # Radiance data assimilation monitoring
-   export DO_VMINMON="YES"    # GSI minimization monitoring
-fi
-export DO_MOS="NO"         # GFS Model Output Statistics - Only supported on WCOSS2
-
-# NO for retrospective parallel; YES for real-time parallel
-#  arch.sh uses REALTIME for MOS.  Need to set REALTIME=YES
-#  if want MOS written to HPSS.   Should update arch.sh to
-#  use RUNMOS flag
-export REALTIME="YES"
-
-# Experiment mode (cycled or forecast-only)
-export MODE="@MODE@" # cycled/forecast-only
-
-####################################################
-# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE
-# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW
-# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT
-# CLEAR
-####################################################
-# Build paths relative to $HOMEgfs
-export FIXgsi="${HOMEgfs}/fix/gsi"
-export HOMEpost="${HOMEgfs}"
-export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}"
-
-# CONVENIENT utility scripts and other environment parameters
-export NCP="/bin/cp -p"
-export NMV="/bin/mv"
-export NLN="/bin/ln -sf"
-export VERBOSE="YES"
-export KEEPDATA="NO"
-export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
-export CHGRP_CMD="@CHGRP_CMD@"
-export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
-export NCLEN="${HOMEgfs}/ush/getncdimlen"
-
-# Machine environment, jobs, and other utility scripts
-export BASE_ENV="${HOMEgfs}/env"
-export BASE_JOB="${HOMEgfs}/jobs/rocoto"
-
-# EXPERIMENT specific environment parameters
-export SDATE=@SDATE@
-export EDATE=@EDATE@
-export EXP_WARM_START="@EXP_WARM_START@"
-export assim_freq=6
-export PSLOT="@PSLOT@"
-export EXPDIR="@EXPDIR@/${PSLOT}"
-export ROTDIR="@COMROOT@/${PSLOT}"
-export ROTDIR_DUMP="YES"                #Note: A value of "NO" does not currently work
-export DUMP_SUFFIX=""
-if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
-    export DUMP_SUFFIX="p"              # Use dumps from NCO GFS v15.3 parallel
-fi
-export DATAROOT="${STMP}/RUNDIRS/${PSLOT}"  # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}"  # TODO: Should be removed; use DATAROOT instead
-export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
-export ATARDIR="@ATARDIR@"
-
-# Commonly defined parameters in JJOBS
-export envir=${envir:-"prod"}
-export NET="gfs"  # NET is defined in the job-card (ecf)
-export RUN=${RUN:-${CDUMP:-"gfs"}}  # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy
-# TODO: determine where is RUN actually used in the workflow other than here
-# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be
-#       consistent w/ EE2?
-
-# Get all the COM path templates
-source "${EXPDIR}/config.com"
-
-export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
-export LOGSCRIPT=${LOGSCRIPT:-""}
-#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
-#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"}
-export REDOUT="1>"
-export REDERR="2>"
-
-export SENDECF=${SENDECF:-"NO"}
-export SENDSDM=${SENDSDM:-"NO"}
-export SENDDBN_NTC=${SENDDBN_NTC:-"NO"}
-export SENDDBN=${SENDDBN:-"NO"}
-export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn}
-
-# APP settings
-export APP=@APP@
-
-# Defaults:
-export DO_ATM="YES"
-export DO_COUPLED="NO"
-export DO_WAVE="NO"
-export DO_OCN="NO"
-export DO_ICE="NO"
-export DO_AERO="NO"
-export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
-export FRAC_GRID=".true."
-
-# Set operational resolution
-export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used?
-
-# Resolution specific parameters
-export LEVS=128
-export CASE="@CASECTL@"
-export CASE_ENS="@CASEENS@"
-export OCNRES="@OCNRES@"
-export ICERES="${OCNRES}"
-# These are the currently recommended grid-combinations
-case "${CASE}" in
-    "C48")
-        export waveGRD='glo_500'
-        ;;
-    "C96" | "C192")
-        export waveGRD='glo_200'
-        ;;
-    "C384")
-        export waveGRD='glo_025'
-        ;;
-    "C768" | "C1152")
-        export waveGRD='mx025'
-        ;;
-    *)
-        echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
-        exit 1
-        ;;
-esac
-
-case "${APP}" in
-  ATM)
-    ;;
-  ATMA)
-    export DO_AERO="YES"
-    ;;
-  ATMW)
-    export DO_COUPLED="YES"
-    export DO_WAVE="YES"
-    export WAVE_CDUMP="both"
-    ;;
-  NG-GODAS)
-    export DO_ATM="NO"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-    ;;
-  S2S*)
-    export DO_COUPLED="YES"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-
-    if [[ "${APP}" =~ A$ ]]; then
-        export DO_AERO="YES"
-    fi
-
-    if [[ "${APP}" =~ ^S2SW ]]; then
-        export DO_WAVE="YES"
-        export WAVE_CDUMP="both"
-    fi
-    ;;
-  *)
-    echo "Unrecognized APP: '${APP}'"
-    exit 1
-    ;;
-esac
-
-# Surface cycle update frequency
-if [[ "${CDUMP}" =~ "gdas" ]] ; then
-   export FHCYC=1
-   export FTSFS=10
-elif [[ "${CDUMP}" =~ "gfs" ]] ; then
-   export FHCYC=24
-fi
-
-# Output frequency of the forecast model (for cycling)
-export FHMIN=0
-export FHMAX=9
-export FHOUT=3           # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
-
-# Cycle to run EnKF  (set to BOTH for both gfs and gdas)
-export EUPD_CYC="gdas"
-
-# GFS cycle info
-export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
-
-# GFS output and frequency
-export FHMIN_GFS=0
-
-export FHMAX_GFS_00=120
-export FHMAX_GFS_06=120
-export FHMAX_GFS_12=120
-export FHMAX_GFS_18=120
-current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var}
-
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
-export FHMAX_HF_GFS=0
-export FHOUT_HF_GFS=1
-if (( gfs_cyc != 0 )); then
-    export STEP_GFS=$(( 24 / gfs_cyc ))
-else
-    export STEP_GFS="0"
-fi
-export ILPOST=1           # gempak output frequency up to F120
-
-# GFS restart interval in hours
-#JKHexport restart_interval_gfs=12
-export restart_interval_gfs=-1                        ## JKH
-# NOTE: Do not set this to zero.  Instead set it to $FHMAX_GFS
-# TODO: Remove this variable from config.base and reference from config.fcst
-# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used
-
-export QUILTING=".true."
-export OUTPUT_GRID="gaussian_grid"
-export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST
-export WRITE_NSFLIP=".true."
-
-# IAU related parameters
-export DOIAU="@DOIAU@"        # Enable 4DIAU for control with 3 increments
-export IAUFHRS="3,6,9"
-export IAU_FHROT=${IAUFHRS%%,*}
-export IAU_DELTHRS=6
-export IAU_OFFSET=6
-export DOIAU_ENKF=${DOIAU:-"YES"}   # Enable 4DIAU for EnKF ensemble
-export IAUFHRS_ENKF="3,6,9"
-export IAU_DELTHRS_ENKF=6
-
-# Use Jacobians in eupd and thereby remove need to run eomg
-export lobsdiag_forenkf=".true."
-
-# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA
-#   export DO_WAVE="NO"
-#   echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE"
-# fi
-
-# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL
-export imp_physics=8
-
-# Shared parameters
-# DA engine
-export DO_JEDIATMVAR="@DO_JEDIATMVAR@"
-export DO_JEDIATMENS="@DO_JEDIATMENS@"
-export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@"
-export DO_JEDILANDDA="@DO_JEDILANDDA@"
-export DO_MERGENSST="@DO_MERGENSST@"
-
-# Hybrid related
-export DOHYBVAR="@DOHYBVAR@"
-export NMEM_ENS=@NMEM_ENS@
-export NMEM_ENS_GFS=@NMEM_ENS@
-export SMOOTH_ENKF="NO"
-export l4densvar=".true."
-export lwrite4danl=".true."
-
-# EnKF output frequency
-if [[ ${DOHYBVAR} = "YES" ]]; then
-    export FHMIN_ENKF=3
-    export FHMAX_ENKF=9
-    export FHMAX_ENKF_GFS=120
-    export FHOUT_ENKF_GFS=3
-    if [[ ${l4densvar} = ".true." ]]; then
-        export FHOUT=1
-        export FHOUT_ENKF=1
-    else
-        export FHOUT_ENKF=3
-    fi
-fi
-
-# if 3DVAR and IAU
-if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES"  ]]; then
-    export IAUFHRS="6"
-    export IAU_FHROT="3"
-    export IAU_FILTER_INCREMENTS=".true."
-    export IAUFHRS_ENKF="6"
-fi
-
-# Check if cycle is cold starting, DOIAU off, or free-forecast mode
-if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
-  export IAU_OFFSET=0
-  export IAU_FHROT=0
-  export IAUFHRS="6"
-fi
-
-if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
-
-# turned on nsst in anal and/or fcst steps, and turn off rtgsst
-export DONST="YES"
-if [[ ${DONST} = "YES" ]]; then export FNTSFA="        "; fi
-
-# The switch to apply SST elevation correction or not
-export nst_anl=.true.
-
-# Make the nsstbufr file on the fly or use the GDA version
-export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
-
-# Make the aircraft prepbufr file on the fly or use the GDA version
-export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
-
-# Analysis increments to zero in CALCINCEXEC
-export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
-
-# Write analysis files for early cycle EnKF
-export DO_CALC_INCREMENT_ENKF_GFS="YES"
-
-# Stratospheric increments to zero
-export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
-export INCVARS_EFOLD="5"
-
-# Swith to generate netcdf or binary diagnostic files.  If not specified,
-# script default to binary diagnostic files.   Set diagnostic file
-# variables here since used in DA job
-export netcdf_diag=".true."
-export binary_diag=".false."
-
-# Verification options
-export DO_METP="NO"          # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
-export DO_FIT2OBS="NO"      # Run fit to observations package             ## JKH
-
-# Archiving options
-export HPSSARCH="@HPSSARCH@"        # save data to HPSS archive
-export LOCALARCH="@LOCALARCH@"        # save data to local archive
-if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
-   echo "Both HPSS and local archiving selected.  Please choose one or the other."
-   exit 2
-fi
-export ARCH_CYC=00           # Archive data at this cycle for warm_start capability
-export ARCH_WARMICFREQ=4     # Archive frequency in days for warm_start capability
-export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs forecast-only capability
-
-#--online archive of nemsio files for fit2obs verification
-export FITSARC="YES"
-export FHMAX_FITS=132
-[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
-
-echo "END: config.base"
diff --git a/parm/config/gfs/config.base.emc.dyn_jet b/parm/config/gfs/config.base.emc.dyn_jet
deleted file mode 100644
index be130a79ef..0000000000
--- a/parm/config/gfs/config.base.emc.dyn_jet
+++ /dev/null
@@ -1,405 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.base ##########
-# Common to all steps
-
-echo "BEGIN: config.base"
-
-# Machine environment
-export machine="@MACHINE@"
-
-# EMC parallel or NCO production
-export RUN_ENVIR="emc"
-
-# Account, queue, etc.
-export ACCOUNT="@ACCOUNT@"
-export QUEUE="@QUEUE@"
-export QUEUE_SERVICE="@QUEUE_SERVICE@"
-export PARTITION_BATCH="@PARTITION_BATCH@"
-export PARTITION_SERVICE="@PARTITION_SERVICE@"
-
-# Project to use in mass store:
-export HPSS_PROJECT="@HPSS_PROJECT@"
-
-# Directories relative to installation areas:
-export HOMEgfs=@HOMEgfs@
-export PARMgfs="${HOMEgfs}/parm"
-export FIXgfs="${HOMEgfs}/fix"
-export USHgfs="${HOMEgfs}/ush"
-export UTILgfs="${HOMEgfs}/util"
-export EXECgfs="${HOMEgfs}/exec"
-export SCRgfs="${HOMEgfs}/scripts"
-
-export FIXam="${FIXgfs}/am"
-export FIXaer="${FIXgfs}/aer"
-export FIXcpl="${FIXgfs}/cpl"
-export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
-export FIXcice="${FIXgfs}/cice"
-export FIXmom="${FIXgfs}/mom6"
-export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
-
-########################################################################
-
-# GLOBAL static environment parameters
-export PACKAGEROOT="@PACKAGEROOT@"    # TODO: set via prod_envir in Ops
-export COMROOT="@COMROOT@"    # TODO: set via prod_envir in Ops
-export COMINsyn="@COMINsyn@"
-export DMPDIR="@DMPDIR@"
-export BASE_CPLIC="@BASE_CPLIC@"
-
-# USER specific paths
-export HOMEDIR="@HOMEDIR@"
-export STMP="@STMP@"
-export PTMP="@PTMP@"
-export NOSCRUB="@NOSCRUB@"
-
-# Base directories for various builds
-export BASE_GIT="@BASE_GIT@"
-
-# Toggle to turn on/off GFS downstream processing.
-export DO_GOES="@DO_GOES@" # GOES products
-export DO_BUFRSND="NO"     # BUFR sounding products
-export DO_GEMPAK="NO"      # GEMPAK products
-export DO_AWIPS="NO"       # AWIPS products
-export DO_NPOESS="NO"      # NPOESS products
-export DO_TRACKER="YES"    # Hurricane track verification
-export DO_GENESIS="NO"    # Cyclone genesis verification            ## JKH
-export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU)
-# The monitor is not yet supported on Hercules
-if [[ "${machine}" == "HERCULES" ]]; then
-   export DO_VERFOZN="NO"     # Ozone data assimilation monitoring
-   export DO_VERFRAD="NO"     # Radiance data assimilation monitoring
-   export DO_VMINMON="NO"     # GSI minimization monitoring
-else
-   export DO_VERFOZN="YES"    # Ozone data assimilation monitoring
-   export DO_VERFRAD="YES"    # Radiance data assimilation monitoring
-   export DO_VMINMON="YES"    # GSI minimization monitoring
-fi
-export DO_MOS="NO"         # GFS Model Output Statistics - Only supported on WCOSS2
-
-# NO for retrospective parallel; YES for real-time parallel
-#  arch.sh uses REALTIME for MOS.  Need to set REALTIME=YES
-#  if want MOS written to HPSS.   Should update arch.sh to
-#  use RUNMOS flag
-export REALTIME="YES"
-
-# Experiment mode (cycled or forecast-only)
-export MODE="@MODE@" # cycled/forecast-only
-
-####################################################
-# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE
-# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW
-# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT
-# CLEAR
-####################################################
-# Build paths relative to $HOMEgfs
-export FIXgsi="${HOMEgfs}/fix/gsi"
-export HOMEpost="${HOMEgfs}"
-export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}"
-
-# CONVENIENT utility scripts and other environment parameters
-export NCP="/bin/cp -p"
-export NMV="/bin/mv"
-export NLN="/bin/ln -sf"
-export VERBOSE="YES"
-export KEEPDATA="NO"
-export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
-export CHGRP_CMD="@CHGRP_CMD@"
-export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
-export NCLEN="${HOMEgfs}/ush/getncdimlen"
-
-# Machine environment, jobs, and other utility scripts
-export BASE_ENV="${HOMEgfs}/env"
-export BASE_JOB="${HOMEgfs}/jobs/rocoto"
-
-# EXPERIMENT specific environment parameters
-export SDATE=@SDATE@
-export EDATE=@EDATE@
-export EXP_WARM_START="@EXP_WARM_START@"
-export assim_freq=6
-export PSLOT="@PSLOT@"
-export EXPDIR="@EXPDIR@/${PSLOT}"
-export ROTDIR="@COMROOT@/${PSLOT}"
-export ROTDIR_DUMP="YES"                #Note: A value of "NO" does not currently work
-export DUMP_SUFFIX=""
-if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
-    export DUMP_SUFFIX="p"              # Use dumps from NCO GFS v15.3 parallel
-fi
-export DATAROOT="${STMP}/RUNDIRS/${PSLOT}"  # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}"  # TODO: Should be removed; use DATAROOT instead
-export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
-export ATARDIR="@ATARDIR@"
-
-# Commonly defined parameters in JJOBS
-export envir=${envir:-"prod"}
-export NET="gfs"  # NET is defined in the job-card (ecf)
-export RUN=${RUN:-${CDUMP:-"gfs"}}  # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy
-# TODO: determine where is RUN actually used in the workflow other than here
-# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be
-#       consistent w/ EE2?
-
-# Get all the COM path templates
-source "${EXPDIR}/config.com"
-
-export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
-export LOGSCRIPT=${LOGSCRIPT:-""}
-#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
-#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"}
-export REDOUT="1>"
-export REDERR="2>"
-
-export SENDECF=${SENDECF:-"NO"}
-export SENDSDM=${SENDSDM:-"NO"}
-export SENDDBN_NTC=${SENDDBN_NTC:-"NO"}
-export SENDDBN=${SENDDBN:-"NO"}
-export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn}
-
-# APP settings
-export APP=@APP@
-
-# Defaults:
-export DO_ATM="YES"
-export DO_COUPLED="NO"
-export DO_WAVE="NO"
-export DO_OCN="NO"
-export DO_ICE="NO"
-export DO_AERO="NO"
-export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
-export FRAC_GRID=".true."
-
-# Set operational resolution
-export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used?
-
-# Resolution specific parameters
-export LEVS=128
-export CASE="@CASECTL@"
-export CASE_ENS="@CASEENS@"
-export OCNRES="@OCNRES@"
-export ICERES="${OCNRES}"
-# These are the currently recommended grid-combinations
-case "${CASE}" in
-    "C48")
-        export waveGRD='glo_500'
-        ;;
-    "C96" | "C192")
-        export waveGRD='glo_200'
-        ;;
-    "C384")
-        export waveGRD='glo_025'
-        ;;
-    "C768" | "C1152")
-        export waveGRD='mx025'
-        ;;
-    *)
-        echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
-        exit 1
-        ;;
-esac
-
-case "${APP}" in
-  ATM)
-    ;;
-  ATMA)
-    export DO_AERO="YES"
-    ;;
-  ATMW)
-    export DO_COUPLED="YES"
-    export DO_WAVE="YES"
-    export WAVE_CDUMP="both"
-    ;;
-  NG-GODAS)
-    export DO_ATM="NO"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-    ;;
-  S2S*)
-    export DO_COUPLED="YES"
-    export DO_OCN="YES"
-    export DO_ICE="YES"
-
-    if [[ "${APP}" =~ A$ ]]; then
-        export DO_AERO="YES"
-    fi
-
-    if [[ "${APP}" =~ ^S2SW ]]; then
-        export DO_WAVE="YES"
-        export WAVE_CDUMP="both"
-    fi
-    ;;
-  *)
-    echo "Unrecognized APP: '${APP}'"
-    exit 1
-    ;;
-esac
-
-# Surface cycle update frequency
-if [[ "${CDUMP}" =~ "gdas" ]] ; then
-   export FHCYC=1
-   export FTSFS=10
-elif [[ "${CDUMP}" =~ "gfs" ]] ; then
-   export FHCYC=24
-fi
-
-# Output frequency of the forecast model (for cycling)
-export FHMIN=0
-export FHMAX=9
-export FHOUT=3           # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
-
-# Cycle to run EnKF  (set to BOTH for both gfs and gdas)
-export EUPD_CYC="gdas"
-
-# GFS cycle info
-export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
-
-# GFS output and frequency
-export FHMIN_GFS=0
-
-export FHMAX_GFS_00=168             ## JKH
-export FHMAX_GFS_06=168             ## JKH
-export FHMAX_GFS_12=168             ## JKH
-export FHMAX_GFS_18=168             ## JKH
-current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var}
-
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
-export FHMAX_HF_GFS=0
-export FHOUT_HF_GFS=1
-if (( gfs_cyc != 0 )); then
-    export STEP_GFS=$(( 24 / gfs_cyc ))
-else
-    export STEP_GFS="0"
-fi
-export ILPOST=1           # gempak output frequency up to F120
-
-# GFS restart interval in hours
-#JKHexport restart_interval_gfs=12
-export restart_interval_gfs=-1                        ## JKH
-# NOTE: Do not set this to zero.  Instead set it to $FHMAX_GFS
-# TODO: Remove this variable from config.base and reference from config.fcst
-# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used
-
-export QUILTING=".true."
-export OUTPUT_GRID="gaussian_grid"
-export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST
-export WRITE_NSFLIP=".true."
-
-# IAU related parameters
-export DOIAU="@DOIAU@"        # Enable 4DIAU for control with 3 increments
-export IAUFHRS="3,6,9"
-export IAU_FHROT=${IAUFHRS%%,*}
-export IAU_DELTHRS=6
-export IAU_OFFSET=6
-export DOIAU_ENKF=${DOIAU:-"YES"}   # Enable 4DIAU for EnKF ensemble
-export IAUFHRS_ENKF="3,6,9"
-export IAU_DELTHRS_ENKF=6
-
-# Use Jacobians in eupd and thereby remove need to run eomg
-export lobsdiag_forenkf=".true."
-
-# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA
-#   export DO_WAVE="NO"
-#   echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE"
-# fi
-
-# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL
-export imp_physics=8
-
-# Shared parameters
-# DA engine
-export DO_JEDIATMVAR="@DO_JEDIATMVAR@"
-export DO_JEDIATMENS="@DO_JEDIATMENS@"
-export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@"
-export DO_JEDILANDDA="@DO_JEDILANDDA@"
-export DO_MERGENSST="@DO_MERGENSST@"
-
-# Hybrid related
-export DOHYBVAR="@DOHYBVAR@"
-export NMEM_ENS=@NMEM_ENS@
-export NMEM_ENS_GFS=@NMEM_ENS@
-export SMOOTH_ENKF="NO"
-export l4densvar=".true."
-export lwrite4danl=".true."
-
-# EnKF output frequency
-if [[ ${DOHYBVAR} = "YES" ]]; then
-    export FHMIN_ENKF=3
-    export FHMAX_ENKF=9
-    export FHMAX_ENKF_GFS=120
-    export FHOUT_ENKF_GFS=3
-    if [[ ${l4densvar} = ".true." ]]; then
-        export FHOUT=1
-        export FHOUT_ENKF=1
-    else
-        export FHOUT_ENKF=3
-    fi
-fi
-
-# if 3DVAR and IAU
-if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES"  ]]; then
-    export IAUFHRS="6"
-    export IAU_FHROT="3"
-    export IAU_FILTER_INCREMENTS=".true."
-    export IAUFHRS_ENKF="6"
-fi
-
-# Check if cycle is cold starting, DOIAU off, or free-forecast mode
-if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
-  export IAU_OFFSET=0
-  export IAU_FHROT=0
-  export IAUFHRS="6"
-fi
-
-if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
-
-# turned on nsst in anal and/or fcst steps, and turn off rtgsst
-export DONST="YES"
-if [[ ${DONST} = "YES" ]]; then export FNTSFA="        "; fi
-
-# The switch to apply SST elevation correction or not
-export nst_anl=.true.
-
-# Make the nsstbufr file on the fly or use the GDA version
-export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
-
-# Make the aircraft prepbufr file on the fly or use the GDA version
-export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
-
-# Analysis increments to zero in CALCINCEXEC
-export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
-
-# Write analysis files for early cycle EnKF
-export DO_CALC_INCREMENT_ENKF_GFS="YES"
-
-# Stratospheric increments to zero
-export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
-export INCVARS_EFOLD="5"
-
-# Swith to generate netcdf or binary diagnostic files.  If not specified,
-# script default to binary diagnostic files.   Set diagnostic file
-# variables here since used in DA job
-export netcdf_diag=".true."
-export binary_diag=".false."
-
-# Verification options
-export DO_METP="NO"          # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
-export DO_FIT2OBS="NO"      # Run fit to observations package         ## JKH
-
-# Archiving options
-export HPSSARCH="@HPSSARCH@"        # save data to HPSS archive
-export LOCALARCH="@LOCALARCH@"        # save data to local archive
-if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
-   echo "Both HPSS and local archiving selected.  Please choose one or the other."
-   exit 2
-fi
-export ARCH_CYC=00           # Archive data at this cycle for warm_start capability
-export ARCH_WARMICFREQ=4     # Archive frequency in days for warm_start capability
-export ARCH_FCSTICFREQ=1     # Archive frequency in days for gdas and gfs forecast-only capability
-
-#--online archive of nemsio files for fit2obs verification
-export FITSARC="YES"
-export FHMAX_FITS=132
-[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
-
-echo "END: config.base"
diff --git a/parm/config/gfs/config.cleanup b/parm/config/gfs/config.cleanup
index 1908c91bb5..44e2690f65 100644
--- a/parm/config/gfs/config.cleanup
+++ b/parm/config/gfs/config.cleanup
@@ -12,6 +12,11 @@ export CLEANUP_COM="YES"   # NO=retain ROTDIR.  YES default in cleanup.sh
 export RMOLDSTD=144
 export RMOLDEND=24
 
+if [[ "${DO_GEMPAK}" == "YES" ]]; then
+    export RMOLDSTD=346
+    export RMOLDEND=222
+fi
+
 # Specify the list of files to exclude from the first stage of cleanup
 # Because arrays cannot be exported, list is a single string of comma-
 # separated values. This string is split to form an array at runtime.
@@ -22,4 +27,4 @@ case ${RUN} in
 esac
 export exclude_string
 
-echo "END: config.cleanup"
\ No newline at end of file
+echo "END: config.cleanup"
diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com
index db648b5866..222ffdae95 100644
--- a/parm/config/gfs/config.com
+++ b/parm/config/gfs/config.com
@@ -5,11 +5,11 @@ echo "BEGIN: config.com"
 
 # These are just templates. All templates must use single quotations so variable
 #   expansion does not occur when this file is sourced. Substitution happens later
-#   during runtime. It is recommended to use the helper function `generate_com()`,
+#   during runtime. It is recommended to use the helper function `declare_from_tmpl()`,
 #   to do this substitution, which is defined in `ush/preamble.sh`.
 #
-#   Syntax for generate_com():
-#       generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]]
+#   Syntax for declare_from_tmpl():
+#       declare_from_tmpl [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]]
 #
 #       options:
 #           -r: Make variable read-only (same as `decalre -r`)
@@ -20,14 +20,14 @@ echo "BEGIN: config.com"
 #
 #   Examples:
 #       # Current cycle and RUN
-#       YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+#       YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
 #
 #       # Previous cycle and gdas
-#       RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+#       RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
 #           COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
 #
 #       # Current cycle and COM for first member
-#       MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
+#       MEMDIR='mem001' YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY
 #
 
 #
@@ -49,10 +49,12 @@ COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}'
 declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}'
 
 declare -rx COM_CONF_TMPL=${COM_BASE}'/conf'
+declare -rx COM_OBS_JEDI=${COM_BASE}'/obs_jedi'
+
 declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input'
 declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart'
 declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos'
-declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land'
+declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow'
 declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history'
 declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master'
 declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2'
@@ -80,15 +82,19 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history'
 declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart'
 declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input'
 declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean'
-declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D'
-declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D'
-declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect'
+declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean'
+declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf'
 declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2'
 declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}'
 
+declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice'
+declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice'
 declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input'
 declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history'
 declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart'
+declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf'
+declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2'
+declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}'
 
 declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history'
 declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem'
diff --git a/parm/config/gfs/config.earc b/parm/config/gfs/config.earc
index de73a93731..00a2fa95bd 100644
--- a/parm/config/gfs/config.earc
+++ b/parm/config/gfs/config.earc
@@ -8,7 +8,25 @@ echo "BEGIN: config.earc"
 # Get task specific resources
 . $EXPDIR/config.resources earc
 
-export NMEM_EARCGRP=10
+# Set the number of ensemble members to archive per earc job
+case "${CASE_ENS}" in
+   "C48" | "C96")
+      export NMEM_EARCGRP=80
+      ;;
+   "C192")
+      export NMEM_EARCGRP=20
+      ;;
+   "C384" | "C768")
+      export NMEM_EARCGRP=10
+      ;;
+   "C1152")
+      export NMEM_EARCGRP=4
+      ;;
+   *)
+      echo "FATAL ERROR: Unknown ensemble resolution ${CASE_ENS}, ABORT!"
+      exit 1
+      ;;
+esac
 
 #--starting and ending hours of previous cycles to be removed from rotating directory
 export RMOLDSTD_ENKF=144
diff --git a/parm/config/gfs/config.efcs b/parm/config/gfs/config.efcs
index 283ec3ab7e..1837cf0619 100644
--- a/parm/config/gfs/config.efcs
+++ b/parm/config/gfs/config.efcs
@@ -5,14 +5,10 @@
 
 echo "BEGIN: config.efcs"
 
-# Turn off components in ensemble via _ENKF, or use setting from deterministic
-export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}}
-export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}}
-export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}}
-export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}}
+export CASE="${CASE_ENS}"
 
 # Source model specific information that is resolution dependent
-string="--fv3 ${CASE_ENS}"
+string="--fv3 ${CASE}"
 # Ocean/Ice/Waves ensemble configurations are identical to deterministic member
 [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}"
 [[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}"
@@ -25,15 +21,23 @@ source "${EXPDIR}/config.ufs" ${string}
 # Get task specific resources
 . "${EXPDIR}/config.resources" efcs
 
+# nggps_diag_nml
+export FHOUT=${FHOUT_ENKF:-3}
+if [[ ${RUN} == "enkfgfs" ]]; then
+    export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}}
+fi
+
+# model_configure
+export FHMIN=${FHMIN_ENKF:-3}
+export FHMAX=${FHMAX_ENKF:-9}
+if [[ ${RUN} == "enkfgfs" ]]; then
+   export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}}
+fi
+
 # Use serial I/O for ensemble (lustre?)
 export OUTPUT_FILETYPE_ATM="netcdf"
 export OUTPUT_FILETYPE_SFC="netcdf"
 
-# Number of enkf members per fcst job
-export NMEM_EFCSGRP=2
-export NMEM_EFCSGRP_GFS=1
-export RERUN_EFCSGRP="NO"
-
 # Turn off inline UPP for EnKF forecast
 export WRITE_DOPOST=".false."
 
@@ -56,17 +60,35 @@ export SPPT_LSCALE=500000.
 export SPPT_LOGIT=".true."
 export SPPT_SFCLIMIT=".true."
 
-if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then
-    export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da"
+if [[ "${QUILTING}" == ".true." ]] && [[ "${OUTPUT_GRID}" == "gaussian_grid" ]]; then
+    export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da"
 else
-    export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da_orig"
+    export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da_orig"
 fi
 
-# For IAU, write restarts at beginning of window also
-if [[ "${DOIAU_ENKF:-}" = "YES" ]]; then
-  export restart_interval="3"
-else
-  export restart_interval="6"
+# Model config option for Ensemble
+# export TYPE=nh                  # choices:  nh, hydro
+# export MONO=non-mono            # choices:  mono, non-mono
+
+# gfs_physics_nml
+export FHSWR=3600.
+export FHLWR=3600.
+export IEMS=1
+export ISOL=2
+export ICO2=2
+export dspheat=".true."
+export shal_cnv=".true."
+export FHZER=6
+
+# Set PREFIX_ATMINC to r when recentering on
+if [[ ${RECENTER_ENKF:-"YES"} == "YES" ]]; then
+   export PREFIX_ATMINC="r"
+fi
+
+# Set restart interval to enable restarting forecasts after failures
+export restart_interval=${restart_interval_enkfgdas:-6}
+if [[ ${RUN} == "enkfgfs" ]]; then
+    export restart_interval=${restart_interval_enkfgfs:-12}
 fi
 
 echo "END: config.efcs"
diff --git a/parm/config/gfs/config.eobs b/parm/config/gfs/config.eobs
index 21f982addc..7b7823e764 100644
--- a/parm/config/gfs/config.eobs
+++ b/parm/config/gfs/config.eobs
@@ -11,12 +11,11 @@ echo "BEGIN: config.eobs"
 # Number of enkf members per innovation job
 export NMEM_EOMGGRP=8  
 export RERUN_EOMGGRP="YES"
-export npe_gsi=$npe_eobs
 
 # GSI namelist options related to observer for EnKF
 export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0"
 export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0"
-if [ $LEVS = "128" ]; then
+if (( LEVS == 128 )); then
    export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1,"
    export SETUP_INVOBS="gpstop=55,nsig_ext=56,"
 fi
diff --git a/parm/config/gfs/config.epos b/parm/config/gfs/config.epos
index 8026a2ba2e..f1da929b62 100644
--- a/parm/config/gfs/config.epos
+++ b/parm/config/gfs/config.epos
@@ -14,7 +14,4 @@ if [ $l4densvar = ".false." ]; then
     export NEPOSGRP=3
 fi
 
-# Generate ensemble spread files
-export ENKF_SPREAD="YES"
-
 echo "END: config.epos"
diff --git a/parm/config/gfs/config.esfc b/parm/config/gfs/config.esfc
index 2bb3d48bb4..684dea4ee3 100644
--- a/parm/config/gfs/config.esfc
+++ b/parm/config/gfs/config.esfc
@@ -12,8 +12,19 @@ echo "BEGIN: config.esfc"
 # Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at 
 # center of analysis window.  
 
-if [ $DOIAU_ENKF = "YES" ]; then
+if [[ ${DOIAU_ENKF} = "YES" ]]; then
    export DOSFCANL_ENKF="NO"
 fi
 
+# Turn off NST in JEDIATMENS
+if [[ "${DO_JEDIATMENS}" == "YES" ]]; then
+   export DONST="NO"
+fi
+
+# set up soil analysis
+if [[ ${GSI_SOILANAL} = "YES" ]]; then
+    export DO_LNDINC=".true."
+    export LND_SOI_FILE="lnd_incr"
+fi
+
 echo "END: config.esfc"
diff --git a/parm/config/gfs/config.eupd b/parm/config/gfs/config.eupd
index 1ac90d2b75..2ff48240ae 100644
--- a/parm/config/gfs/config.eupd
+++ b/parm/config/gfs/config.eupd
@@ -8,7 +8,7 @@ echo "BEGIN: config.eupd"
 # Get task specific resources
 . $EXPDIR/config.resources eupd
 
-export npe_enkf=$npe_eupd
+export ntasks_enkf=${ntasks}
 
 # Use NAM_ENKF below for serial EnKF
 ##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9"
diff --git a/parm/config/gfs/config.fbwind b/parm/config/gfs/config.fbwind
new file mode 100644
index 0000000000..49fdb9e7b4
--- /dev/null
+++ b/parm/config/gfs/config.fbwind
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.fbwind ##########
+# GFS fbwind step specific
+
+echo "BEGIN: config.fbwind"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" fbwind
+
+echo "END: config.fbwind"
diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index 8266c68c83..74ce6e82f1 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -5,12 +5,21 @@
 
 echo "BEGIN: config.fcst"
 
-# Turn off waves if not used for this CDUMP
-case ${WAVE_CDUMP} in
-  both | "${CDUMP/enkf}" ) ;; # Don't change
+export USE_ESMF_THREADING="YES"  # Toggle to use ESMF-managed threading or traditional threading in UFSWM
+export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately)
+
+# Turn off waves if not used for this RUN
+case ${WAVE_RUN} in
+  both | "${RUN/enkf}" ) ;; # Don't change
   *) DO_WAVE="NO" ;; # Turn waves off
 esac
 
+# Turn off aerosols if not used for this RUN
+case ${AERO_FCST_RUN} in
+  both | "${RUN/enkf}" ) ;; # Don't change
+  *) DO_AERO="NO" ;; # Turn aerosols off
+esac
+
 # Source model specific information that is resolution dependent
 string="--fv3 ${CASE}"
 [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}"
@@ -21,6 +30,26 @@ string="--fv3 ${CASE}"
 # shellcheck disable=SC2086
 source "${EXPDIR}/config.ufs" ${string}
 
+# Forecast length for GFS forecast
+case ${RUN} in
+  *gfs)
+    # shellcheck disable=SC2153
+    export FHMAX=${FHMAX_GFS}
+    # shellcheck disable=SC2153
+    export FHOUT=${FHOUT_GFS}
+    export FHMAX_HF=${FHMAX_HF_GFS}
+    export FHOUT_HF=${FHOUT_HF_GFS}
+    export FHOUT_OCN=${FHOUT_OCN_GFS}
+    export FHOUT_ICE=${FHOUT_ICE_GFS}
+    ;;
+  *gdas)
+    export FHMAX_HF=0
+    export FHOUT_HF=0
+    ;;
+  *)
+    echo "FATAL ERROR: Unsupported RUN '${RUN}'"
+    exit 1
+esac
 
 # Get task specific resources
 source "${EXPDIR}/config.resources" fcst
@@ -37,16 +66,14 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O
 
 #######################################################################
 
-export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh"
-#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py"  # Temp. while this is worked on
-export FCSTEXECDIR="${HOMEgfs}/exec"
+export FORECASTSH="${SCRgfs}/exglobal_forecast.sh"
+#export FORECASTSH="${SCRgfs}/exglobal_forecast.py"  # Temp. while this is worked on
 export FCSTEXEC="ufs_model.x"
 
 #######################################################################
 # Model configuration
 export TYPE="nh"
 export MONO="non-mono"
-#JKHexport range_warn=".false."                  ## JKH 
 
 # Use stratosphere h2o physics
 export h2o_phys=".true."
@@ -93,37 +120,36 @@ if (( gwd_opt == 2 )); then
     export do_ugwp_v0_orog_only=".false."
     export do_ugwp_v0_nst_only=".false."
     export do_gsl_drag_ls_bl=".true."
-    export do_gsl_drag_ss=".true."
+    export do_gsl_drag_ss=".true."    #KYW (Checked with Mike)
+    #export do_gsl_drag_ss=".false."
     export do_gsl_drag_tofd=".true."
+    export do_gwd_opt_psl=".false."
+    #export do_gwd_opt_psl=".true."    #KYW (Checked with Mike)
     export do_ugwp_v1_orog_only=".false."
-    export alpha_fd=35.0
+    export alpha_fd=35.0    #KYW (Checked with Mike)
     launch_level=$(echo "${LEVS}/2.35" |bc)
     export launch_level
-    if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then
-       export cdmbgwd=${cdmbgwd_gsl}
-    fi
 fi
 
 # Sponge layer settings
-export tau=0.
-export rf_cutoff=10.
 export d2_bg_k1=0.20
 export d2_bg_k2=0.04
 export dz_min=6
 export n_sponge=42
-if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then
-   export tau=5.0
-   export rf_cutoff=1.0e3
-   export d2_bg_k1=0.20
-   export d2_bg_k2=0.0
-fi
 
-# PBL/turbulence schemes
+# PBL/turbulence schemes
 export hybedmf=".false."
+if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then
+  export satmedmf=".false."
+else
+  export satmedmf=".true."
+fi
+export isatmedmf=1
+#JKH
 if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then
    export satmedmf=".false."
    export isatmedmf=0
-   export shal_cnv=".false."
+   export CPP
    export do_mynnedmf=".true."
    export do_mynnsfclay=".false."
    export icloud_bl=1
@@ -131,20 +157,21 @@ if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS
    export bl_mynn_edmf=1
    export bl_mynn_edmf_mom=1
    export lcnorm=".true."                           ## JKH
-else
-   export satmedmf=".true."
-   export isatmedmf=1
 fi
 tbf=""
 if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi
 
-#Convection schemes
+#Convection schemes      ### JKH  - affects field table name
 export progsigma=".true."
 tbp=""
 if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi
 
 # Radiation options
-export IAER=1011    ; #spectral band mapping method for aerosol optical properties
+if [[ "${DO_AERO}" == "YES" ]]; then
+    export IAER=2011  # spectral band mapping method for aerosol optical properties
+else
+    export IAER=1011
+fi
 export iovr_lw=3    ; #de-correlation length cloud overlap method (Barker, 2008)
 export iovr_sw=3    ; #de-correlation length cloud overlap method (Barker, 2008)
 export iovr=3       ; #de-correlation length cloud overlap method (Barker, 2008)
@@ -162,21 +189,22 @@ export doGP_lwscat=.false.
 export iopt_sfc="3"
 export iopt_trs="2"
 
+#JKH
 # Convection Options: 2-SASAS, 3-GF
 export progsigma=".true."
 if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" ]] ; then
     export imfdeepcnv=5
     export imfshalcnv=-1                 ## JKH - no shallow GF
-elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then 
+elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then
     export imfdeepcnv=5
-    export imfshalcnv=5              
-elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then 
+    export imfshalcnv=5
+elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then
     export progsigma=.false.
     export imfdeepcnv=5
-    export imfshalcnv=5              
-elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then 
+    export imfshalcnv=5
+elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then
     export imfdeepcnv=3
-    export imfshalcnv=3              
+    export imfshalcnv=3
 else
     export imfdeepcnv=2
     if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then
@@ -185,11 +213,7 @@ else
       export imfshalcnv=2
     fi
 fi
-
-#Convection schemes      ### JKH  - affects field table name
-tbp=""
-if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi
-
+#JKH
 
 # Microphysics configuration
 export dnats=0
@@ -200,12 +224,12 @@ export random_clds=".true."
 case ${imp_physics} in
     99) # ZhaoCarr
         export ncld=1
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}"
         export nwat=2
         ;;
     6)  # WSM6
         export ncld=2
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}"
         export nwat=6
         ;;
     8)  # Thompson
@@ -218,31 +242,46 @@ case ${imp_physics} in
         export lradar=".true."
         export ttendlim="-999"
         export sedi_semi=.true.
+        if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi
         export decfl=10
-
-        if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" || 
+#JKH
+        if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" ||
               "${CCPP_SUITE}" == "FV3_GFS_v17_p8_thompson" ]] ; then
-          #JKH  keep dt_inner $DELTIM/2 (75) if running aerosol-aware Thompson
-          export dt_inner=$((DELTIM/2))
+          #JKH  set dt_inner to 50 if running aerosol-aware Thompson
+          export dt_inner=50
           export ltaerosol=".true."
-          export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}"
+          export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}"
         else
           export dt_inner=$((DELTIM/2))
           if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi
           export ltaerosol=".false."
-          export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}"
+          export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}"
         fi
-
+#JKH
         export hord_mt_nh_nonmono=5
         export hord_xx_nh_nonmono=5
         export vtdm4_nh_nonmono=0.02
         export nord=2
         export dddmp=0.1
         export d4_bg=0.12
+
+        if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then
+          export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}"
+          export ltaerosol=".true."
+          export lcnorm=".true."
+          export do_mynnedmf=".true."
+          export do_mynnsfclay=".true."
+          export imfshalcnv=5
+          export imfdeepcnv=5
+          export betascu=0.5
+          export betamcu=1.5
+          export betadcu=8.0
+        fi
+
         ;;
     11) # GFDL
         export ncld=5
-        export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}"
+        export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}"
         export nwat=6
         export dnats=1
         export cal_pre=".false."
@@ -268,6 +307,9 @@ export DO_SKEB=${DO_SKEB:-"NO"}
 export DO_SHUM=${DO_SHUM:-"NO"}
 export DO_LAND_PERT=${DO_LAND_PERT:-"NO"}
 export DO_CA=${DO_CA:-"YES"}
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export DO_CA="NO"  # CA does not work with nesting.
+fi
 
 #coupling settings
 export cplmode="ufs.frac"
@@ -284,38 +326,21 @@ export FSICL="0"
 export FSICS="0"
 
 #---------------------------------------------------------------------
-
-# ideflate: netcdf zlib lossless compression (0-9): 0 no compression
-# nbits: netcdf lossy compression level (0-32): 0 lossless
-export ideflate=1
-export nbits=14
-export ishuffle=0
-# compression for RESTART files written by FMS
-export shuffle=1
-export deflate_level=1
-
-#---------------------------------------------------------------------
-# Disable the use of coupler.res; get model start time from model_configure
-export USE_COUPLER_RES="NO"
-
-if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters
+if [[ "${RUN}" =~ "gdas" ]] ; then # GDAS cycle specific parameters
 
     # Variables used in DA cycling
-    export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da"
+    export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da"
 
-    if [[ "${DOIAU}" == "YES" ]]; then
-      export restart_interval="3"
-    else
-      export restart_interval="6"
-    fi
+    # Write gfs restart files to rerun fcst from any break point
+    export restart_interval=${restart_interval_gdas:-6}
 
     # Turn on dry mass adjustment in GDAS
     export adjust_dry_mass=".true."
 
-elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters
+elif [[ "${RUN}" =~ "gfs" ]] ; then # GFS cycle specific parameters
 
     # Write more variables to output
-    export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table"
+    export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
 
     # Write gfs restart files to rerun fcst from any break point
     export restart_interval=${restart_interval_gfs:-12}
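
A minimal sketch (illustrative, not part of the patch) of how the tbf/tbp suffixes built in config.fcst select a field table under PARMgfs; the path and flag values below are placeholders.

    #! /usr/bin/env bash
    PARMgfs="/path/to/global-workflow/parm"   # placeholder
    satmedmf=".true."
    progsigma=".true."

    tbf=""
    if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf"; fi
    tbp=""
    if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma"; fi

    # e.g. imp_physics=99 (Zhao-Carr) would resolve to:
    FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}"
    echo "FIELD_TABLE=${FIELD_TABLE}"
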
diff --git a/parm/config/gfs/config.fit2obs b/parm/config/gfs/config.fit2obs
index 46baaa9e45..9b3fb87ead 100644
--- a/parm/config/gfs/config.fit2obs
+++ b/parm/config/gfs/config.fit2obs
@@ -8,8 +8,8 @@ echo "BEGIN: config.fit2obs"
 # Get task specific resources
 . "${EXPDIR}/config.resources" fit2obs
 
-export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global
-export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt
+export PRVT=${FIXgfs}/gsi/prepobs_errtable.global
+export HYBLEVS=${FIXgfs}/am/global_hyblev.l${LEVS}.txt
 
 export VBACKUP_FITS=24
 export OUTPUT_FILETYPE="netcdf"
diff --git a/parm/config/gfs/config.ice b/parm/config/gfs/config.ice
index 205458020f..055bd1e2bb 100644
--- a/parm/config/gfs/config.ice
+++ b/parm/config/gfs/config.ice
@@ -6,4 +6,9 @@ echo "BEGIN: config.ice"
 export min_seaice="1.0e-6"
 export use_cice_alb=".true."
 
+export MESH_ICE="mesh.mx${ICERES}.nc"
+
+export CICE_GRID="grid_cice_NEMS_mx${ICERES}.nc"
+export CICE_MASK="kmtu_cice_NEMS_mx${ICERES}.nc"
+
 echo "END: config.ice"
diff --git a/parm/config/gfs/config.landanl b/parm/config/gfs/config.landanl
deleted file mode 100644
index 70ebae7529..0000000000
--- a/parm/config/gfs/config.landanl
+++ /dev/null
@@ -1,34 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.landanl ##########
-# configuration common to land analysis tasks
-
-echo "BEGIN: config.landanl"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" landanl
-
-obs_list_name=gdas_land_gts_only.yaml
-if [[ "${cyc}" = "18" ]]; then
-    obs_list_name=gdas_land_prototype.yaml
-fi
-
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name}
-
-# Name of the JEDI executable and its yaml template
-export JEDIEXE="${HOMEgfs}/exec/fv3jedi_letkf.x"
-export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml"
-
-# Ensemble member properties
-export SNOWDEPTHVAR="snodl"
-export BESTDDEV="30."  # Background Error Std. Dev. for LETKFOI
-
-# Name of the executable that applies increment to bkg and its namelist template
-export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe"
-export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/apply_incr_nml.j2"
-
-export io_layout_x=@IO_LAYOUT_X@
-export io_layout_y=@IO_LAYOUT_Y@
-
-echo "END: config.landanl"
diff --git a/parm/config/gfs/config.marineanalletkf b/parm/config/gfs/config.marineanalletkf
new file mode 100644
index 0000000000..fde3433a13
--- /dev/null
+++ b/parm/config/gfs/config.marineanalletkf
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+########## config.marineanalletkf ##########
+# Ocn Analysis specific
+
+echo "BEGIN: config.marineanalletkf"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" marineanalletkf
+
+export MARINE_LETKF_EXEC="${JEDI_BIN}/gdas.x"
+export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2"
+export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2"
+
+export GRIDGEN_EXEC="${JEDI_BIN}/gdas_soca_gridgen.x"
+export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml"
+
+echo "END: config.marineanalletkf"
diff --git a/parm/config/gfs/config.marinebmat b/parm/config/gfs/config.marinebmat
new file mode 100644
index 0000000000..d88739dced
--- /dev/null
+++ b/parm/config/gfs/config.marinebmat
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+########## config.marinebmat ##########
+# configuration for the marine B-matrix
+
+echo "BEGIN: config.marinebmat"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" marinebmat
+
+echo "END: config.marinebmat"
diff --git a/parm/config/gfs/config.metp b/parm/config/gfs/config.metp
index c90903f6a5..564966fd6d 100644
--- a/parm/config/gfs/config.metp
+++ b/parm/config/gfs/config.metp
@@ -8,6 +8,8 @@ echo "BEGIN: config.metp"
 # Get task specific resources
 . "${EXPDIR}/config.resources" metp
 
+export nproc=${tasks_per_node:-1}
+
 export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus
 export RUN_GRID2OBS_STEP1="YES"  # Run grid-to-obs verification using METplus
 export RUN_PRECIP_STEP1="YES"    # Run precip verification using METplus
@@ -21,8 +23,9 @@ export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd
 export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh
 ## INPUT DATA SETTINGS
 export model=${PSLOT}
-export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
+export model_file_format="pgbf{lead?fmt=%2H}.${RUN}.{init?fmt=%Y%m%d%H}.grib2"
 export model_hpss_dir=${ATARDIR}/..
+export model_dir=${ARCDIR}/..
 export get_data_from_hpss="NO"
 export hpss_walltime="10"
 ## OUTPUT SETTINGS
@@ -38,19 +41,19 @@ export log_MET_output_to_METplus="yes"
 # GRID-TO-GRID STEP 1: gfsmetpg2g1
 export g2g1_type_list="anom pres sfc"
 export g2g1_anom_truth_name="self_anl"
-export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2"
+export g2g1_anom_truth_file_format="pgbanl.${RUN}.{valid?fmt=%Y%m%d%H}.grib2"
 export g2g1_anom_fhr_min=${FHMIN_GFS}
 export g2g1_anom_fhr_max=${FHMAX_GFS}
 export g2g1_anom_grid="G002"
 export g2g1_anom_gather_by="VSDB"
 export g2g1_pres_truth_name="self_anl"
-export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2"
+export g2g1_pres_truth_file_format="pgbanl.${RUN}.{valid?fmt=%Y%m%d%H}.grib2"
 export g2g1_pres_fhr_min=${FHMIN_GFS}
 export g2g1_pres_fhr_max=${FHMAX_GFS}
 export g2g1_pres_grid="G002"
 export g2g1_pres_gather_by="VSDB"
 export g2g1_sfc_truth_name="self_f00"
-export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2"
+export g2g1_sfc_truth_file_format="pgbf00.${RUN}.{valid?fmt=%Y%m%d%H}.grib2"
 export g2g1_sfc_fhr_min=${FHMIN_GFS}
 export g2g1_sfc_fhr_max=${FHMAX_GFS}
 export g2g1_sfc_grid="G002"
@@ -86,7 +89,7 @@ export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow exper
 export precip1_type_list="ccpa_accum24hr"
 export precip1_ccpa_accum24hr_model_bucket="06"
 export precip1_ccpa_accum24hr_model_var="APCP"
-export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
+export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${RUN}.{init?fmt=%Y%m%d%H}.grib2"
 export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS}
 export precip1_ccpa_accum24hr_fhr_max="180"
 export precip1_ccpa_accum24hr_grid="G211"
diff --git a/parm/config/gfs/config.nsst b/parm/config/gfs/config.nsst
index db4367b2c0..7bda81f058 100644
--- a/parm/config/gfs/config.nsst
+++ b/parm/config/gfs/config.nsst
@@ -10,6 +10,11 @@ echo "BEGIN: config.nsst"
 # nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
 export NST_MODEL=2
 
+# Set NST_MODEL for JEDIATMVAR or JEDIATMENS
+if [[ "${DO_JEDIATMVAR}" == "YES" || "${DO_JEDIATMENS}" == "YES" ]]; then
+   export NST_MODEL=1
+fi
+
 # nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
 export NST_SPINUP=0
 cdate="${PDY}${cyc}"
diff --git a/parm/config/gfs/config.oceanice_products b/parm/config/gfs/config.oceanice_products
new file mode 100644
index 0000000000..9e5c5b1c68
--- /dev/null
+++ b/parm/config/gfs/config.oceanice_products
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.oceanice_products ##########
+
+echo "BEGIN: config.oceanice_products"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" oceanice_products
+
+export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products.yaml"
+
+# No. of forecast hours to process in a single job
+export NFHRS_PER_GROUP=3
+
+echo "END: config.oceanice_products"
diff --git a/parm/config/gfs/config.ocn b/parm/config/gfs/config.ocn
index 37f6a966aa..317a76e58a 100644
--- a/parm/config/gfs/config.ocn
+++ b/parm/config/gfs/config.ocn
@@ -2,8 +2,7 @@
 
 echo "BEGIN: config.ocn"
 
-# MOM_input template to use
-export MOM_INPUT="MOM_input_template_${OCNRES}"
+export MESH_OCN="mesh.mx${OCNRES}.nc"
 
 export DO_OCN_SPPT="NO"  # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False)
 export DO_OCN_PERT_EPBL="NO"  # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
@@ -17,6 +16,14 @@ if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
 else
   export ODA_INCUPD="False"
 fi
-export ODA_INCUPD_NHOURS="3.0"  # In MOM_input, this is time interval for applying increment
+
+# Time interval for applying the increment
+if [[ "${DOIAU}" == "YES" ]]; then
+  export ODA_INCUPD_NHOURS="6.0"
+else
+  export ODA_INCUPD_NHOURS="3.0"
+fi
+
+
 
 echo "END: config.ocn"
diff --git a/parm/config/gfs/config.ocnanal b/parm/config/gfs/config.ocnanal
index 38a6cbd52a..4d58f2dedf 100644
--- a/parm/config/gfs/config.ocnanal
+++ b/parm/config/gfs/config.ocnanal
@@ -6,21 +6,15 @@
 echo "BEGIN: config.ocnanal"
 
 export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config"
-export OBS_LIST=@SOCA_OBS_LIST@
-export OBS_YAML="${OBS_LIST}"
-export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml"
+export OBS_LIST=@SOCA_OBS_LIST@  # TODO(GA): having both OBS_LIST and OBS_YAML
+export OBS_YAML="${OBS_LIST}"    #           looks unnecessary; pick one or add logic
 export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@
-export SOCA_VARS=tocn,socn,ssh
-export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@
 export SOCA_NINNER=@SOCA_NINNER@
-export CASE_ANL=@CASE_ANL@
 export DOMAIN_STACK_SIZE=116640000  #TODO: Make the stack size resolution dependent
-export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+export SOCA_ENS_BKG_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/soca_ens_bkg_stage.yaml.j2"
+export SOCA_FIX_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage_${OCNRES}.yaml.j2"
 
-export COMIN_OBS=@COMIN_OBS@
-
-# NICAS
-export NICAS_RESOL=@NICAS_RESOL@
-export NICAS_GRID_SIZE=@NICAS_GRID_SIZE@
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin  # TODO(GA): remove once analysis "run"
+                                                   #          and "checkpoint" are refactored
 
 echo "END: config.ocnanal"
diff --git a/parm/config/gfs/config.ocnanalbmat b/parm/config/gfs/config.ocnanalbmat
deleted file mode 100644
index 024da5f51b..0000000000
--- a/parm/config/gfs/config.ocnanalbmat
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-########## config.ocnanalbmat ##########
-# Ocn Analysis specific
-
-echo "BEGIN: config.ocnanalbmat"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" ocnanalbmat
-
-echo "END: config.ocnanalbmat"
diff --git a/parm/config/gfs/config.ocnanalecen b/parm/config/gfs/config.ocnanalecen
new file mode 100644
index 0000000000..b64c2bcf62
--- /dev/null
+++ b/parm/config/gfs/config.ocnanalecen
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+########## config.ocnanalecen ##########
+# Ocn Analysis specific
+
+echo "BEGIN: config.ocnanalecen"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" ocnanalecen
+
+echo "END: config.ocnanalecen"
diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost
deleted file mode 100644
index 851c476e6c..0000000000
--- a/parm/config/gfs/config.ocnpost
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.ocnpost ##########
-
-echo "BEGIN: config.ocnpost"
-
-# Get task specific resources
-source "${EXPDIR}/config.resources" ocnpost
-
-# Convert netcdf files to grib files using post job
-#-------------------------------------------
-case "${OCNRES}" in
-    "025") export MAKE_OCN_GRIB="YES";;
-    "050") export MAKE_OCN_GRIB="NO";;
-    "100") export MAKE_OCN_GRIB="NO";;
-    "500") export MAKE_OCN_GRIB="NO";;
-    *) export MAKE_OCN_GRIB="NO";;
-esac
-
-if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then
-  #Currently the conversion to netcdf uses NCL which is not on WCOSS2 or HERCULES
-  #This should be removed when this is updated
-  export MAKE_OCN_GRIB="NO"
-fi
-
-# No. of forecast hours to process in a single job
-export NFHRS_PER_GROUP=3
-
-echo "END: config.ocnpost"
diff --git a/parm/config/gfs/config.postsnd b/parm/config/gfs/config.postsnd
index 53d66bf4f6..7ec0ad6321 100644
--- a/parm/config/gfs/config.postsnd
+++ b/parm/config/gfs/config.postsnd
@@ -8,7 +8,6 @@ echo "BEGIN: config.postsnd"
 # Get task specific resources
 . $EXPDIR/config.resources postsnd
 
-export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND
 export ENDHOUR=180
 if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi
 
diff --git a/parm/config/gfs/config.prep b/parm/config/gfs/config.prep
index d5ac1925f7..e719d03d1d 100644
--- a/parm/config/gfs/config.prep
+++ b/parm/config/gfs/config.prep
@@ -13,33 +13,28 @@ export cdate10=${PDY}${cyc}
 
 # Relocation and syndata QC
 export PROCESS_TROPCY=${PROCESS_TROPCY:-NO}
-export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh"
+export TROPCYQCRELOSH="${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh"
 
-export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos}
+export COMINtcvital=${COMINtcvital:-${DMPDIR}/${RUN}.${PDY}/${cyc}/atmos}
 export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat}
 
-export HOMERELO=$HOMEgfs
-export EXECRELO=${HOMERELO}/exec
-export FIXRELO=${HOMERELO}/fix/am
-export USHRELO=${HOMERELO}/ush
-
 # Adjust observation error for GFS v16 parallels
 #
 #   NOTE:  Remember to set OBERROR in config.anal as PRVT is set below
 #
 # Set default prepobs_errtable.global
-export PRVT=$FIXgsi/prepobs_errtable.global
+export PRVT=${FIXgfs}/gsi/prepobs_errtable.global
 
 
 # Set prepobs.errtable.global for GFS v16 retrospective parallels
 if [[ $RUN_ENVIR == "emc" ]]; then
   if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
-    export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900
+    export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900
   fi
 
 #   Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps
   if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then
-    export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706
+    export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706
   fi
 
 #   NOTE:
diff --git a/parm/config/gfs/config.prepatmiodaobs b/parm/config/gfs/config.prepatmiodaobs
index ed9b246120..e29cf67b07 100644
--- a/parm/config/gfs/config.prepatmiodaobs
+++ b/parm/config/gfs/config.prepatmiodaobs
@@ -8,7 +8,4 @@ echo "BEGIN: config.prepatmiodaobs"
 # Get task specific resources
 . "${EXPDIR}/config.resources" prepatmiodaobs
 
-export BUFR2IODASH="${HOMEgfs}/ush/run_bufr2ioda.py"
-export IODAPARM="${HOMEgfs}/sorc/gdas.cd/parm/ioda/bufr2ioda"
-
 echo "END: config.prepatmiodaobs"
diff --git a/parm/config/gfs/config.preplandobs b/parm/config/gfs/config.preplandobs
deleted file mode 100644
index 20ae20b5ad..0000000000
--- a/parm/config/gfs/config.preplandobs
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.preplandobs ##########
-# Land Obs Prep specific
-
-echo "BEGIN: config.preplandobs"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" preplandobs
-
-export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_gts.yaml"
-export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x"
-export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2"
-export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml"
-export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe"
-export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py"
-
-echo "END: config.preplandobs"
diff --git a/parm/config/gfs/config.prepobsaero b/parm/config/gfs/config.prepobsaero
new file mode 100644
index 0000000000..f70138991c
--- /dev/null
+++ b/parm/config/gfs/config.prepobsaero
@@ -0,0 +1,17 @@
+#!/bin/bash -x
+
+########## config.prepobsaero ##########
+# Prepare and thin/superob aerosol observations
+
+echo "BEGIN: config.prepobsaero"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" prepobsaero
+
+export OBSPROCYAML="${PARMgfs}/gdas/aero/obs/lists/gdas_aero_obsproc.yaml.j2"
+export OBSPROCEXE="${EXECgfs}/gdas_obsprovider2ioda.x"
+export VIIRS_DATA_DIR="/scratch2/NCEPDEV/stmp3/Yaping.Wang/VIIRS/AWS/"
+export SENSORS="npp,n20"
+
+
+echo "END: config.prepaeroobs"
diff --git a/parm/config/gfs/config.prepoceanobs b/parm/config/gfs/config.prepoceanobs
index d7c4e37bb9..746ce79257 100644
--- a/parm/config/gfs/config.prepoceanobs
+++ b/parm/config/gfs/config.prepoceanobs
@@ -6,15 +6,20 @@ echo "BEGIN: config.prepoceanobs"
 
 export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x
 
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
-export OBSPROC_YAML=@OBSPROC_YAML@
+export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@
+
+export OBS_YAML_DIR="${PARMgfs}/gdas/soca/obs/config"
+export OBSPREP_YAML=@OBSPREP_YAML@
 export OBS_LIST=@SOCA_OBS_LIST@
-[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
 export OBS_YAML=${OBS_LIST}
 
 # ocean analysis needs own dmpdir until standard dmpdir has full ocean obs
 export DMPDIR=@DMPDIR@
 
+# For BUFR2IODA json and python scripts
+export JSON_TMPL_DIR="${PARMgfs}/gdas/ioda/bufr2ioda"
+export BUFR2IODA_PY_DIR="${USHgfs}"
+
 # Get task specific resources
 . "${EXPDIR}/config.resources" prepoceanobs
 echo "END: config.prepoceanobs"
diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowobs
new file mode 100644
index 0000000000..60ca16ce9e
--- /dev/null
+++ b/parm/config/gfs/config.prepsnowobs
@@ -0,0 +1,21 @@
+#! /usr/bin/env bash
+
+########## config.prepsnowobs ##########
+# Snow Obs Prep specific
+
+echo "BEGIN: config.prepsnowobs"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" prepsnowobs
+
+export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2"
+export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2"
+
+export BUFR2IODAX="${EXECgfs}/bufr2ioda.x"
+
+export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe"
+export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2"
+
+export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py"
+
+echo "END: config.prepsnowobs"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index c179c33df4..7b737d05f0 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -1,29 +1,32 @@
 #! /usr/bin/env bash
+# shellcheck disable=SC2034
 
 ########## config.resources ##########
 # Set resource information for job tasks
 # e.g. walltime, node, cores per node, memory etc.
+# Note: machine-specific resources should be placed into the appropriate config file:
+#       config.resources.${machine}
 
-if [[ $# -ne 1 ]]; then
+if (( $# != 1 )); then
 
     echo "Must specify an input task argument to set resource variables!"
     echo "argument can be any one of the following:"
     echo "stage_ic aerosol_init"
-    echo "prep preplandobs prepatmiodaobs"
-    echo "atmanlinit atmanlrun atmanlfinal"
-    echo "atmensanlinit atmensanlrun atmensanlfinal"
-    echo "landanl"
-    echo "aeroanlinit aeroanlrun aeroanlfinal"
+    echo "prep prepsnowobs prepatmiodaobs"
+    echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal"
+    echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal"
+    echo "snowanl"
+    echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal"
     echo "anal sfcanl analcalc analdiag fcst echgres"
     echo "upp atmos_products"
     echo "tracker genesis genesis_fsu"
     echo "verfozn verfrad vminmon fit2obs metp arch cleanup"
     echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
-    echo "init_chem mom6ic ocnpost"
+    echo "init_chem mom6ic oceanice_products"
     echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
     echo "wavegempak waveawipsbulls waveawipsgridded"
     echo "postsnd awips gempak npoess"
-    echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy"
+    echo "ocnanalprep prepoceanobs marinebmat ocnanalrun ocnanalecen marineanalletkf ocnanalchkpt ocnanalpost ocnanalvrfy"
     exit 1
 
 fi
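
The hunk below replaces the per-step npe_*/nth_* variables with a uniform ntasks / threads_per_task / tasks_per_node scheme. A minimal sketch (illustrative, not part of the patch) of that arithmetic, using placeholder numbers:

    #! /usr/bin/env bash
    max_tasks_per_node=128   # from the machine case block (e.g. WCOSS2)
    threads_per_task=5       # per-step setting
    ntasks=780               # per-step setting

    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
    # Node count is derived here only for illustration (ceiling division).
    nodes=$(( (ntasks + tasks_per_node - 1) / tasks_per_node ))
    echo "tasks_per_node=${tasks_per_node} nodes=${nodes}"
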
@@ -32,1164 +35,1269 @@ step=$1
 
 echo "BEGIN: config.resources"
 
-if [[ "${machine}" = "WCOSS2" ]]; then
-   export npe_node_max=128
-elif [[ "${machine}" = "JET" ]]; then
-   if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then
-     export npe_node_max=16
-   elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then
-     export npe_node_max=24
-   elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then
-     export npe_node_max=16
-   elif [[ ${PARTITION_BATCH} = "kjet" ]]; then
-     export npe_node_max=40
-   fi
-elif [[ "${machine}" = "HERA" ]]; then
-   export npe_node_max=40
-elif [[ "${machine}" = "S4" ]]; then
-   if [[ ${PARTITION_BATCH} = "s4" ]]; then
-      export npe_node_max=32
-   elif [[ ${PARTITION_BATCH} = "ivy" ]]; then
-      export npe_node_max=20
-   fi
-elif [[ "${machine}" = "AWSPW" ]]; then
-     export PARTITION_BATCH="compute"
-     export npe_node_max=40
-elif [[ "${machine}" = "ORION" ]]; then
-   export npe_node_max=40
-elif [[ "${machine}" = "HERCULES" ]]; then
-   export npe_node_max=80
-fi
-
-if [[ "${step}" = "prep" ]]; then
-    export wtime_prep='00:30:00'
-    export npe_prep=4
-    export npe_node_prep=2
-    export nth_prep=1
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export is_exclusive=True
-    else
-      export memory_prep="40G"
+case ${machine} in
+  "WCOSS2")
+              max_tasks_per_node=128
+              # shellcheck disable=SC2034
+              mem_node_max="500GB"
+    ;;
+  "HERA")
+              max_tasks_per_node=40
+              # shellcheck disable=SC2034
+              mem_node_max="96GB"
+    ;;
+  "GAEA")
+              max_tasks_per_node=128
+              # shellcheck disable=SC2034
+              mem_node_max="251GB"
+    ;;
+  "ORION")
+              max_tasks_per_node=40
+              # shellcheck disable=SC2034
+              mem_node_max="192GB"
+    ;;
+  "HERCULES")
+              max_tasks_per_node=80
+              # shellcheck disable=SC2034
+              mem_node_max="512GB"
+    ;;
+  "JET")
+    case ${PARTITION_BATCH} in
+      "xjet")
+              max_tasks_per_node=24
+              # shellcheck disable=SC2034
+              mem_node_max="61GB"
+        ;;
+      "vjet")
+              max_tasks_per_node=16
+              # shellcheck disable=SC2034
+              mem_node_max="61GB"
+        ;;
+      "sjet")
+              max_tasks_per_node=16
+              # shellcheck disable=SC2034
+              mem_node_max="29GB"
+        ;;
+      "kjet")
+              max_tasks_per_node=40
+              # shellcheck disable=SC2034
+              mem_node_max="88GB"
+        ;;
+      *)
+        echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}"
+        exit 3
+    esac
+    ;;
+  "S4")
+    case ${PARTITION_BATCH} in
+      "s4")   max_tasks_per_node=32
+              # shellcheck disable=SC2034
+              mem_node_max="168GB"
+        ;;
+      "ivy")
+              max_tasks_per_node=20
+              # shellcheck disable=SC2034
+              mem_node_max="128GB"
+        ;;
+      *)
+        echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}"
+        exit 3
+    esac
+    ;;
+  "AWSPW")
+    export PARTITION_BATCH="compute"
+    max_tasks_per_node=40
+    # TODO Supply a max mem/node value for AWS
+    # shellcheck disable=SC2034
+    mem_node_max=""
+    ;;
+  "CONTAINER")
+    max_tasks_per_node=1
+    # TODO Supply a max mem/node value for a container
+    # shellcheck disable=SC2034
+    mem_node_max=""
+    ;;
+  *)
+    echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}"
+    exit 2
+    ;;
+esac
+
+export max_tasks_per_node
+
+case ${step} in
+  "prep")
+    walltime='00:30:00'
+    ntasks=4
+    tasks_per_node=2
+    threads_per_task=1
+    memory="40GB"
+    ;;
+
+  "prepsnowobs")
+    walltime="00:05:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    ;;
+
+  "prepatmiodaobs")
+    walltime="00:30:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "aerosol_init")
+    walltime="00:05:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
+    memory="6GB"
+    ;;
+
+  "waveinit")
+    walltime="00:10:00"
+    ntasks=12
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
+    memory="2GB"
+    ;;
+
+  "waveprep")
+    walltime="00:10:00"
+    ntasks_gdas=5
+    ntasks_gfs=65
+    threads_per_task=1
+
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS_gdas=${ntasks_gdas}
+    NTASKS_gfs=${ntasks_gfs}
+    memory_gdas="100GB"
+    memory_gfs="150GB"
+    ;;
+
+  "wavepostsbs")
+    walltime_gdas="00:20:00"
+    walltime_gfs="03:00:00"
+    ntasks=8
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
+    memory_gdas="10GB"
+    memory_gfs="10GB"
+    ;;
+
+  # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes.
+  # Limit the number of tasks/node to 40.
+  "wavepostbndpnt")
+    walltime="03:00:00"
+    ntasks=240
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export is_exclusive=True
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        tasks_per_node=40
+        export is_exclusive=False
     fi
-
-elif [[ "${step}" = "preplandobs" ]]; then
-    export wtime_preplandobs="00:05:00"
-    npe_preplandobs=1
-    export npe_preplandobs
-    export nth_preplandobs=1
-    npe_node_preplandobs=1
-    export npe_node_preplandobs
-
-elif [[ "${step}" = "prepatmiodaobs" ]]; then
-    export wtime_prepatmiodaobs="00:10:00"
-    export npe_prepatmiodaobs=1
-    export nth_prepatmiodaobs=1
-    npe_node_prepatmiodaobs=$(echo "${npe_node_max} / ${nth_prepatmiodaobs}" | bc)
-    export npe_node_prepatmiodaobs
-
-elif [[ "${step}" = "aerosol_init" ]]; then
-    export wtime_aerosol_init="00:05:00"
-    export npe_aerosol_init=1
-    export nth_aerosol_init=1
-    npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc)
-    export npe_node_aerosol_init
-    export NTASKS=${npe_aerosol_init}
-    export memory_aerosol_init="6G"
-
-elif [[ "${step}" = "waveinit" ]]; then
-
-    export wtime_waveinit="00:10:00"
-    export npe_waveinit=12
-    export nth_waveinit=1
-    npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc)
-    export npe_node_waveinit
-    export NTASKS=${npe_waveinit}
-    export memory_waveinit="2GB"
-
-elif [[ "${step}" = "waveprep" ]]; then
-
-    export wtime_waveprep="00:10:00"
-    export npe_waveprep=5
-    export npe_waveprep_gfs=65
-    export nth_waveprep=1
-    export nth_waveprep_gfs=1
-    npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc)
-    export npe_node_waveprep
-    npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc)
-    export npe_node_waveprep_gfs
-    export NTASKS=${npe_waveprep}
-    export NTASKS_gfs=${npe_waveprep_gfs}
-    export memory_waveprep="100GB"
-    export memory_waveprep_gfs="150GB"
-
-elif [[ "${step}" = "wavepostsbs" ]]; then
-
-    export wtime_wavepostsbs="00:20:00"
-    export wtime_wavepostsbs_gfs="03:00:00"
-    export npe_wavepostsbs=8
-    export nth_wavepostsbs=1
-    npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc)
-    export npe_node_wavepostsbs
-    export NTASKS=${npe_wavepostsbs}
-    export memory_wavepostsbs="10GB"
-    export memory_wavepostsbs_gfs="10GB"
-
-elif [[ "${step}" = "wavepostbndpnt" ]]; then
-
-    export wtime_wavepostbndpnt="01:00:00"
-    export npe_wavepostbndpnt=240
-    export nth_wavepostbndpnt=1
-    npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc)
-    export npe_node_wavepostbndpnt
-    export NTASKS=${npe_wavepostbndpnt}
+    NTASKS=${ntasks}
+    ;;
+
+  "wavepostbndpntbll")
+    walltime="01:00:00"
+    ntasks=448
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ "${step}" = "wavepostbndpntbll" ]]; then
-
-    export wtime_wavepostbndpntbll="01:00:00"
-    export npe_wavepostbndpntbll=448
-    export nth_wavepostbndpntbll=1
-    npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc)
-    export npe_node_wavepostbndpntbll
-    export NTASKS=${npe_wavepostbndpntbll}
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        tasks_per_node=40
+        export is_exclusive=False
+    fi
+    NTASKS=${ntasks}
+    ;;
+
+  "wavepostpnt")
+    walltime="04:00:00"
+    ntasks=200
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ "${step}" = "wavepostpnt" ]]; then
-
-    export wtime_wavepostpnt="04:00:00"
-    export npe_wavepostpnt=200
-    export nth_wavepostpnt=1
-    npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc)
-    export npe_node_wavepostpnt
-    export NTASKS=${npe_wavepostpnt}
+    if [[ ${tasks_per_node} -gt 40 ]]; then
+        tasks_per_node=40
+        export is_exclusive=False
+    fi
+    NTASKS=${ntasks}
+    ;;
+
+  "wavegempak")
+    walltime="02:00:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
+    memory="1GB"
+    ;;
+
+  "waveawipsbulls")
+    walltime="00:20:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
     export is_exclusive=True
-
-elif [[ "${step}" = "wavegempak" ]]; then
-
-    export wtime_wavegempak="02:00:00"
-    export npe_wavegempak=1
-    export nth_wavegempak=1
-    npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc)
-    export npe_node_wavegempak
-    export NTASKS=${npe_wavegempak}
-    export memory_wavegempak="1GB"
-
-elif [[ "${step}" = "waveawipsbulls" ]]; then
-
-    export wtime_waveawipsbulls="00:20:00"
-    export npe_waveawipsbulls=1
-    export nth_waveawipsbulls=1
-    npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc)
-    export npe_node_waveawipsbulls
-    export NTASKS=${npe_waveawipsbulls}
+    ;;
+
+  "waveawipsgridded")
+    walltime="02:00:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    NTASKS=${ntasks}
+    memory_gfs="1GB"
+    ;;
+
+  "atmanlinit")
+    export layout_x=${layout_x_atmanl}
+    export layout_y=${layout_y_atmanl}
+
+    export layout_gsib_x=$(( layout_x * 3 ))
+    export layout_gsib_y=$(( layout_y * 2 ))
+
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="3072M"
+    ;;
+
+  "atmanlvar")
+    export layout_x=${layout_x_atmanl}
+    export layout_y=${layout_y_atmanl}
+
+    walltime="00:30:00"
+    ntasks_gdas=$(( layout_x * layout_y * 6 ))
+    ntasks_gfs=$(( layout_x * layout_y * 6 ))
+    threads_per_task_gdas=1
+    threads_per_task_gfs=${threads_per_task_gdas}
+    tasks_per_node_gdas=$(( max_tasks_per_node / threads_per_task_gdas ))
+    tasks_per_node_gfs=$(( max_tasks_per_node / threads_per_task_gfs ))
+    memory="96GB"
     export is_exclusive=True
-
-elif [[ ${step} = "waveawipsgridded" ]]; then
-
-    export wtime_waveawipsgridded="02:00:00"
-    export npe_waveawipsgridded=1
-    export nth_waveawipsgridded=1
-    npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc)
-    export npe_node_waveawipsgridded
-    export NTASKS=${npe_waveawipsgridded}
-    export memory_waveawipsgridded_gfs="1GB"
-
-elif [[ "${step}" = "atmanlinit" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    layout_gsib_x=$(echo "${layout_x} * 3" | bc)
-    export layout_gsib_x
-    layout_gsib_y=$(echo "${layout_y} * 2" | bc)
-    export layout_gsib_y
-
-    export wtime_atmanlinit="00:10:00"
-    export npe_atmanlinit=1
-    export nth_atmanlinit=1
-    npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc)
-    export npe_node_atmanlinit
-    export memory_atmanlinit="3072M"
-
-elif [[ "${step}" = "atmanlrun" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    export wtime_atmanlrun="00:30:00"
-    npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmanlrun
-    npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmanlrun_gfs
-    export nth_atmanlrun=1
-    export nth_atmanlrun_gfs=${nth_atmanlrun}
-    npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc)
-    export npe_node_atmanlrun
+    ;;
+
+  "atmanlfv3inc")
+    export layout_x=${layout_x_atmanl}
+    export layout_y=${layout_y_atmanl}
+
+    walltime="00:30:00"
+    ntasks_gdas=$(( layout_x * layout_y * 6 ))
+    ntasks_gfs=$(( layout_x * layout_y * 6 ))
+    threads_per_task_gdas=1
+    threads_per_task_gfs=${threads_per_task_gdas}
+    tasks_per_node_gdas=$(( max_tasks_per_node / threads_per_task_gdas ))
+    tasks_per_node_gfs=$(( max_tasks_per_node / threads_per_task_gfs ))
+    memory="96GB"
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "atmanlfinal" ]]; then
-
-    export wtime_atmanlfinal="00:30:00"
-    export npe_atmanlfinal=${npe_node_max}
-    export nth_atmanlfinal=1
-    npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc)
-    export npe_node_atmanlfinal
+  "atmanlfinal")
+    walltime="00:30:00"
+    ntasks=${max_tasks_per_node}
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "landanl" ]]; then
-   # below lines are for creating JEDI YAML
-   case ${CASE} in
-     C768)
+  "snowanl")
+    # below lines are for creating JEDI YAML
+    case ${CASE} in
+      "C768")
         layout_x=6
         layout_y=6
         ;;
-     C384)
+      "C384")
         layout_x=5
         layout_y=5
         ;;
-     C192 | C96 | C48)
+      "C192" | "C96" | "C48")
         layout_x=1
         layout_y=1
         ;;
-     *)
-        echo "FATAL ERROR: Resolution not supported for land analysis'"
-        exit 1
-   esac
-
-   export layout_x
-   export layout_y
-
-   export wtime_landanl="00:15:00"
-   npe_landanl=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-   export npe_landanl
-   export nth_landanl=1
-   npe_node_landanl=$(echo "${npe_node_max} / ${nth_landanl}" | bc)
-   export npe_node_landanl
-
-elif [[ "${step}" = "aeroanlinit" ]]; then
-
-   # below lines are for creating JEDI YAML
-   case ${CASE} in
-      C768)
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
+    esac
+
+    export layout_x
+    export layout_y
+
+    walltime="00:15:00"
+    ntasks=$(( layout_x * layout_y * 6 ))
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "prepobsaero")
+    walltime="00:30:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="96GB"
+    ;;
+
+  "aeroanlinit")
+    # below lines are for creating JEDI YAML
+    case ${CASE} in
+      "C768")
         layout_x=8
         layout_y=8
         ;;
-      C384)
+      "C384")
         layout_x=8
         layout_y=8
         ;;
-      C192 | C96)
+      "C192" | "C96")
         layout_x=8
         layout_y=8
         ;;
-      C48 )
+      "C48" )
         # this case is for testing only
         layout_x=1
         layout_y=1
         ;;
       *)
-          echo "FATAL ERROR: Resolution not supported for aerosol analysis'"
-          exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
     esac
 
     export layout_x
     export layout_y
-
-    export wtime_aeroanlinit="00:10:00"
-    export npe_aeroanlinit=1
-    export nth_aeroanlinit=1
-    npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc)
-    export npe_node_aeroanlinit
-    export memory_aeroanlinit="3072M"
-
-elif [[ "${step}" = "aeroanlrun" ]]; then
-
-   case ${CASE} in
-      C768)
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="3072M"
+    ;;
+
+  "aeroanlrun")
+    case ${CASE} in
+      "C768")
         layout_x=8
         layout_y=8
         ;;
-      C384)
+      "C384")
         layout_x=8
         layout_y=8
         ;;
-      C192 | C96)
+      "C192" | "C96")
         layout_x=8
         layout_y=8
         ;;
-      C48 )
+      "C48" )
         # this case is for testing only
         layout_x=1
         layout_y=1
         ;;
       *)
-          echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!"
-          exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
     esac
 
     export layout_x
     export layout_y
 
-    export wtime_aeroanlrun="00:30:00"
-    npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_aeroanlrun
-    npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_aeroanlrun_gfs
-    export nth_aeroanlrun=1
-    export nth_aeroanlrun_gfs=1
-    npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc)
-    export npe_node_aeroanlrun
+    walltime="00:30:00"
+    ntasks=$(( layout_x * layout_y * 6 ))
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
+    ;;
+
+  "aeroanlfinal")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="3072M"
+    ;;
+
+  "ocnanalprep")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="24GB"
+    ;;
+
+  "prepoceanobs")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="48GB"
+    ;;
+
+  "marinebmat")
+    npes=16
+    ntasks=16
+    case ${OCNRES} in
+      "025") ntasks=480;;
+      "050")  ntasks=16;;
+      "500")  ntasks=16;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}"
+        exit 4
+    esac
 
-elif [[ "${step}" = "aeroanlfinal" ]]; then
-
-    export wtime_aeroanlfinal="00:10:00"
-    export npe_aeroanlfinal=1
-    export nth_aeroanlfinal=1
-    npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc)
-    export npe_node_aeroanlfinal
-    export memory_aeroanlfinal="3072M"
-
-elif [[ "${step}" = "ocnanalprep" ]]; then
-
-    export wtime_ocnanalprep="00:10:00"
-    export npe_ocnanalprep=1
-    export nth_ocnanalprep=1
-    npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc)
-    export npe_node_ocnanalprep
-    export memory_ocnanalprep="24GB"
-
-elif [[ "${step}" = "prepoceanobs" ]]; then
-
-    export wtime_prepoceanobs="00:10:00"
-    export npe_prepoceanobs=1
-    export nth_prepoceanobs=1
-    npe_node_prepoceanobs=$(echo "${npe_node_max} / ${nth_prepoceanobs}" | bc)
-    export npe_node_prepoceanobs
-    export memory_prepoceanobs="24GB"
-
-
-elif [[ "${step}" = "ocnanalbmat" ]]; then
-   npes=16
-   case ${CASE} in
-      C384)
-        npes=480
+    walltime="00:30:00"
+    threads_per_task=1
+    export is_exclusive=True
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ocnanalrun")
+    ntasks=16
+    case ${OCNRES} in
+      "025")
+        ntasks=480
+        memory="96GB"
         ;;
-      C96)
-        npes=16
+      "050")
+        ntasks=16
+        memory="96GB"
         ;;
-      C48)
-        npes=16
+      "500")
+        ntasks=16
+        memory="24GB"
         ;;
       *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}"
+        exit 4
     esac
 
-    export wtime_ocnanalbmat="00:30:00"
-    export npe_ocnanalbmat=${npes}
-    export nth_ocnanalbmat=1
+    walltime="00:15:00"
+    threads_per_task=1
     export is_exclusive=True
-    npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc)
-    export npe_node_ocnanalbmat
-
-elif [[ "${step}" = "ocnanalrun" ]]; then
-   npes=16
-   case ${CASE} in
-      C384)
-        npes=480
-        memory_ocnanalrun="128GB"
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ocnanalecen")
+    ntasks=16
+    case ${OCNRES} in
+      "025")
+        ntasks=40
+        memory="96GB"
         ;;
-      C96)
-        npes=16
+      "050")
+        ntasks=16
+        memory="96GB"
         ;;
-      C48)
-        npes=16
-        memory_ocnanalrun="64GB"
+      "500")
+        ntasks=16
+        memory="24GB"
         ;;
       *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}"
+        exit 4
     esac
 
-    export wtime_ocnanalrun="00:15:00"
-    export npe_ocnanalrun=${npes}
-    export nth_ocnanalrun=2
+    walltime="00:10:00"
+    threads_per_task=1
     export is_exclusive=True
-    npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc)
-    export npe_node_ocnanalrun
-    export memory_ocnanalrun
-
-elif [[ "${step}" = "ocnanalchkpt" ]]; then
-
-   export wtime_ocnanalchkpt="00:10:00"
-   export npe_ocnanalchkpt=1
-   export nth_ocnanalchkpt=1
-   npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc)
-   export npe_node_ocnanalchkpt
-   case ${CASE} in
-      C384)
-        export memory_ocnanalchkpt="128GB"
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "marineanalletkf")
+    ntasks=16
+    case ${OCNRES} in
+      "025")
+        ntasks=480
+        memory="96GB"
         ;;
-      C96)
-        export memory_ocnanalchkpt="32GB"
+      "050")
+        ntasks=16
+        memory="96GB"
         ;;
-      C48)
-        export memory_ocnanalchkpt="32GB"
+      "500")
+        ntasks=16
+        memory="24GB"
         ;;
       *)
-          echo "FATAL: Resolution not supported'"
-          exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}"
+        exit 4
     esac
 
-elif [[ "${step}" = "ocnanalpost" ]]; then
-
-    export wtime_ocnanalpost="00:30:00"
-    export npe_ocnanalpost=${npe_node_max}
-    export nth_ocnanalpost=1
-    npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc)
-    export npe_node_ocnanalpost
-
-elif [[ "${step}" = "ocnanalvrfy" ]]; then
-
-    export wtime_ocnanalvrfy="00:35:00"
-    export npe_ocnanalvrfy=1
-    export nth_ocnanalvrfy=1
-    npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc)
-    export npe_node_ocnanalvrfy
-    export memory_ocnanalvrfy="24GB"
-
-elif [[ "${step}" = "anal" ]]; then
-
-    export wtime_anal="00:50:00"
-    export wtime_anal_gfs="00:40:00"
-    export npe_anal=780
-    export nth_anal=5
-    export npe_anal_gfs=825
-    export nth_anal_gfs=5
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export nth_anal=8
-      export nth_anal_gfs=8
-    fi
-    if [[ "${CASE}" = "C384" ]]; then
-      export npe_anal=160
-      export npe_anal_gfs=160
-      export nth_anal=10
-      export nth_anal_gfs=10
-      if [[ "${machine}" = "S4" ]]; then
-         #On the S4-s4 partition, this is accomplished by increasing the task
-         #count to a multiple of 32
-         if [[ ${PARTITION_BATCH} = "s4" ]]; then
-            export npe_anal=416
-            export npe_anal_gfs=416
-         fi
-         #S4 is small, so run this task with just 1 thread
-         export nth_anal=1
-         export nth_anal_gfs=1
-         export wtime_anal="02:00:00"
-      fi
-    fi
-    if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then
-      export npe_anal=84
-      export npe_anal_gfs=84
-      if [[ "${machine}" = "S4" ]]; then
-         export nth_anal=4
-         export nth_anal_gfs=4
-         #Adjust job count for S4
-         if [[ "${PARTITION_BATCH}" = "s4" ]]; then
-            export npe_anal=88
-            export npe_anal_gfs=88
-         elif [[ ${PARTITION_BATCH} = "ivy" ]]; then
-            export npe_anal=90
-            export npe_anal_gfs=90
-         fi
-      fi
+    walltime="00:10:00"
+    threads_per_task=1
+    export is_exclusive=True
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ocnanalchkpt")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    case ${OCNRES} in
+      "025")
+        memory="128GB"
+        ntasks=40;;
+      "050")
+        memory="32GB"
+        ntasks=16;;
+      "500")
+        memory="32GB"
+        ntasks=8;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}"
+        exit 4
+    esac
+    ;;
+
+  "ocnanalpost")
+    walltime="00:30:00"
+    ntasks=${max_tasks_per_node}
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ocnanalvrfy")
+    walltime="00:35:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="24GB"
+    ;;
+
+  "anal")
+    walltime_gdas="01:20:00"
+    walltime_gfs="01:00:00"
+    case ${CASE} in
+      "C768")
+        ntasks_gdas=780
+        ntasks_gfs=825
+        threads_per_task=5
+        ;;
+      "C384")
+        ntasks_gdas=160
+        ntasks_gfs=160
+        threads_per_task=10
+        ;;
+      "C192" | "C96" | "C48")
+        ntasks_gdas=84
+        ntasks_gfs=84
+        threads_per_task=5
+        ;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
+        ;;
+    esac
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export threads_per_task_cycle=${threads_per_task}
+    export tasks_per_node_cycle=$(( max_tasks_per_node / threads_per_task_cycle ))
+    export is_exclusive=True
+    ;;
+
+  "analcalc")
+    walltime="00:15:00"
+    ntasks=127
+    export ntasks_calcanl="${ntasks}"
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export threads_per_task_echgres_gdas=4
+    export threads_per_task_echgres_gfs=12
+    export is_exclusive=True
+    memory="48GB"
+    if [[ "${CASE}" == "C384" || "${CASE}" == "C768" ]]; then
+       memory="${mem_node_max}"
     fi
-    npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc)
-    export npe_node_anal
-    export nth_cycle=${nth_anal}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
+    ;;
+
+  "analdiag")
+    walltime="00:15:00"
+    ntasks=96             # Should be at least twice ediag's tasks
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="48GB"
+    ;;
+
+  "sfcanl")
+    walltime="00:20:00"
+    ntasks=${ntiles:-6}
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "analcalc" ]]; then
-
-    export wtime_analcalc="00:10:00"
-    export npe_analcalc=127
-    export ntasks="${npe_analcalc}"
-    export nth_analcalc=1
-    export nth_echgres=4
-    export nth_echgres_gfs=12
-    npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc)
-    export npe_node_analcalc
+  "fcst" | "efcs")
     export is_exclusive=True
-    export memory_analcalc="48GB"
 
-elif [[ "${step}" = "analdiag" ]]; then
-
-    export wtime_analdiag="00:15:00"
-    export npe_analdiag=96             # Should be at least twice npe_ediag
-    export nth_analdiag=1
-    npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc)
-    export npe_node_analdiag
-    export memory_analdiag="48GB"
+    _RUN=${RUN:-"gfs"}
+    _RUN=${_RUN/enkf/}
+
+    # Declare variables from config.ufs based on _RUN
+    # Export layout and write task variables, but not ntasks/threads
+    # Capitalize _RUN for write tasks
+    for var in layout_x layout_y ntasks_fv3 ntasks_quilt nthreads_fv3 nthreads_ufs \
+               WRITE_GROUP WRTTASK_PER_GROUP_PER_THREAD; do
+      if [[ ${var} =~ "layout" ]]; then
+        ufs_var_name="${var}_${_RUN}"
+        declare -x "${var}"="${!ufs_var_name}"
+      elif [[ ${var} =~ "WR" ]]; then
+        ufs_var_name="${var}_${_RUN^^}"
+        declare -x "${var}"="${!ufs_var_name}"
+      else
+        ufs_var_name="${var}_${_RUN}"
+        declare "${var}"="${!ufs_var_name}"
+      fi
+    done
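+    # Illustrative example (hypothetical values): for RUN="gdas", the loop above resolves
+    # ufs_var_name="layout_x_gdas" and exports layout_x from it via indirect expansion
+    # (${!ufs_var_name}); WRITE_GROUP is likewise taken from WRITE_GROUP_GDAS.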
 
-elif [[ "${step}" = "sfcanl" ]]; then
+    # Do not set mediator threads if the mediator is being skipped
+    if [[ ${_RUN} == "gfs" ]]; then
+      nthreads_mediator=${nthreads_mediator_gfs:-}
+    elif [[ ${_RUN} == "gdas" ]]; then
+      nthreads_mediator=${nthreads_mediator_gdas:-}
+    fi
 
-    export wtime_sfcanl="00:10:00"
-    export npe_sfcanl=6
-    export nth_sfcanl=1
-    npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc)
-    export npe_node_sfcanl
-    export is_exclusive=True
+    # Determine whether to use ESMF-managed threading or traditional threading
+    # If using traditional threading, set the component thread counts to 1
+    if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+      export UFS_THREADS=1
+    else  # traditional threading
+      export UFS_THREADS=${nthreads_ufs:-1}
+      nthreads_fv3=1
+      nthreads_mediator=1
+      [[ "${DO_WAVE}" == "YES" ]] && nthreads_ww3=1
+      [[ "${DO_OCN}" == "YES" ]] && nthreads_mom6=1
+      [[ "${DO_ICE}" == "YES" ]] && nthreads_cice6=1
+    fi
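+    # Illustrative example (assumed settings): with USE_ESMF_THREADING=YES, UFS_THREADS=1
+    # and the per-component nthreads_* values above are passed through to ESMF; with
+    # traditional threading and, say, nthreads_ufs=4, UFS_THREADS=4 while each component
+    # thread count is forced to 1.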
 
-elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then
+    if (( ntiles > 6 )); then
+      export layout_x_nest=${layout_x_nest:-10}
+      export layout_y_nest=${layout_y_nest:-10}
+      export npx_nest=${npx_nest:-1441}
+      export npy_nest=${npy_nest:-961}
+    fi
 
-    export is_exclusive=True
+    # PETS for the atmosphere dycore
+    (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+    echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
 
-    if [[ "${step}" = "fcst" ]]; then
-        _CDUMP_LIST=${CDUMP:-"gdas gfs"}
-    elif [[ "${step}" = "efcs" ]]; then
-        _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"}
+    # PETS for quilting
+    if [[ "${QUILTING:-}" == ".true." ]]; then
+      (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+      (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+      export WRTTASK_PER_GROUP
+    else
+      QUILTPETS=0
+    fi
+    echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+    # Total PETS for the atmosphere component
+    ATMTHREADS=${nthreads_fv3}
+    (( ATMPETS = FV3PETS + QUILTPETS ))
+    export ATMPETS ATMTHREADS
+    echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+    # Total PETS for the coupled model (starting w/ the atmosphere)
+    NTASKS_TOT=${ATMPETS}
+
+    # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks.
+    # However, it is suggested to limit the mediator PETS to 300, as larger counts may degrade performance.
+    # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+    # TODO: Update reference when moved to ufs-weather-model RTD
+    MEDTHREADS=${nthreads_mediator:-1}
+    MEDPETS=${MEDPETS:-${FV3PETS}}
+    (( "${MEDPETS}" > 300 )) && MEDPETS=300
+    export MEDPETS MEDTHREADS
+    echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+    CHMPETS=0; CHMTHREADS=0
+    if [[ "${DO_AERO}" == "YES" ]]; then
+      # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+      (( CHMTHREADS = ATMTHREADS ))
+      (( CHMPETS = FV3PETS ))
+      # Do not add to NTASKS_TOT
+      echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
     fi
+    export CHMPETS CHMTHREADS
+
+    WAVPETS=0; WAVTHREADS=0
+    if [[ "${DO_WAVE}" == "YES" ]]; then
+      (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+      (( WAVTHREADS = nthreads_ww3 ))
+      echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
+    fi
+    export WAVPETS WAVTHREADS
+
+    OCNPETS=0; OCNTHREADS=0
+    if [[ "${DO_OCN}" == "YES" ]]; then
+      (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+      (( OCNTHREADS = nthreads_mom6 ))
+      echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+    fi
+    export OCNPETS OCNTHREADS
+
+    ICEPETS=0; ICETHREADS=0
+    if [[ "${DO_ICE}" == "YES" ]]; then
+      (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+      (( ICETHREADS = nthreads_cice6 ))
+      echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+    fi
+    export ICEPETS ICETHREADS
 
-    # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined
-    for _CDUMP in ${_CDUMP_LIST}; do
-        if [[ "${_CDUMP}" =~ "gfs" ]]; then
-          export layout_x=${layout_x_gfs}
-          export layout_y=${layout_y_gfs}
-          export WRITE_GROUP=${WRITE_GROUP_GFS}
-          export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
-          ntasks_fv3=${ntasks_fv3_gfs}
-          ntasks_quilt=${ntasks_quilt_gfs}
-          nthreads_fv3=${nthreads_fv3_gfs}
-        fi
-
-        # PETS for the atmosphere dycore
-        (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
-        echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
-
-        # PETS for quilting
-        if [[ "${QUILTING:-}" = ".true." ]]; then
-          (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
-          (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
-          export WRTTASK_PER_GROUP
-        else
-          QUILTPETS=0
-        fi
-        echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
-
-        # Total PETS for the atmosphere component
-        ATMTHREADS=${nthreads_fv3}
-        (( ATMPETS = FV3PETS + QUILTPETS ))
-        export ATMPETS ATMTHREADS
-        echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
-
-        # Total PETS for the coupled model (starting w/ the atmosphere)
-        NTASKS_TOT=${ATMPETS}
-
-        # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks.
-        # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance.
-        # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
-        # TODO: Update reference when moved to ufs-weather-model RTD
-        MEDTHREADS=${nthreads_mediator:-1}
-        MEDPETS=${MEDPETS:-${FV3PETS}}
-        [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
-        export MEDPETS MEDTHREADS
-        echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
-
-        CHMPETS=0; CHMTHREADS=0
-        if [[ "${DO_AERO}" = "YES" ]]; then
-          # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
-          (( CHMTHREADS = ATMTHREADS ))
-          (( CHMPETS = FV3PETS ))
-          # Do not add to NTASKS_TOT
-          echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
-        fi
-        export CHMPETS CHMTHREADS
-
-        WAVPETS=0; WAVTHREADS=0
-        if [[ "${DO_WAVE}" = "YES" ]]; then
-          (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
-          (( WAVTHREADS = nthreads_ww3 ))
-          echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
-        fi
-        export WAVPETS WAVTHREADS
-
-        OCNPETS=0; OCNTHREADS=0
-        if [[ "${DO_OCN}" = "YES" ]]; then
-          (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
-          (( OCNTHREADS = nthreads_mom6 ))
-          echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
-        fi
-        export OCNPETS OCNTHREADS
-
-        ICEPETS=0; ICETHREADS=0
-        if [[ "${DO_ICE}" = "YES" ]]; then
-          (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
-          (( ICETHREADS = nthreads_cice6 ))
-          echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
-          (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
-        fi
-        export ICEPETS ICETHREADS
-
-        echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
-
-        if [[ "${_CDUMP}" =~ "gfs" ]]; then
-          declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
-          declare -x "nth_${step}_gfs"=1  # ESMF handles threading for the UFS-weather-model
-          declare -x "npe_node_${step}_gfs"="${npe_node_max}"
-        else
-          declare -x "npe_${step}"="${NTASKS_TOT}"
-          declare -x "nth_${step}"=1  # ESMF handles threading for the UFS-weather-model
-          declare -x "npe_node_${step}"="${npe_node_max}"
-        fi
+    echo "Total PETS for ${RUN:-gfs} = ${NTASKS_TOT}"
 
-    done
+    declare -x "ntasks"="${NTASKS_TOT}"
+    declare -x "threads_per_task"="${UFS_THREADS}"
+    declare -x "tasks_per_node"="${max_tasks_per_node}"
 
     case "${CASE}" in
       "C48" | "C96" | "C192")
-        declare -x "wtime_${step}"="00:30:00"
-        declare -x "wtime_${step}_gfs"="03:00:00"
+        declare -x "walltime_gdas"="00:20:00"
+        declare -x "walltime_enkfgdas"="00:20:00"
+        declare -x "walltime_gfs"="03:00:00"
+        declare -x "walltime_enkfgfs"="00:20:00"
         ;;
       "C384")
-        declare -x "wtime_${step}"="00:20:00"
-        declare -x "wtime_${step}_gfs"="06:00:00"
+        declare -x "walltime_gdas"="00:30:00"
+        declare -x "walltime_enkfgdas"="00:30:00"
+        declare -x "walltime_gfs"="06:00:00"
+        declare -x "walltime_enkfgfs"="00:30:00"
         ;;
       "C768" | "C1152")
-        declare -x "wtime_${step}"="01:00:00"
-        declare -x "wtime_${step}_gfs"="06:00:00"
+        # Not valid resolutions for ensembles
+        declare -x "walltime_gdas"="00:40:00"
+        declare -x "walltime_gfs"="06:00:00"
         ;;
       *)
-        echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
-        exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
         ;;
     esac
 
-    unset _CDUMP _CDUMP_LIST
+    unset _RUN
     unset NTASKS_TOT
+    ;;
 
-elif [[ "${step}" = "ocnpost" ]]; then
-
-    export wtime_ocnpost="00:30:00"
-    export npe_ocnpost=1
-    export npe_node_ocnpost=1
-    export nth_ocnpost=1
-    export memory_ocnpost="96G"
-    if [[ "${machine}" == "JET" ]]; then
-       # JET only has 88GB of requestable memory per node
-       # so a second node is required to meet the requiremtn
-       npe_ocnpost=2
-    fi
-
-elif [[ "${step}" = "upp" ]]; then
+  "oceanice_products")
+    walltime="00:15:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="96GB"
+    ;;
 
+  "upp")
     case "${CASE}" in
       "C48" | "C96")
-        export npe_upp=${CASE:1}
+        ntasks=${CASE:1}
       ;;
-      "C192" | "C384" | "C768")
-        export npe_upp=120
+      "C192" | "C384" | "C768" )
+        ntasks=120
+        memory="${mem_node_max}"
       ;;
       *)
-        echo "FATAL ERROR: Resolution '${CASE}' not supported for UPP'"
-        exit 1
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
       ;;
     esac
-    export npe_node_upp=${npe_upp}
+    tasks_per_node=${ntasks}
 
-    export nth_upp=1
+    threads_per_task=1
 
-    export wtime_upp="00:15:00"
-    if [[ "${npe_node_upp}" -gt "${npe_node_max}" ]]; then
-      export npe_node_upp=${npe_node_max}
+    walltime="00:15:00"
+    if (( tasks_per_node > max_tasks_per_node )); then
+      tasks_per_node=${max_tasks_per_node}
     fi
     export is_exclusive=True
+    ;;
 
-elif [[ ${step} = "atmos_products" ]]; then
-
-    export wtime_atmos_products="00:15:00"
-    export npe_atmos_products=24
-    export nth_atmos_products=1
-    export npe_node_atmos_products="${npe_atmos_products}"
-    export wtime_atmos_products_gfs="${wtime_atmos_products}"
-    export npe_atmos_products_gfs="${npe_atmos_products}"
-    export nth_atmos_products_gfs="${nth_atmos_products}"
-    export npe_node_atmos_products_gfs="${npe_node_atmos_products}"
+  "atmos_products")
+    walltime="00:15:00"
+    ntasks=24
+    threads_per_task=1
+    tasks_per_node="${ntasks}"
     export is_exclusive=True
-
-elif [[ ${step} = "verfozn" ]]; then
-
-    export wtime_verfozn="00:05:00"
-    export npe_verfozn=1
-    export nth_verfozn=1
-    export npe_node_verfozn=1
-    export memory_verfozn="1G"
-
-elif [[ ${step} = "verfrad" ]]; then
-
-    export wtime_verfrad="00:40:00"
-    export npe_verfrad=1
-    export nth_verfrad=1
-    export npe_node_verfrad=1
-    export memory_verfrad="5G"
-
-elif [[ ${step} = "vminmon" ]]; then
-
-    export wtime_vminmon="00:05:00"
-    export npe_vminmon=1
-    export nth_vminmon=1
-    export npe_node_vminmon=1
-    export wtime_vminmon_gfs="00:05:00"
-    export npe_vminmon_gfs=1
-    export nth_vminmon_gfs=1
-    export npe_node_vminmon_gfs=1
-    export memory_vminmon="1G"
-
-elif [[ ${step} = "tracker" ]]; then
-
-    export wtime_tracker="00:10:00"
-    export npe_tracker=1
-    export nth_tracker=1
-    export npe_node_tracker=1
-    export memory_tracker="4G"
-
-elif [[ ${step} = "genesis" ]]; then
-
-    export wtime_genesis="00:25:00"
-    export npe_genesis=1
-    export nth_genesis=1
-    export npe_node_genesis=1
-    export memory_genesis="4G"
-
-elif [[ ${step} = "genesis_fsu" ]]; then
-
-    export wtime_genesis_fsu="00:10:00"
-    export npe_genesis_fsu=1
-    export nth_genesis_fsu=1
-    export npe_node_genesis_fsu=1
-    export memory_genesis_fsu="4G"
-
-elif [[ "${step}" = "fit2obs" ]]; then
-
-    export wtime_fit2obs="00:20:00"
-    export npe_fit2obs=3
-    export nth_fit2obs=1
-    export npe_node_fit2obs=1
-    export memory_fit2obs="20G"
-    if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
-
-elif [[ "${step}" = "metp" ]]; then
-
-    export nth_metp=1
-    export wtime_metp="03:00:00"
-    export npe_metp=4
-    export npe_node_metp=4
-    export wtime_metp_gfs="06:00:00"
-    export npe_metp_gfs=4
-    export npe_node_metp_gfs=4
+    ;;
+
+  "verfozn")
+    walltime="00:05:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="1G"
+    ;;
+
+  "verfrad")
+    walltime="00:40:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="5G"
+    ;;
+
+  "vminmon")
+    walltime="00:05:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="1G"
+    ;;
+
+  "tracker")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="4G"
+    ;;
+
+  "genesis")
+    walltime="00:25:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="10G"
+    ;;
+
+  "genesis_fsu")
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=1
+    memory="10G"
+    ;;
+
+  "fit2obs")
+    walltime="00:20:00"
+    ntasks=3
+    threads_per_task=1
+    tasks_per_node=1
+    memory="20G"
+    [[ ${CASE} == "C768" ]] && memory="80GB"
+    ;;
+
+  "metp")
+    threads_per_task=1
+    walltime_gdas="03:00:00"
+    walltime_gfs="06:00:00"
+    ntasks=1
+    tasks_per_node=1
+    export memory="80G"
+    ;;
+
+  "echgres")
+    walltime="00:10:00"
+    ntasks=3
+    threads_per_task=${max_tasks_per_node}
+    tasks_per_node=1
+    ;;
+
+  "init")
+    walltime="00:30:00"
+    ntasks=24
+    threads_per_task=1
+    tasks_per_node=6
+    memory="70GB"
+    ;;
+
+  "init_chem")
+    walltime="00:30:00"
+    ntasks=1
+    tasks_per_node=1
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "echgres" ]]; then
-
-    export wtime_echgres="00:10:00"
-    export npe_echgres=3
-    export nth_echgres=${npe_node_max}
-    export npe_node_echgres=1
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      export memory_echgres="200GB"
-    fi
-
-elif [[ "${step}" = "init" ]]; then
-
-    export wtime_init="00:30:00"
-    export npe_init=24
-    export nth_init=1
-    export npe_node_init=6
-    export memory_init="70G"
-
-elif [[ "${step}" = "init_chem" ]]; then
-
-    export wtime_init_chem="00:30:00"
-    export npe_init_chem=1
-    export npe_node_init_chem=1
+  "mom6ic")
+    walltime="00:30:00"
+    ntasks=24
+    tasks_per_node=24
     export is_exclusive=True
-
-elif [[ "${step}" = "mom6ic" ]]; then
-
-    export wtime_mom6ic="00:30:00"
-    export npe_mom6ic=24
-    export npe_node_mom6ic=24
+    ;;
+
+  "arch" | "earc" | "getic")
+    walltime="06:00:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="4096M"
+    ;;
+
+  "cleanup")
+    walltime="00:15:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="4096M"
+    ;;
+
+  "stage_ic")
+    walltime="00:15:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
     export is_exclusive=True
+    ;;
+
+  "atmensanlinit")
+    export layout_x=${layout_x_atmensanl}
+    export layout_y=${layout_y_atmensanl}
+
+    walltime="00:10:00"
+    ntasks=1
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="3072M"
+    ;;
+
+  "atmensanlletkf")
+    export layout_x=${layout_x_atmensanl}
+    export layout_y=${layout_y_atmensanl}
+
+    walltime="00:30:00"
+    ntasks=$(( layout_x * layout_y * 6 ))
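+    # e.g. with a hypothetical layout_x=8 and layout_y=8, ntasks = 8 * 8 * 6 = 384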
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="96GB"
+    export is_exclusive=True
+    ;;
 
-elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then
-
-    eval "export wtime_${step}='06:00:00'"
-    eval "export npe_${step}=1"
-    eval "export npe_node_${step}=1"
-    eval "export nth_${step}=1"
-    eval "export memory_${step}=4096M"
-    if [[ "${machine}" = "WCOSS2" ]]; then
-      eval "export memory_${step}=50GB"
-    fi
-
-elif [[ ${step} == "cleanup" ]]; then
-    export wtime_cleanup="01:00:00"
-    export npe_cleanup=1
-    export npe_node_cleanup=1
-    export nth_cleanup=1
-    export memory_cleanup="4096M"
-
-elif [[ ${step} = "stage_ic" ]]; then
+  "atmensanlfv3inc")
+    export layout_x=${layout_x_atmensanl}
+    export layout_y=${layout_y_atmensanl}
 
-    export wtime_stage_ic="00:15:00"
-    export npe_stage_ic=1
-    export npe_node_stage_ic=1
-    export nth_stage_ic=1
+    walltime="00:30:00"
+    ntasks=$(( layout_x * layout_y * 6 ))
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="96GB"
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "atmensanlinit" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    export wtime_atmensanlinit="00:10:00"
-    export npe_atmensanlinit=1
-    export nth_atmensanlinit=1
-    npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc)
-    export npe_node_atmensanlinit
-    export memory_atmensanlinit="3072M"
-
-elif [[ "${step}" = "atmensanlrun" ]]; then
-
-    # make below case dependent later
-    export layout_x=1
-    export layout_y=1
-
-    export wtime_atmensanlrun="00:30:00"
-    npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmensanlrun
-    npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
-    export npe_atmensanlrun_gfs
-    export nth_atmensanlrun=1
-    export nth_atmensanlrun_gfs=${nth_atmensanlrun}
-    npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc)
-    export npe_node_atmensanlrun
+  "atmensanlfinal")
+    walltime="00:30:00"
+    ntasks=${max_tasks_per_node}
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
+    ;;
 
-elif [[ "${step}" = "atmensanlfinal" ]]; then
+  "eobs" | "eomg")
+    if [[ "${step}" == "eobs" ]]; then
+      walltime="00:15:00"
+    else
+      walltime="00:30:00"
+    fi
 
-    export wtime_atmensanlfinal="00:30:00"
-    export npe_atmensanlfinal=${npe_node_max}
-    export nth_atmensanlfinal=1
-    npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc)
-    export npe_node_atmensanlfinal
+    case ${CASE} in
+      "C768")                 ntasks=200;;
+      "C384")                 ntasks=100;;
+      "C192" | "C96" | "C48") ntasks=40;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
+        ;;
+    esac
+    threads_per_task=2
+    # NOTE: The number of tasks and cores used must be the same for eobs
+    # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then
-
-    export wtime_eobs="00:15:00"
-    export wtime_eomg="01:00:00"
-    if [[ "${CASE}" = "C768" ]]; then
-      export npe_eobs=200
-    elif [[ "${CASE}" = "C384" ]]; then
-      export npe_eobs=100
-    elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then
-      export npe_eobs=40
+    # Unset tasks_per_node if it does not evenly divide max_tasks_per_node
+    # to prevent dropping data on the floor.  This should be set in
+    # config.resources.{machine} instead.  This will result in an error at
+    # experiment setup time if not set in config.resources.{machine}.
+    if [[ $(( max_tasks_per_node % tasks_per_node )) != 0 ]]; then
+      unset tasks_per_node
     fi
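+    # Example (hypothetical node size): with max_tasks_per_node=40 and threads_per_task=2,
+    # tasks_per_node=20 divides 40 evenly and is kept; otherwise it is unset here and must
+    # be supplied by config.resources.{machine}.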
-    export npe_eomg=${npe_eobs}
-    export nth_eobs=2
-    export nth_eomg=${nth_eobs}
-    npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc)
-    export npe_node_eobs
+    ;;
+
+  "ediag")
+    walltime="00:15:00"
+    ntasks=48
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    memory="30GB"
+    ;;
+
+  "eupd")
+    walltime="00:30:00"
+    case ${CASE} in
+      "C768")
+        ntasks=480
+        threads_per_task=6
+        ;;
+      "C384")
+        ntasks=270
+        threads_per_task=8
+        ;;
+      "C192" | "C96" | "C48")
+        ntasks=42
+        threads_per_task=2
+        ;;
+      *)
+        echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+        exit 4
+        ;;
+    esac
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-    # The number of tasks and cores used must be the same for eobs
-    # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
-    # For S4, this is accomplished by running 10 tasks/node
-    if [[ ${machine} = "S4" ]]; then
-       export npe_node_eobs=10
-    elif [[ ${machine} = "HERCULES" ]]; then
-       # For Hercules, this is only an issue at C384; use 20 tasks/node
-       if [[ ${CASE} = "C384" ]]; then
-          export npe_node_eobs=20
-       fi
-    fi
-    export npe_node_eomg=${npe_node_eobs}
-
-elif [[ "${step}" = "ediag" ]]; then
-
-    export wtime_ediag="00:15:00"
-    export npe_ediag=48
-    export nth_ediag=1
-    npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc)
-    export npe_node_ediag
-    export memory_ediag="30GB"
-
-elif [[ "${step}" = "eupd" ]]; then
-
-    export wtime_eupd="00:30:00"
-    if [[ "${CASE}" = "C768" ]]; then
-      export npe_eupd=480
-      export nth_eupd=6
-      if [[ "${machine}" = "WCOSS2" ]]; then
-        export npe_eupd=315
-        export nth_eupd=14
-      fi
-    elif [[ "${CASE}" = "C384" ]]; then
-      export npe_eupd=270
-      export nth_eupd=8
-      if [[ "${machine}" = "WCOSS2" ]]; then
-        export npe_eupd=315
-        export nth_eupd=14
-      elif [[ "${machine}" = "S4" ]]; then
-         export npe_eupd=160
-         export nth_eupd=2
-      fi
-    elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then
-      export npe_eupd=42
-      export nth_eupd=2
-      if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then
-        export nth_eupd=4
-      fi
+    ;;
+
+  "ecen")
+    walltime="00:10:00"
+    ntasks=80
+    threads_per_task=4
+    if [[ ${CASE} == "C384" || ${CASE} == "C192" || ${CASE} == "C96" || ${CASE} == "C48" ]]; then
+      threads_per_task=2
     fi
-    npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc)
-    export npe_node_eupd
-    export is_exclusive=True
-
-elif [[ "${step}" = "ecen" ]]; then
-
-    export wtime_ecen="00:10:00"
-    export npe_ecen=80
-    export nth_ecen=4
-    if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi
-    if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi
-    npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc)
-    export npe_node_ecen
-    export nth_cycle=${nth_ecen}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    export threads_per_task_cycle=${threads_per_task}
+    export tasks_per_node_cycle=${tasks_per_node}
     export is_exclusive=True
-
-elif [[ "${step}" = "esfc" ]]; then
-
-    export wtime_esfc="00:08:00"
-    export npe_esfc=80
-    export nth_esfc=1
-    npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc)
-    export npe_node_esfc
-    export nth_cycle=${nth_esfc}
-    npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc)
-    export npe_node_cycle
-    export memory_esfc="80GB"
-
-elif [[ "${step}" = "epos" ]]; then
-
-    export wtime_epos="00:15:00"
-    export npe_epos=80
-    export nth_epos=1
-    npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc)
-    export npe_node_epos
+    ;;
+
+  "esfc")
+    walltime="00:15:00"
+    ntasks=80
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    threads_per_task_cycle=${threads_per_task}
+    tasks_per_node_cycle=$(( max_tasks_per_node / threads_per_task_cycle ))
+    ;;
+
+  "epos")
+    walltime="00:15:00"
+    [[ ${CASE} == "C768" ]] && walltime="00:25:00"
+    ntasks=80
+    threads_per_task=1
+    tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     export is_exclusive=True
-
-elif [[ "${step}" = "postsnd" ]]; then
-
-    export wtime_postsnd="02:00:00"
-    export npe_postsnd=40
-    export nth_postsnd=8
-    export npe_node_postsnd=10
-    export npe_postsndcfp=9
-    export npe_node_postsndcfp=1
-    postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc)
-    if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then
-        npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc)
-        export npe_node_postsnd
+    ;;
+
+  "postsnd")
+    walltime="02:00:00"
+    ntasks=40
+    threads_per_task=8
+    tasks_per_node=10
+    export ntasks_postsndcfp=9
+    export tasks_per_node_postsndcfp=1
+    postsnd_req_cores=$(( tasks_per_node * threads_per_task ))
+    if (( postsnd_req_cores > max_tasks_per_node )); then
+        tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     fi
     export is_exclusive=True
-
-elif [[ "${step}" = "awips" ]]; then
-
-    export wtime_awips="03:30:00"
-    export npe_awips=1
-    export npe_node_awips=1
-    export nth_awips=1
-    export memory_awips="3GB"
-
-elif [[ ${step} = "npoess" ]]; then
-
-    export wtime_npoess="03:30:00"
-    export npe_npoess=1
-    export npe_node_npoess=1
-    export nth_npoess=1
-    export memory_npoess="3GB"
-
-elif [[ ${step} = "gempak" ]]; then
-
-    export wtime_gempak="03:00:00"
-    export npe_gempak=2
-    export npe_gempak_gfs=28
-    export npe_node_gempak=2
-    export npe_node_gempak_gfs=28
-    export nth_gempak=1
-    export memory_gempak="4GB"
-    export memory_gempak_gfs="2GB"
-
-elif [[ ${step} = "mos_stn_prep" ]]; then
-
-    export wtime_mos_stn_prep="00:10:00"
-    export npe_mos_stn_prep=3
-    export npe_node_mos_stn_prep=3
-    export nth_mos_stn_prep=1
-    export memory_mos_stn_prep="5GB"
-    export NTASK="${npe_mos_stn_prep}"
-    export PTILE="${npe_node_mos_stn_prep}"
-
-elif [[ ${step} = "mos_grd_prep" ]]; then
-
-    export wtime_mos_grd_prep="00:10:00"
-    export npe_mos_grd_prep=4
-    export npe_node_mos_grd_prep=4
-    export nth_mos_grd_prep=1
-    export memory_mos_grd_prep="16GB"
-    export NTASK="${npe_mos_grd_prep}"
-    export PTILE="${npe_node_mos_grd_prep}"
-
-elif [[ ${step} = "mos_ext_stn_prep" ]]; then
-
-    export wtime_mos_ext_stn_prep="00:15:00"
-    export npe_mos_ext_stn_prep=2
-    export npe_node_mos_ext_stn_prep=2
-    export nth_mos_ext_stn_prep=1
-    export memory_mos_ext_stn_prep="5GB"
-    export NTASK="${npe_mos_ext_stn_prep}"
-    export PTILE="${npe_node_mos_ext_stn_prep}"
-
-elif [[ ${step} = "mos_ext_grd_prep" ]]; then
-
-    export wtime_mos_ext_grd_prep="00:10:00"
-    export npe_mos_ext_grd_prep=7
-    export npe_node_mos_ext_grd_prep=7
-    export nth_mos_ext_grd_prep=1
-    export memory_mos_ext_grd_prep="3GB"
-    export NTASK="${npe_mos_ext_grd_prep}"
-    export PTILE="${npe_node_mos_ext_grd_prep}"
-
-elif [[ ${step} = "mos_stn_fcst" ]]; then
-
-    export wtime_mos_stn_fcst="00:10:00"
-    export npe_mos_stn_fcst=5
-    export npe_node_mos_stn_fcst=5
-    export nth_mos_stn_fcst=1
-    export memory_mos_stn_fcst="40GB"
-    export NTASK="${npe_mos_stn_fcst}"
-    export PTILE="${npe_node_mos_stn_fcst}"
-
-elif [[ ${step} = "mos_grd_fcst" ]]; then
-
-    export wtime_mos_grd_fcst="00:10:00"
-    export npe_mos_grd_fcst=7
-    export npe_node_mos_grd_fcst=7
-    export nth_mos_grd_fcst=1
-    export memory_mos_grd_fcst="50GB"
-    export NTASK="${npe_mos_grd_fcst}"
-    export PTILE="${npe_node_mos_grd_fcst}"
-
-elif [[ ${step} = "mos_ext_stn_fcst" ]]; then
-
-    export wtime_mos_ext_stn_fcst="00:20:00"
-    export npe_mos_ext_stn_fcst=3
-    export npe_node_mos_ext_stn_fcst=3
-    export nth_mos_ext_stn_fcst=1
-    export memory_mos_ext_stn_fcst="50GB"
-    export NTASK="${npe_mos_ext_stn_fcst}"
-    export PTILE="${npe_node_mos_ext_stn_fcst}"
+    ;;
+
+  "awips")
+    walltime="03:30:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="3GB"
+    ;;
+
+  "npoess")
+    walltime="03:30:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="3GB"
+    ;;
+
+  "gempak")
+    walltime="00:30:00"
+    ntasks_gdas=2
+    ntasks_gfs=28
+    tasks_per_node_gdas=2
+    tasks_per_node_gfs=28
+    threads_per_task=1
+    memory_gdas="4GB"
+    memory_gfs="2GB"
+    ;;
+
+  "fbwind")
+    walltime="00:05:00"
+    ntasks=1
+    threads_per_task=1
+    memory="4GB"
+    ;;
+
+  "mos_stn_prep")
+    walltime="00:10:00"
+    ntasks=3
+    tasks_per_node=3
+    threads_per_task=1
+    memory="5GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_grd_prep")
+    walltime="00:10:00"
+    ntasks=4
+    tasks_per_node=4
+    threads_per_task=1
+    memory="16GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_ext_stn_prep")
+    walltime="00:15:00"
+    ntasks=2
+    tasks_per_node=2
+    threads_per_task=1
+    memory="5GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_ext_grd_prep")
+    walltime="00:10:00"
+    ntasks=7
+    tasks_per_node=7
+    threads_per_task=1
+    memory="3GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_stn_fcst")
+    walltime="00:10:00"
+    ntasks=5
+    tasks_per_node=5
+    threads_per_task=1
+    memory="40GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_grd_fcst")
+    walltime="00:10:00"
+    ntasks=7
+    tasks_per_node=7
+    threads_per_task=1
+    memory="50GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_ext_stn_fcst")
+    walltime="00:20:00"
+    ntasks=3
+    tasks_per_node=3
+    threads_per_task=1
+    memory="50GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
     export prepost=True
-
-elif [[ ${step} = "mos_ext_grd_fcst" ]]; then
-
-    export wtime_mos_ext_grd_fcst="00:10:00"
-    export npe_mos_ext_grd_fcst=7
-    export npe_node_mos_ext_grd_fcst=7
-    export nth_mos_ext_grd_fcst=1
-    export memory_mos_ext_grd_fcst="50GB"
-    export NTASK="${npe_mos_ext_grd_fcst}"
-    export PTILE="${npe_node_mos_ext_grd_fcst}"
-
-elif [[ ${step} = "mos_stn_prdgen" ]]; then
-
-    export wtime_mos_stn_prdgen="00:10:00"
-    export npe_mos_stn_prdgen=1
-    export npe_node_mos_stn_prdgen=1
-    export nth_mos_stn_prdgen=1
-    export memory_mos_stn_prdgen="15GB"
-    export NTASK="${npe_mos_stn_prdgen}"
-    export PTILE="${npe_node_mos_stn_prdgen}"
+    ;;
+
+  "mos_ext_grd_fcst")
+    walltime="00:10:00"
+    ntasks=7
+    tasks_per_node=7
+    threads_per_task=1
+    memory="50GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    ;;
+
+  "mos_stn_prdgen")
+    walltime="00:10:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="15GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
     export prepost=True
-
-elif [[ ${step} = "mos_grd_prdgen" ]]; then
-
-    export wtime_mos_grd_prdgen="00:40:00"
-    export npe_mos_grd_prdgen=72
-    export npe_node_mos_grd_prdgen=18
-    export nth_mos_grd_prdgen=4
-    export memory_mos_grd_prdgen="20GB"
-    export NTASK="${npe_mos_grd_prdgen}"
-    export PTILE="${npe_node_mos_grd_prdgen}"
-    export OMP_NUM_THREADS="${nth_mos_grd_prdgen}"
-
-elif [[ ${step} = "mos_ext_stn_prdgen" ]]; then
-
-    export wtime_mos_ext_stn_prdgen="00:10:00"
-    export npe_mos_ext_stn_prdgen=1
-    export npe_node_mos_ext_stn_prdgen=1
-    export nth_mos_ext_stn_prdgen=1
-    export memory_mos_ext_stn_prdgen="15GB"
-    export NTASK="${npe_mos_ext_stn_prdgen}"
-    export PTILE="${npe_node_mos_ext_stn_prdgen}"
+    ;;
+
+  "mos_grd_prdgen")
+    walltime="00:40:00"
+    ntasks=72
+    tasks_per_node=18
+    threads_per_task=4
+    memory="20GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    export OMP_NUM_THREADS="${threads_per_task}"
+    ;;
+
+  "mos_ext_stn_prdgen")
+    walltime="00:10:00"
+    ntasks=1
+    tasks_per_node=1
+    threads_per_task=1
+    memory="15GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
     export prepost=True
+    ;;
+
+  "mos_ext_grd_prdgen")
+    walltime="00:30:00"
+    ntasks=96
+    tasks_per_node=6
+    threads_per_task=16
+    memory="30GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    export OMP_NUM_THREADS="${threads_per_task}"
+    ;;
+
+  "mos_wx_prdgen")
+    walltime="00:10:00"
+    ntasks=4
+    tasks_per_node=2
+    threads_per_task=2
+    memory="10GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    export OMP_NUM_THREADS="${threads_per_task}"
+    ;;
+
+  "mos_wx_ext_prdgen")
+    walltime="00:10:00"
+    ntasks=4
+    tasks_per_node=2
+    threads_per_task=2
+    memory="10GB"
+    NTASK="${ntasks}"
+    export PTILE="${tasks_per_node}"
+    export OMP_NUM_THREADS="${threads_per_task}"
+    ;;
+
+  *)
+    echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}"
+    exit 1
+    ;;
 
-elif [[ ${step} = "mos_ext_grd_prdgen" ]]; then
-
-    export wtime_mos_ext_grd_prdgen="00:30:00"
-    export npe_mos_ext_grd_prdgen=96
-    export npe_node_mos_ext_grd_prdgen=6
-    export nth_mos_ext_grd_prdgen=16
-    export memory_mos_ext_grd_prdgen="30GB"
-    export NTASK="${npe_mos_ext_grd_prdgen}"
-    export PTILE="${npe_node_mos_ext_grd_prdgen}"
-    export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}"
-
-elif [[ ${step} = "mos_wx_prdgen" ]]; then
-
-    export wtime_mos_wx_prdgen="00:10:00"
-    export npe_mos_wx_prdgen=4
-    export npe_node_mos_wx_prdgen=2
-    export nth_mos_wx_prdgen=2
-    export memory_mos_wx_prdgen="10GB"
-    export NTASK="${npe_mos_wx_prdgen}"
-    export PTILE="${npe_node_mos_wx_prdgen}"
-    export OMP_NUM_THREADS="${nth_mos_wx_prdgen}"
-
-elif [[ ${step} = "mos_wx_ext_prdgen" ]]; then
-
-    export wtime_mos_wx_ext_prdgen="00:10:00"
-    export npe_mos_wx_ext_prdgen=4
-    export npe_node_mos_wx_ext_prdgen=2
-    export nth_mos_wx_ext_prdgen=2
-    export memory_mos_wx_ext_prdgen="10GB"
-    export NTASK="${npe_mos_wx_ext_prdgen}"
-    export PTILE="${npe_node_mos_wx_ext_prdgen}"
-    export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}"
-
-else
-
-    echo "Invalid step = ${step}, ABORT!"
-    exit 2
+esac
 
+# Get machine-specific resources, overriding/extending the above assignments
+if [[ -f "${EXPDIR}/config.resources.${machine}" ]]; then
+   source "${EXPDIR}/config.resources.${machine}"
 fi
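+# For example, config.resources.HERA raises eupd threads_per_task at C192 and below,
+# while config.resources.WCOSS2 increases the anal task counts at C768.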
 
+# Check for RUN-specific variables and export them
+for resource_var in threads_per_task ntasks tasks_per_node NTASKS memory walltime; do
+   run_resource_var="${resource_var}_${RUN}"
+   if [[ -n "${!run_resource_var+0}" ]]; then
+      declare -x "${resource_var}"="${!run_resource_var}"
+   elif [[ -n "${!resource_var+0}" ]]; then
+      export "${resource_var?}"
+   fi
+done
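+# Example (hypothetical values): with RUN="gfs" and walltime_gfs="06:00:00" defined,
+# the loop above exports walltime="06:00:00"; if no RUN-specific value exists, the
+# generic variable (e.g. walltime) is exported unchanged.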
+
 echo "END: config.resources"
diff --git a/parm/config/gfs/config.resources.GAEA b/parm/config/gfs/config.resources.GAEA
new file mode 100644
index 0000000000..51007b5b4f
--- /dev/null
+++ b/parm/config/gfs/config.resources.GAEA
@@ -0,0 +1,27 @@
+#! /usr/bin/env bash
+
+# Gaea-specific job resources
+
+case ${step} in
+  "eobs")
+    # The number of tasks and cores used must be the same for eobs
+    # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+    case ${CASE} in
+      "C768" | "C384")
+        export tasks_per_node=50
+        ;;
+      *)
+        export tasks_per_node=40
+        ;;
+    esac
+    ;;
+
+  *)
+    ;;
+
+esac
+
+# shellcheck disable=SC2312
+for mem_var in $(env | grep '^memory_' | cut -d= -f1); do
+  unset "${mem_var}"
+done
diff --git a/parm/config/gfs/config.resources.HERA b/parm/config/gfs/config.resources.HERA
new file mode 100644
index 0000000000..36f50508c3
--- /dev/null
+++ b/parm/config/gfs/config.resources.HERA
@@ -0,0 +1,35 @@
+#! /usr/bin/env bash
+
+# Hera-specific job resources
+
+case ${step} in
+  "anal")
+    if [[ "${CASE}" == "C384" ]]; then
+      export ntasks=270
+      export threads_per_task_anal=8
+      export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    fi
+    ;;
+
+  "eupd")
+    case ${CASE} in
+      "C384")
+        export ntasks=80
+        ;;
+      "C192" | "C96" | "C48")
+        export threads_per_task=4
+        ;;
+      *)
+        ;;
+    esac
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ecen")
+    if [[ "${CASE}" == "C768" ]]; then export threads_per_task=6; fi
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  *)
+    ;;
+esac
diff --git a/parm/config/gfs/config.resources.HERCULES b/parm/config/gfs/config.resources.HERCULES
new file mode 100644
index 0000000000..7a5a74f69c
--- /dev/null
+++ b/parm/config/gfs/config.resources.HERCULES
@@ -0,0 +1,16 @@
+#! /usr/bin/env bash
+
+# Hercules-specific job resources
+
+case ${step} in
+  "eobs" | "eomg")
+    # The number of tasks and cores used must be the same for eobs
+    # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+    # For Hercules, this is only an issue at C384; use 20 tasks/node
+    if [[ ${CASE} = "C384" ]]; then
+      export tasks_per_node=20
+    fi
+    ;;
+  *)
+    ;;
+esac
diff --git a/parm/config/gfs/config.resources.JET b/parm/config/gfs/config.resources.JET
new file mode 100644
index 0000000000..47b953c0f4
--- /dev/null
+++ b/parm/config/gfs/config.resources.JET
@@ -0,0 +1,52 @@
+#! /usr/bin/env bash
+
+# Jet-specific job resources
+
+case ${step} in
+  "anal")
+    if [[ "${CASE}" == "C384" ]]; then
+      export ntasks=270
+      export threads_per_task=8
+      export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    fi
+    ;;
+
+  "eobs")
+    if [[ "${PARTITION_BATCH}" == "xjet" ]]; then
+      # The number of tasks and cores used must be the same for eobs
+      # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+      # This would also be an issue for vjet and sjet if anyone runs on those nodes.
+      export tasks_per_node=10
+    fi
+    ;;
+
+  "eupd")
+    case ${CASE} in
+      "C384")
+        export ntasks=80
+        ;;
+      "C192" | "C96" | "C48")
+        export threads_per_task=4
+        ;;
+      *)
+        ;;
+    esac
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ecen")
+    if [[ "${CASE}" == "C768" ]]; then export threads_per_task=6; fi
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "upp")
+    export memory="${mem_node_max}"
+    ;;
+
+  "esfc")
+    export memory="${mem_node_max}"
+    ;;
+
+  *)
+    ;;
+esac
diff --git a/parm/config/gfs/config.resources.ORION b/parm/config/gfs/config.resources.ORION
new file mode 100644
index 0000000000..e3e81b0182
--- /dev/null
+++ b/parm/config/gfs/config.resources.ORION
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+# Orion-specific job resources
+
+case ${step} in
+  "anal")
+    # TODO:
+    # On Orion, after Rocky 9 upgrade, GSI performance is degraded.
+    # Remove this block once GSI issue is resolved
+    # https://github.com/NOAA-EMC/GSI/pull/764
+    # https://github.com/JCSDA/spack-stack/issues/1166
+    export walltime_gdas="02:40:00"
+    export walltime_gfs="02:00:00"
+  ;;
+  *)
+  ;;
+esac
diff --git a/parm/config/gfs/config.resources.S4 b/parm/config/gfs/config.resources.S4
new file mode 100644
index 0000000000..1af64bf250
--- /dev/null
+++ b/parm/config/gfs/config.resources.S4
@@ -0,0 +1,59 @@
+#! /usr/bin/env bash
+
+# S4-specific job resources
+
+case ${step} in
+  "anal")
+    case ${CASE} in
+      "C384")
+        #Some of the intermediate data can be lost if the number of tasks
+        #per node does not match the number of reserved cores/node.
+        #On the S4-s4 partition, this is accomplished by increasing the task
+        #count to a multiple of 32
+        if [[ ${PARTITION_BATCH} = "s4" ]]; then
+          export ntasks_gdas=416
+          export ntasks_gfs=416
+        fi
+        #S4 is small, so run this task with just 1 thread
+        export threads_per_task=1
+        export walltime_gdas="02:00:00"
+        export walltime_gfs="02:00:00"
+        ;;
+      "C192" | "C96" | "C48")
+        export threads_per_task=4
+        if [[ ${PARTITION_BATCH} == "s4" ]]; then
+          export ntasks_gdas=88
+          export ntasks_gfs=88
+        elif [[ ${PARTITION_BATCH} == "ivy" ]]; then
+          export ntasks_gdas=90
+          export ntasks_gfs=90
+        fi
+        ;;
+      *)
+        ;;
+    esac
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "eobs")
+    # The number of tasks and cores used must be the same for eobs
+    # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+    # For S4, this is accomplished by running 10 tasks/node
+    export tasks_per_node=10
+    ;;
+
+  "eupd")
+    if [[ "${CASE}" == "C384" ]]; then
+      export ntasks=160
+      export threads_per_task=2
+    fi
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "ediag")
+     export memory="${mem_node_max}"
+  ;;
+
+  *)
+    ;;
+esac
diff --git a/parm/config/gfs/config.resources.WCOSS2 b/parm/config/gfs/config.resources.WCOSS2
new file mode 100644
index 0000000000..a0a69fa8d1
--- /dev/null
+++ b/parm/config/gfs/config.resources.WCOSS2
@@ -0,0 +1,59 @@
+#! /usr/bin/env bash
+
+# WCOSS2-specific job resources
+
+case ${step} in
+  "prep")
+    export is_exclusive=True
+    export memory="480GB"
+    ;;
+
+  "anal")
+    if [[ "${CASE}" == "C768" ]]; then
+        export threads_per_task=8
+        # Make ntasks a multiple of 16
+        export ntasks_gdas=784
+        export ntasks_gfs=832
+        export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    fi
+    ;;
+
+  "fit2obs")
+    export tasks_per_node=3
+    ;;
+
+  "echgres")
+    export memory="200GB"
+    ;;
+
+  "arch" | "earc" | "getic")
+    declare -x "memory"="50GB"
+    ;;
+
+  "eupd")
+    case ${CASE} in
+      "C768" | "C384")
+        export ntasks=315
+        export threads_per_task=14
+        ;;
+      *)
+      ;;
+    esac
+    export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+    ;;
+
+  "eobs")
+    case ${CASE} in
+      "C768" | "C384")
+        export tasks_per_node=50
+        ;;
+      *)
+        export tasks_per_node=40
+        ;;
+    esac
+    ;;
+
+  *)
+  ;;
+
+esac
diff --git a/parm/config/gfs/config.sfcanl b/parm/config/gfs/config.sfcanl
index 9592fb77c9..e2fde8992a 100644
--- a/parm/config/gfs/config.sfcanl
+++ b/parm/config/gfs/config.sfcanl
@@ -8,4 +8,9 @@ echo "BEGIN: config.sfcanl"
 # Get task specific resources
 . $EXPDIR/config.resources sfcanl
 
+# Turn off NST in JEDIATMVAR
+if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then
+   export DONST="NO"
+fi
+
 echo "END: config.sfcanl"
diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl
new file mode 100644
index 0000000000..a2984f190b
--- /dev/null
+++ b/parm/config/gfs/config.snowanl
@@ -0,0 +1,30 @@
+#! /usr/bin/env bash
+
+########## config.snowanl ##########
+# configuration common to snow analysis tasks
+
+echo "BEGIN: config.snowanl"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" snowanl
+
+export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2"
+
+# Name of the JEDI executable and its yaml template
+export JEDIEXE="${EXECgfs}/gdas.x"
+export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2"
+
+# Ensemble member properties
+export SNOWDEPTHVAR="snodl"
+export BESTDDEV="30."  # Background Error Std. Dev. for LETKFOI
+
+# Name of the executable that applies increment to bkg and its namelist template
+export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe"
+export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2"
+
+export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2"
+
+export io_layout_x=@IO_LAYOUT_X@
+export io_layout_y=@IO_LAYOUT_Y@
+
+echo "END: config.snowanl"
diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic
index 7f3956af4d..9956e8af6a 100644
--- a/parm/config/gfs/config.stage_ic
+++ b/parm/config/gfs/config.stage_ic
@@ -8,7 +8,7 @@ echo "BEGIN: config.stage_ic"
 source "${EXPDIR}/config.resources" stage_ic
 
 case "${CASE}" in
-  "C48" | "C96")
+  "C48" | "C96" | "C192")
     export CPL_ATMIC="workflow_${CASE}_refactored"
     export CPL_ICEIC="workflow_${CASE}_refactored"
     export CPL_OCNIC="workflow_${CASE}_refactored"
@@ -21,16 +21,16 @@ case "${CASE}" in
     export CPL_WAVIC=workflow_C384_refactored
     ;;
   "C768")
-    export CPL_ATMIC=HR2_refactored
-    export CPL_ICEIC=HR1_refactored
-    export CPL_OCNIC=HR1_refactored
-    export CPL_WAVIC=HR1_refactored
+    export CPL_ATMIC=HR3C768
+    export CPL_ICEIC=HR3marine
+    export CPL_OCNIC=HR3marine
+    export CPL_WAVIC=HR3marine
     ;;
   "C1152")
-    export CPL_ATMIC=HR2_C1152_refactored
-    export CPL_ICEIC=HR3_refactored
-    export CPL_OCNIC=HR3_refactored
-    export CPL_WAVIC=HR1_refactored
+    export CPL_ATMIC=HR3C1152
+    export CPL_ICEIC=HR3marine
+    export CPL_OCNIC=HR3marine
+    export CPL_WAVIC=HR3marine
     ;;
   *)
     echo "FATAL ERROR Unrecognized resolution: ${CASE}"
@@ -38,4 +38,8 @@ case "${CASE}" in
     ;;
 esac
 
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export CPL_ATMIC="GLOBAL-NEST_${CASE}"
+fi
+
 echo "END: config.stage_ic"
diff --git a/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt b/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt
index 5b3dab7a98..32f4d939af 100644
--- a/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt
+++ b/parm/config/gfs/config.ufs_c768_12x12_2th_1wg40wt
@@ -15,7 +15,7 @@ if (( $# <= 1 )); then
     echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072"
     echo "--mom6 500|100|025"
     echo "--cice6 500|100|025"
-    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025"
+    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16"
     echo "--gocart"
 
     exit 1
@@ -68,169 +68,271 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${
   skip_mediator=false
 fi
 
-case "${machine}" in
-  "WCOSS2")
-    npe_node_max=128
-    ;;
-  "HERA" | "ORION" )
-    npe_node_max=40
-    ;;
-  "HERCULES" )
-    npe_node_max=80
-    ;;
-  "JET")
-    case "${PARTITION_BATCH}" in
-      "xjet")
-        npe_node_max=24
-        ;;
-      "vjet" | "sjet")
-        npe_node_max=16
-        ;;
-      "kjet")
-        npe_node_max=40
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  "S4")
-    case "${PARTITION_BATCH}" in
-      "s4")
-        npe_node_max=32
-        ;;
-      "ivy")
-        npe_node_max=20
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  *)
-    echo "FATAL ERROR: Unrecognized machine ${machine}"
-    exit 14
-    ;;
-esac
-export npe_node_max
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  # Describe nest location, interaction with parent, etc.
+  export grid_type=0
+  export stretch_fac=1.0001
+  export TARGET_LAT=32.5
+  export TARGET_LON=-135.0
+  export NEST_LON1=-195.000000
+  export NEST_LAT1=-7.500000
+  export NEST_LON2=-75.000000
+  export NEST_LAT2=72.500000
+  export twowaynest=${twowaynest:-.true.}
+else
+  # No nest.
+  export grid_type=-1
+fi
 
 # (Standard) Model resolution dependent variables
 case "${fv3_res}" in
     "C48")
         export DELTIM=1200
-        export layout_x=1
-        export layout_y=1
+        export layout_x_gdas=1
+        export layout_y_gdas=1
         export layout_x_gfs=1
         export layout_y_gfs=1
-        export nthreads_fv3=1
+        export nthreads_fv3_gdas=1
         export nthreads_fv3_gfs=1
+        export nthreads_ufs_gdas=1
+        export nthreads_ufs_gfs=1
+        export xr_cnvcld=".false."  # Do not pass conv. clouds to Xu-Randall cloud fraction
         export cdmbgwd="0.071,2.1,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="40.0,1.77,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=6.0e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
+        export k_split=1
+        export n_split=4
+        export tau=8.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=3600
+        export WRITE_GROUP_GDAS=1
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1
         export WRITE_GROUP_GFS=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
         ;;
     "C96")
-        export DELTIM=600
-        export layout_x=2
-        export layout_y=2
-        export layout_x_gfs=2
-        export layout_y_gfs=2
-        export nthreads_fv3=1
-        export nthreads_fv3_gfs=1
-        export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=3.0e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
-        export WRITE_GROUP_GFS=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=450
+          export layout_x_gfs=4
+          export layout_y_gfs=4
+          export layout_x_nest=12
+          export layout_y_nest=10
+          export nest_refine=4
+          export nest_ioffset=4
+          export nest_joffset=9
+          export npx_nest=361
+          export npy_nest=241
+          export NEST_DLON=0.25
+          export NEST_DLAT=0.25
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=2
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=2
+        else
+          export DELTIM=600
+          export layout_x_gdas=2
+          export layout_y_gdas=2
+          export layout_x_gfs=2
+          export layout_y_gfs=2
+          export nthreads_fv3_gdas=1
+          export nthreads_fv3_gfs=1
+          export nthreads_ufs_gdas=1
+          export nthreads_ufs_gfs=1
+          export xr_cnvcld=.false. # Do not pass conv. clouds to Xu-Randall cloud fraction
+          export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=3.0e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=1
+          export n_split=4
+          export tau=8.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=1800
+          export WRITE_GROUP_GDAS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1
+          export WRITE_GROUP_GFS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
+        fi
         ;;
     "C192")
-        export DELTIM=450
-        export layout_x=4
-        export layout_y=6
-        export layout_x_gfs=4
-        export layout_y_gfs=6
-        export nthreads_fv3=1
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="0.23,1.5,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=1.5e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        export WRITE_GROUP_GFS=2
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=225
+          export layout_x_gfs=5
+          export layout_y_gfs=6
+          export layout_x_nest=15
+          export layout_y_nest=25
+          export nest_refine=4
+          export nest_ioffset=7
+          export nest_joffset=19
+          export npx_nest=721
+          export npy_nest=481
+          export NEST_DLON=0.125
+          export NEST_DLAT=0.125
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15
+        else
+          export DELTIM=600
+          export layout_x_gdas=4
+          export layout_y_gdas=6
+          export layout_x_gfs=4
+          export layout_y_gfs=6
+          export nthreads_fv3_gdas=1
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=1
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="0.23,1.5,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=1.5e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=6.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=1800
+          export WRITE_GROUP_GDAS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5
+        fi
         ;;
     "C384")
-        export DELTIM=300
-        export layout_x=8
-        export layout_y=8
-        export layout_x_gfs=8
-        export layout_y_gfs=8
-        export nthreads_fv3=2
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="1.1,0.72,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="5.0,5.0,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=0.8e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        export WRITE_GROUP_GFS=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=150
+          export layout_x_gdas=8
+          export layout_y_gdas=8
+          export layout_x_gfs=8
+          export layout_y_gfs=8
+          export layout_x_nest=34
+          export layout_y_nest=24
+          export nest_refine=4
+          export nest_ioffset=13
+          export nest_joffset=37
+          export npx_nest=1441
+          export npy_nest=961
+          export NEST_DLON=0.0625
+          export NEST_DLAT=0.0625
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=20
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20
+        else
+          export DELTIM=300
+          export layout_x_gdas=8
+          export layout_y_gdas=8
+          export layout_x_gfs=8
+          export layout_y_gfs=8
+          export nthreads_fv3_gdas=2
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=2
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="1.1,0.72,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="5.0,5.0,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=0.8e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=4.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=900
+          export WRITE_GROUP_GDAS=4
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          export WRITE_GROUP_GFS=4
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
+        fi
         ;;
     "C768")
-        export DELTIM=150
-        export layout_x=8
-        export layout_y=12
-        export layout_x_gfs=12
-        export layout_y_gfs=12 
-        #JKHexport layout_y_gfs=16 
-        export nthreads_fv3=4
-        #JKHexport nthreads_fv3_gfs=4
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="4.0,0.15,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="2.5,7.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=0.5e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=2
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        #JKHexport WRITE_GROUP_GFS=4
-        #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
-        export WRITE_GROUP_GFS=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=75
+          export layout_x_gdas=16
+          export layout_y_gdas=10
+          export layout_x_gfs=16
+          export layout_y_gfs=10
+          export layout_x_nest=48
+          export layout_y_nest=45
+          export nthreads_fv3_nest=2
+          export nthreads_fv3_gdas=2
+          export nthreads_fv3_gfs=2
+          export nest_refine=4
+          export nest_ioffset=24
+          export nest_joffset=72
+          export npx_nest=2881
+          export npy_nest=1921
+          export NEST_DLON=0.0325
+          export NEST_DLAT=0.0325
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=90
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=90
+        else
+          export DELTIM=150
+          export layout_x_gdas=8
+          export layout_y_gdas=12
+          export layout_x_gfs=12
+          export layout_y_gfs=12
+          #JKHexport layout_y_gfs=16
+          export nthreads_fv3_gdas=4
+          #JKHexport nthreads_fv3_gfs=4
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=4
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="4.0,0.15,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="2.5,7.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=0.5e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=3.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=450
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          #JKHexport WRITE_GROUP_GFS=4
+          #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
+          export WRITE_GROUP_GFS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2
+        fi
         ;;
     "C1152")
-        export DELTIM=120
-        export layout_x=8
-        export layout_y=16
+        export DELTIM=150
+        export layout_x_gdas=8
+        export layout_y_gdas=16
         export layout_x_gfs=8
         export layout_y_gfs=16
-        export nthreads_fv3=4
+        export nthreads_fv3_gdas=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs_gdas=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.10,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="1.67,8.8,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.35e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
+        export k_split=2
+        export n_split=6
+        export tau=2.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=450
+        export WRITE_GROUP_GDAS=4
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20  # TODO: refine these numbers when a case is available
         ;;
     "C3072")
         export DELTIM=90
         export layout_x=16
-        export layout_y=32
-        export layout_x_gfs=16
+        export layout_x_gdas=16
+        export layout_y_gdas=32
+        export layout_x_gfs=16
         export layout_y_gfs=32
-        export nthreads_fv3=4
+        export nthreads_fv3_gdas=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs_gdas=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.05,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="0.625,14.1,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.13e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
+        export k_split=4
+        export n_split=5
+        export tau=0.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=300
+        export WRITE_GROUP_GDAS=4
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10  # TODO: refine these numbers when a case is available
         ;;
@@ -240,19 +342,22 @@ case "${fv3_res}" in
         ;;
 esac
 
-(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 ))
+(( WRTTASK_PER_GROUP_PER_THREAD_GDAS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS * 6 ))
 (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))
-export WRTTASK_PER_GROUP_PER_THREAD
+export WRTTASK_PER_GROUP_PER_THREAD_GDAS
 export WRTTASK_PER_GROUP_PER_THREAD_GFS
 
-(( ntasks_fv3 = layout_x * layout_y * 6 ))
+(( ntasks_fv3_gdas = layout_x_gdas * layout_y_gdas * 6 ))
 (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))
-export ntasks_fv3
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest ))
+fi
+export ntasks_fv3_gdas
 export ntasks_fv3_gfs
 
-(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD ))
+(( ntasks_quilt_gdas = WRITE_GROUP_GDAS * WRTTASK_PER_GROUP_PER_THREAD_GDAS ))
 (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))
-export ntasks_quilt
+export ntasks_quilt_gdas
 export ntasks_quilt_gfs
 
 # Determine whether to use parallel NetCDF based on resolution
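As a quick check of the task arithmetic above, the same math worked through with the C384 (non-nest) GFS values from this file; an illustrative sketch only (with DO_NEST=YES the nest layout tasks are added on top of ntasks_fv3_gfs):

    layout_x_gfs=8; layout_y_gfs=8                        # C384 case, non-nest branch
    WRITE_GROUP_GFS=4
    WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
    (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))  # 10 * 6 = 60
    (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))                                  # 8 * 8 * 6 = 384
    (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))             # 4 * 60 = 240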
@@ -279,13 +384,19 @@ export cplice=".false."
 export cplchm=".false."
 export cplwav=".false."
 export cplwav2atm=".false."
-export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1"
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export CCPP_SUITE="${CCPP_SUITE:-FV3_global_nest_v1}"
+else
+  #JKH  export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}"
+  export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_mynn"
+fi
 model_list="atm"
 
 # Mediator specific settings
 if [[ "${skip_mediator}" == "false" ]]; then
   export cpl=".true."
-  export nthreads_mediator=${nthreads_fv3}  # Use same threads as FV3
+  export nthreads_mediator_gfs=${nthreads_fv3_gfs}  # Use same threads as FV3
+  export nthreads_mediator_gdas=${nthreads_fv3_gdas}
   export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1"  # TODO: Does this include FV3_GFS_v17_p8?  Can this be used instead of FV3_GFS_v17_p8?
 fi
 
@@ -307,9 +418,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
       CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
       MOM6_RESTART_SETTING='r'
       MOM6_RIVER_RUNOFF='False'
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+        MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_MISVAL="0.0"
+      fi
       eps_imesh="4.0e-1"
       MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc"
-      MOM6_DIAG_MISVAL="0.0"
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
       ;;
@@ -326,12 +441,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RIVER_RUNOFF='False'
       eps_imesh="2.5e-1"
       TOPOEDITS="ufs.topo_edits_011818.nc"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='True'
       ;;
@@ -347,12 +462,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RESTART_SETTING='n'
       MOM6_RIVER_RUNOFF='True'
       eps_imesh="1.0e-1"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
@@ -369,12 +484,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RIVER_RUNOFF='True'
       MOM6_RESTART_SETTING="r"
       eps_imesh="1.0e-1"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
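The RUN-based branch above is repeated for each ocean resolution; a minimal standalone sketch of what it selects (RUN=gdas is only an example value):

    RUN="gdas"
    if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
      MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"   # forecast runs: 40-level interpolated diag grid
      MOM6_DIAG_MISVAL="-1e34"
    else
      MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"       # other runs (e.g. gdas): 75-level ocean-DA diag grid
      MOM6_DIAG_MISVAL="0.0"
    fi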
@@ -457,6 +572,10 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "glo_025")
       ntasks_ww3=262
       ;;
+    "glo_100")
+      ntasks_ww3=20
+      nthreads_ww3=1
+      ;;
     "glo_200")
       ntasks_ww3=30
       nthreads_ww3=1
@@ -468,6 +587,14 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "mx025")
       ntasks_ww3=80
       ;;
+    "uglo_100km")
+      ntasks_ww3=40
+      nthreads_ww3=1
+      ;;
+    "uglo_m1g16")
+      ntasks_ww3=1000
+      nthreads_ww3=1
+      ;;
     *)
       echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!"
       exit 1
@@ -484,39 +611,45 @@ if [[ "${skip_gocart}" == "false" ]]; then
 fi
 
 # Set the name of the UFS (previously nems) configure template to use
+# Default ufs.configure templates for supported model configurations
+if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+  tmpl_suffix="_esmf"
+fi
 case "${model_list}" in
   atm)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
     ;;
   atm.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN"
     ;;
   atm.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
     ;;
   *)
-    echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}"
+    echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
     exit 16
     ;;
 esac
 
+# Allow user to override the default template
+export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}
+unset model_list default_template
+
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
   exit 17
 fi
 
-unset model_list
-
 echo "END: config.ufs"
diff --git a/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt b/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt
index ad3f472873..bba65cb3e6 100644
--- a/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt
+++ b/parm/config/gfs/config.ufs_c768_16x16_2th_2wg40wt
@@ -15,7 +15,7 @@ if (( $# <= 1 )); then
     echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072"
     echo "--mom6 500|100|025"
     echo "--cice6 500|100|025"
-    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025"
+    echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16"
     echo "--gocart"
 
     exit 1
@@ -68,169 +68,271 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${
   skip_mediator=false
 fi
 
-case "${machine}" in
-  "WCOSS2")
-    npe_node_max=128
-    ;;
-  "HERA" | "ORION" )
-    npe_node_max=40
-    ;;
-  "HERCULES" )
-    npe_node_max=80
-    ;;
-  "JET")
-    case "${PARTITION_BATCH}" in
-      "xjet")
-        npe_node_max=24
-        ;;
-      "vjet" | "sjet")
-        npe_node_max=16
-        ;;
-      "kjet")
-        npe_node_max=40
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  "S4")
-    case "${PARTITION_BATCH}" in
-      "s4")
-        npe_node_max=32
-        ;;
-      "ivy")
-        npe_node_max=20
-        ;;
-      *)
-        echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!"
-        exit 1
-        ;;
-    esac
-    ;;
-  *)
-    echo "FATAL ERROR: Unrecognized machine ${machine}"
-    exit 14
-    ;;
-esac
-export npe_node_max
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  # Describe nest location, interaction with parent, etc.
+  export grid_type=0
+  export stretch_fac=1.0001
+  export TARGET_LAT=32.5
+  export TARGET_LON=-135.0
+  export NEST_LON1=-195.000000
+  export NEST_LAT1=-7.500000
+  export NEST_LON2=-75.000000
+  export NEST_LAT2=72.500000
+  export twowaynest=${twowaynest:-.true.}
+else
+  # No nest.
+  export grid_type=-1
+fi
 
 # (Standard) Model resolution dependent variables
 case "${fv3_res}" in
     "C48")
         export DELTIM=1200
-        export layout_x=1
-        export layout_y=1
+        export layout_x_gdas=1
+        export layout_y_gdas=1
         export layout_x_gfs=1
         export layout_y_gfs=1
-        export nthreads_fv3=1
+        export nthreads_fv3_gdas=1
         export nthreads_fv3_gfs=1
+        export nthreads_ufs_gdas=1
+        export nthreads_ufs_gfs=1
+        export xr_cnvcld=".false."  # Do not pass conv. clouds to Xu-Randall cloud fraction
         export cdmbgwd="0.071,2.1,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="40.0,1.77,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=6.0e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
+        export k_split=1
+        export n_split=4
+        export tau=8.0
+        export rf_cutoff=100.0
+        export fv_sg_adj=3600
+        export WRITE_GROUP_GDAS=1
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1
         export WRITE_GROUP_GFS=1
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
         ;;
     "C96")
-        export DELTIM=600
-        export layout_x=2
-        export layout_y=2
-        export layout_x_gfs=2
-        export layout_y_gfs=2
-        export nthreads_fv3=1
-        export nthreads_fv3_gfs=1
-        export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=3.0e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
-        export WRITE_GROUP_GFS=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=450
+          export layout_x_gfs=4
+          export layout_y_gfs=4
+          export layout_x_nest=12
+          export layout_y_nest=10
+          export nest_refine=4
+          export nest_ioffset=4
+          export nest_joffset=9
+          export npx_nest=361
+          export npy_nest=241
+          export NEST_DLON=0.25
+          export NEST_DLAT=0.25
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=2
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=2
+        else
+          export DELTIM=600
+          export layout_x_gdas=2
+          export layout_y_gdas=2
+          export layout_x_gfs=2
+          export layout_y_gfs=2
+          export nthreads_fv3_gdas=1
+          export nthreads_fv3_gfs=1
+          export nthreads_ufs_gdas=1
+          export nthreads_ufs_gfs=1
+          export xr_cnvcld=".false."  # Do not pass conv. clouds to Xu-Randall cloud fraction
+          export cdmbgwd="0.14,1.8,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=3.0e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=1
+          export n_split=4
+          export tau=8.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=1800
+          export WRITE_GROUP_GDAS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1
+          export WRITE_GROUP_GFS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
+        fi
         ;;
     "C192")
-        export DELTIM=450
-        export layout_x=4
-        export layout_y=6
-        export layout_x_gfs=4
-        export layout_y_gfs=6
-        export nthreads_fv3=1
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="0.23,1.5,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=1.5e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=1
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        export WRITE_GROUP_GFS=2
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=225
+          export layout_x_gfs=5
+          export layout_y_gfs=6
+          export layout_x_nest=15
+          export layout_y_nest=25
+          export nest_refine=4
+          export nest_ioffset=7
+          export nest_joffset=19
+          export npx_nest=721
+          export npy_nest=481
+          export NEST_DLON=0.125
+          export NEST_DLAT=0.125
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15
+        else
+          export DELTIM=600
+          export layout_x_gdas=4
+          export layout_y_gdas=6
+          export layout_x_gfs=4
+          export layout_y_gfs=6
+          export nthreads_fv3_gdas=1
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=1
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="0.23,1.5,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=1.5e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=6.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=1800
+          export WRITE_GROUP_GDAS=1
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5
+        fi
         ;;
     "C384")
-        export DELTIM=300
-        export layout_x=8
-        export layout_y=8
-        export layout_x_gfs=8
-        export layout_y_gfs=8
-        export nthreads_fv3=2
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="1.1,0.72,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="5.0,5.0,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=0.8e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        export WRITE_GROUP_GFS=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=150
+          export layout_x_gdas=8
+          export layout_y_gdas=8
+          export layout_x_gfs=8
+          export layout_y_gfs=8
+          export layout_x_nest=34
+          export layout_y_nest=24
+          export nest_refine=4
+          export nest_ioffset=13
+          export nest_joffset=37
+          export npx_nest=1441
+          export npy_nest=961
+          export NEST_DLON=0.0625
+          export NEST_DLAT=0.0625
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=20
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20
+        else
+          export DELTIM=300
+          export layout_x_gdas=8
+          export layout_y_gdas=8
+          export layout_x_gfs=8
+          export layout_y_gfs=8
+          export nthreads_fv3_gdas=2
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=2
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="1.1,0.72,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="5.0,5.0,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=0.8e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=4.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=900
+          export WRITE_GROUP_GDAS=4
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          export WRITE_GROUP_GFS=4
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
+        fi
         ;;
     "C768")
-        export DELTIM=150
-        export layout_x=8
-        export layout_y=12
-        #JKHexport layout_x_gfs=12
-        export layout_x_gfs=16
-        export layout_y_gfs=16 
-        export nthreads_fv3=4
-        #JKHexport nthreads_fv3_gfs=4
-        export nthreads_fv3_gfs=2
-        export cdmbgwd="4.0,0.15,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
-        export cdmbgwd_gsl="2.5,7.5,1.0,1.0"   # settings for GSL drag suite
-        export knob_ugwp_tauamp=0.5e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=2
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
-        #JKHexport WRITE_GROUP_GFS=4
-        #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
-        export WRITE_GROUP_GFS=2
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          export DELTIM=75
+          export layout_x_gdas=16
+          export layout_y_gdas=10
+          export layout_x_gfs=16
+          export layout_y_gfs=10
+          export layout_x_nest=48
+          export layout_y_nest=45
+          export nthreads_fv3_nest=2
+          export nthreads_fv3_gdas=2
+          export nthreads_fv3_gfs=2
+          export nest_refine=4
+          export nest_ioffset=24
+          export nest_joffset=72
+          export npx_nest=2881
+          export npy_nest=1921
+          export NEST_DLON=0.0325
+          export NEST_DLAT=0.0325
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=90
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=90
+        else
+          export DELTIM=150
+          export layout_x_gdas=8
+          export layout_y_gdas=12
+          export layout_x_gfs=16
+          export layout_y_gfs=16
+          #JKHexport layout_y_gfs=12
+          export nthreads_fv3_gdas=4
+          #JKHexport nthreads_fv3_gfs=4
+          export nthreads_fv3_gfs=2
+          export nthreads_ufs_gdas=4
+          export nthreads_ufs_gfs=2
+          export cdmbgwd="4.0,0.15,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
+          export cdmbgwd_gsl="2.5,7.5,1.0,1.0"   # settings for GSL drag suite
+          export knob_ugwp_tauamp=0.5e-3      # setting for UGWPv1 non-stationary GWD
+          export k_split=2
+          export n_split=4
+          export tau=3.0
+          export rf_cutoff=100.0
+          export fv_sg_adj=450
+          export WRITE_GROUP_GDAS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
+          #JKHexport WRITE_GROUP_GFS=4
+          #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
+          export WRITE_GROUP_GFS=2
+          export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2
+        fi
         ;;
     "C1152")
-        export DELTIM=120
-        export layout_x=8
-        export layout_y=16
+        export DELTIM=150
+        export layout_x_gdas=8
+        export layout_y_gdas=16
         export layout_x_gfs=8
         export layout_y_gfs=16
-        export nthreads_fv3=4
+        export nthreads_fv3_gdas=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs_gdas=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.10,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="1.67,8.8,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.35e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
+        export k_split=2
+        export n_split=6
+        export tau=2.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=450
+        export WRITE_GROUP_GDAS=4
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20  # TODO: refine these numbers when a case is available
         ;;
     "C3072")
         export DELTIM=90
         export layout_x=16
-        export layout_y=32
-        export layout_x_gfs=16
+        export layout_x_gdas=16
+        export layout_y_gdas=32
+        export layout_x_gfs=16
         export layout_y_gfs=32
-        export nthreads_fv3=4
+        export nthreads_fv3_gdas=4
         export nthreads_fv3_gfs=4
+        export nthreads_ufs_gdas=4
+        export nthreads_ufs_gfs=4
         export cdmbgwd="4.0,0.05,1.0,1.0"  # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="0.625,14.1,1.0,1.0"   # settings for GSL drag suite
         export knob_ugwp_tauamp=0.13e-3      # setting for UGWPv1 non-stationary GWD
-        export WRITE_GROUP=4
-        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
+        export k_split=4
+        export n_split=5
+        export tau=0.5
+        export rf_cutoff=100.0
+        export fv_sg_adj=300
+        export WRITE_GROUP_GDAS=4
+        export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10  # TODO: refine these numbers when a case is available
         export WRITE_GROUP_GFS=4
         export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10  # TODO: refine these numbers when a case is available
         ;;
@@ -240,19 +342,22 @@ case "${fv3_res}" in
         ;;
 esac
 
-(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 ))
+(( WRTTASK_PER_GROUP_PER_THREAD_GDAS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS * 6 ))
 (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))
-export WRTTASK_PER_GROUP_PER_THREAD
+export WRTTASK_PER_GROUP_PER_THREAD_GDAS
 export WRTTASK_PER_GROUP_PER_THREAD_GFS
 
-(( ntasks_fv3 = layout_x * layout_y * 6 ))
+(( ntasks_fv3_gdas = layout_x_gdas * layout_y_gdas * 6 ))
 (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))
-export ntasks_fv3
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest ))
+fi
+export ntasks_fv3_gdas
 export ntasks_fv3_gfs
 
-(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD ))
+(( ntasks_quilt_gdas = WRITE_GROUP_GDAS * WRTTASK_PER_GROUP_PER_THREAD_GDAS ))
 (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))
-export ntasks_quilt
+export ntasks_quilt_gdas
 export ntasks_quilt_gfs
 
 # Determine whether to use parallel NetCDF based on resolution
@@ -279,13 +384,19 @@ export cplice=".false."
 export cplchm=".false."
 export cplwav=".false."
 export cplwav2atm=".false."
-export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1"
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export CCPP_SUITE="${CCPP_SUITE:-FV3_global_nest_v1}"
+else
+  #JKH  export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}"
+  export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_mynn"
+fi
 model_list="atm"
 
 # Mediator specific settings
 if [[ "${skip_mediator}" == "false" ]]; then
   export cpl=".true."
-  export nthreads_mediator=${nthreads_fv3}  # Use same threads as FV3
+  export nthreads_mediator_gfs=${nthreads_fv3_gfs}  # Use same threads as FV3
+  export nthreads_mediator_gdas=${nthreads_fv3_gdas}
   export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1"  # TODO: Does this include FV3_GFS_v17_p8?  Can this be used instead of FV3_GFS_v17_p8?
 fi
 
@@ -307,9 +418,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
       CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
       MOM6_RESTART_SETTING='r'
       MOM6_RIVER_RUNOFF='False'
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+        MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_MISVAL="0.0"
+      fi
       eps_imesh="4.0e-1"
       MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc"
-      MOM6_DIAG_MISVAL="0.0"
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
       ;;
@@ -326,12 +441,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RIVER_RUNOFF='False'
       eps_imesh="2.5e-1"
       TOPOEDITS="ufs.topo_edits_011818.nc"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='True'
       ;;
@@ -347,12 +462,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RESTART_SETTING='n'
       MOM6_RIVER_RUNOFF='True'
       eps_imesh="1.0e-1"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
@@ -369,12 +484,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
       MOM6_RIVER_RUNOFF='True'
       MOM6_RESTART_SETTING="r"
       eps_imesh="1.0e-1"
-      if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
-        MOM6_DIAG_MISVAL="0.0"
-      else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
         MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
         MOM6_DIAG_MISVAL="-1e34"
+      else
+        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+        MOM6_DIAG_MISVAL="0.0"
       fi
       MOM6_ALLOW_LANDMASK_CHANGES='False'
       TOPOEDITS=""
@@ -457,6 +572,10 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "glo_025")
       ntasks_ww3=262
       ;;
+    "glo_100")
+      ntasks_ww3=20
+      nthreads_ww3=1
+      ;;
     "glo_200")
       ntasks_ww3=30
       nthreads_ww3=1
@@ -468,6 +587,14 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "mx025")
       ntasks_ww3=80
       ;;
+    "uglo_100km")
+      ntasks_ww3=40
+      nthreads_ww3=1
+      ;;
+    "uglo_m1g16")
+      ntasks_ww3=1000
+      nthreads_ww3=1
+      ;;
     *)
       echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!"
       exit 1
@@ -484,39 +611,45 @@ if [[ "${skip_gocart}" == "false" ]]; then
 fi
 
 # Set the name of the UFS (previously nems) configure template to use
+# Default ufs.configure templates for supported model configurations
+if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+  tmpl_suffix="_esmf"
+fi
 case "${model_list}" in
   atm)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
     ;;
   atm.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN"
     ;;
   atm.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
     ;;
   atm.ocean.ice.wave.aero)
-    export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN"
+    default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
     ;;
   *)
-    echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}"
+    echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
     exit 16
     ;;
 esac
 
+# Allow user to override the default template
+export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}
+unset model_list default_template
+
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
   exit 17
 fi
 
-unset model_list
-
 echo "END: config.ufs"
diff --git a/parm/config/gfs/config.upp b/parm/config/gfs/config.upp
index a1bd0a7d34..41015c2fee 100644
--- a/parm/config/gfs/config.upp
+++ b/parm/config/gfs/config.upp
@@ -8,7 +8,7 @@ echo "BEGIN: config.upp"
 # Get task specific resources
 . "${EXPDIR}/config.resources" upp
 
-export UPP_CONFIG="${HOMEgfs}/parm/post/upp.yaml"
+export UPP_CONFIG="${PARMgfs}/post/upp.yaml"
 
 # No. of forecast hours to process in a single job
 export NFHRS_PER_GROUP=3
diff --git a/parm/config/gfs/config.verfozn b/parm/config/gfs/config.verfozn
index 9eea0f25a3..df7d18012d 100644
--- a/parm/config/gfs/config.verfozn
+++ b/parm/config/gfs/config.verfozn
@@ -9,15 +9,14 @@ echo "BEGIN: config.verfozn"
 export DO_DATA_RPT=1
 export OZN_AREA="glb"
 export OZNMON_SUFFIX=${NET}
-export PARMmonitor=${PARMgfs}/monitor
-export SATYPE_FILE=${PARMmonitor}/gdas_oznmon_satype.txt
+export SATYPE_FILE=${PARMgfs}/monitor/gdas_oznmon_satype.txt
 
 # Source the parm file
-. "${PARMmonitor}/gdas_oznmon.parm"
+. "${PARMgfs}/monitor/gdas_oznmon.parm"
 
 # Set up validation file
 if [[ ${VALIDATE_DATA} -eq 1 ]]; then
-   export ozn_val_file=${PARMmonitor}/gdas_oznmon_base.tar
+   export ozn_val_file=${PARMgfs}/monitor/gdas_oznmon_base.tar
 fi
 
 echo "END: config.verfozn"
diff --git a/parm/config/gfs/config.verfrad b/parm/config/gfs/config.verfrad
index dd65020180..506ce50b4f 100644
--- a/parm/config/gfs/config.verfrad
+++ b/parm/config/gfs/config.verfrad
@@ -6,11 +6,10 @@ echo "BEGIN: config.verfrad"
 # Get task specific resources
 . "${EXPDIR}/config.resources" verfrad
 
-export PARMmonitor=${PARMgfs}/monitor
-export satype_file=${PARMmonitor}/gdas_radmon_satype.txt
+export satype_file=${PARMgfs}/monitor/gdas_radmon_satype.txt
 
 # Source the parm file
-. "${PARMmonitor}/da_mon.parm"
+. "${PARMgfs}/monitor/da_mon.parm"
 
 # Other variables
 export RAD_AREA="glb"
diff --git a/parm/config/gfs/config.vminmon b/parm/config/gfs/config.vminmon
index 8929c36e0e..7c7d362161 100644
--- a/parm/config/gfs/config.vminmon
+++ b/parm/config/gfs/config.vminmon
@@ -9,8 +9,7 @@ echo "BEGIN: config.vminmon"
 export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}}
 export CYCLE_INTERVAL=${assim_freq:-6}
 
-export PARMmonitor=${PARMgfs}/monitor
-export mm_gnormfile=${PARMmonitor}/${RUN}_minmon_gnorm.txt
-export mm_costfile=${PARMmonitor}/${RUN}_minmon_cost.txt
+export mm_gnormfile=${PARMgfs}/monitor/${RUN}_minmon_gnorm.txt
+export mm_costfile=${PARMgfs}/monitor/${RUN}_minmon_cost.txt
 
 echo "END: config.vminmon"
diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave
index acb4c518ba..db4eb9f708 100644
--- a/parm/config/gfs/config.wave
+++ b/parm/config/gfs/config.wave
@@ -6,26 +6,14 @@
 echo "BEGIN: config.wave"
 
 # Parameters that are common to all wave model steps
-
-# System and version
-export wave_sys_ver=v1.0.0
-
-export EXECwave="${HOMEgfs}/exec"
-export FIXwave="${HOMEgfs}/fix/wave"
-export PARMwave="${HOMEgfs}/parm/wave"
-export USHwave="${HOMEgfs}/ush"
-
 # This config contains variables/parameters used in the fcst step
 # Some others are also used across the workflow in wave component scripts
 
 # General runtime labels
-export CDUMPwave="${RUN}wave"
+export RUNwave="${RUN}wave"
 
 # In GFS/GDAS, restart files are generated/read from gdas runs
-export CDUMPRSTwave="gdas"
-
-# Grids for wave model
-export waveGRD=${waveGRD:-'mx025'}
+export RUNRSTwave="gdas"
 
 #grid dependent variable defaults
 export waveGRDN='1'                   # grid number for ww3_multi
@@ -68,6 +56,12 @@ case "${waveGRD}" in
     export wavepostGRD='glo_025'
     export waveuoutpGRD=${waveGRD}
     ;;
+  "glo_100")
+    #Global regular lat/lon 1 deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_100'
+    export waveuoutpGRD=${waveGRD}
+    ;;
   "glo_200")
     #Global regular lat/lon 2deg deg grid
     export waveinterpGRD=''
@@ -80,7 +74,19 @@ case "${waveGRD}" in
     export wavepostGRD='glo_500'
     export waveuoutpGRD=${waveGRD}
     ;;
-  *)
+  "uglo_100km")
+    #unstructured 100km grid
+    export waveinterpGRD='glo_200'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "uglo_m1g16")
+    #unstructured m1g16 grid
+    export waveinterpGRD='glo_15mxt'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  *)
     echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting."
     exit 1
     ;;
@@ -91,18 +97,12 @@ export WAVEWND_DID=
 export WAVEWND_FID=
 
 # The start time reflects the number of hindcast hours prior to the cycle initial time
-if [[ "${CDUMP}" = "gdas" ]]; then
-  export FHMAX_WAV=${FHMAX:-9}
-else
-  export FHMAX_WAV=${FHMAX_GFS}
+if [[ "${RUN}" == "gfs" ]]; then
+  export FHMAX_WAV=${FHMAX_WAV_GFS}
 fi
 export WAVHINDH=0
-export FHMIN_WAV=0
-export FHOUT_WAV=3
-export FHMAX_HF_WAV=120
-export FHOUT_HF_WAV=1
 export FHMAX_WAV_IBP=180
-if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi
+if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_WAV} ; fi
 
 # gridded and point output rate
 export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 ))
@@ -113,7 +113,7 @@ export FHINCP_WAV=$(( DTPNT_WAV / 3600 ))
 export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA"
 
 # Restart file config
-if [[ "${CDUMP}" = "gdas" ]]; then
+if [[ "${RUN}" == "gdas" ]]; then
   export WAVNCYC=4
   export WAVHCYC=${assim_freq:-6}
   export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days
@@ -128,7 +128,7 @@ fi
 # Restart timing business
 
 export RSTTYPE_WAV='T'          # generate second tier of restart files
-if [[ "${CDUMP}" != gfs ]]; then    # Setting is valid for GDAS and GEFS
+if [[ "${RUN}" != gfs ]]; then    # Setting is valid for GDAS and GEFS
   export DT_1_RST_WAV=10800       # time between restart files, set to DTRST=1 for a single restart file
   export DT_2_RST_WAV=43200       # restart stride for checkpointing restart
   export RSTIOFF_WAV=0            # first restart file offset relative to model start
@@ -136,8 +136,8 @@ else                              # This is a GFS run
   rst_dt_gfs=$(( restart_interval_gfs * 3600 ))  # TODO: This calculation needs to move to parsing_namelists_WW3.sh
   if [[ ${rst_dt_gfs} -gt 0 ]]; then
     export DT_1_RST_WAV=0 #${rst_dt_gfs:-0}   # time between restart files, set to DTRST=1 for a single restart file
-                                          #temporarily set to zero to avoid a clash in requested restart times 
-                                          #which makes the wave model crash a fix for the model issue will be coming
+                                              # temporarily set to zero to avoid a clash in requested restart times
+                                              # which makes the wave model crash; a fix for the model issue will be coming
     export DT_2_RST_WAV=${rst_dt_gfs:-0}   # restart stride for checkpointing restart
   else
     rst_dt_fhmax=$(( FHMAX_WAV * 3600 ))
@@ -149,15 +149,15 @@ fi
 #
 # Set runmember to default value if not GEFS cpl run
 #  (for a GFS coupled run, RUNMEN would be unset, this should default to -1)
-export RUNMEM=${RUNMEM:--1}
+export RUNMEM="-1"
 # Set wave model member tags if ensemble run
 # -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN
-if [[ ${RUNMEM} = -1 ]]; then
+if (( RUNMEM == -1 )); then
 # No suffix added to model ID in case of deterministic run
-  export waveMEMB=
+  export waveMEMB=""
 else
 # Extract member number only
-  export waveMEMB="${RUNMEM: -2}"
+  export waveMEMB="${RUNMEM}"
 fi
 
 # Determine if wave component needs input and/or is coupled
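A small worked example of the FHMAX_WAV_IBP cap introduced above; the 120 h forecast length is an assumed value (FHMAX_WAV_GFS is set elsewhere in the workflow):

    FHMAX_WAV=120                 # e.g. inherited from FHMAX_WAV_GFS when RUN=gfs
    FHMAX_WAV_IBP=180
    if (( FHMAX_WAV < FHMAX_WAV_IBP )); then FHMAX_WAV_IBP=${FHMAX_WAV}; fi
    echo "${FHMAX_WAV_IBP}"       # 120: boundary-point output never extends past the forecast length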
diff --git a/parm/config/gfs/config.wavepostbndpnt b/parm/config/gfs/config.wavepostbndpnt
index dfeddc79b2..412c5fb42a 100644
--- a/parm/config/gfs/config.wavepostbndpnt
+++ b/parm/config/gfs/config.wavepostbndpnt
@@ -6,6 +6,6 @@
 echo "BEGIN: config.wavepostbndpnt"
 
 # Get task specific resources
-. $EXPDIR/config.resources wavepostbndpnt
+source "${EXPDIR}/config.resources" wavepostbndpnt
 
 echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gfs/config.wavepostbndpntbll b/parm/config/gfs/config.wavepostbndpntbll
index bb7224cc70..6695ab0f84 100644
--- a/parm/config/gfs/config.wavepostbndpntbll
+++ b/parm/config/gfs/config.wavepostbndpntbll
@@ -6,6 +6,6 @@
 echo "BEGIN: config.wavepostbndpntbll"
 
 # Get task specific resources
-. $EXPDIR/config.resources wavepostbndpntbll
+source "${EXPDIR}/config.resources" wavepostbndpntbll
 
 echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gfs/config.wavepostpnt b/parm/config/gfs/config.wavepostpnt
index 8befb91760..e87237da82 100644
--- a/parm/config/gfs/config.wavepostpnt
+++ b/parm/config/gfs/config.wavepostpnt
@@ -6,6 +6,6 @@
 echo "BEGIN: config.wavepostpnt"
 
 # Get task specific resources
-. $EXPDIR/config.resources wavepostpnt
+source "${EXPDIR}/config.resources" wavepostpnt
 
 echo "END: config.wavepostpnt"
diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs
index 8e74aae069..82cec321da 100644
--- a/parm/config/gfs/config.wavepostsbs
+++ b/parm/config/gfs/config.wavepostsbs
@@ -6,14 +6,13 @@
 echo "BEGIN: config.wavepostsbs"
 
 # Get task specific resources
-. $EXPDIR/config.resources wavepostsbs
+source "${EXPDIR}/config.resources" wavepostsbs
 
 # Subgrid info for grib2 encoding
 export WAV_SUBGRBSRC=""
 export WAV_SUBGRB=""
 
 # Options for point output (switch on/off boundary point output)
-export DOIBP_WAV='NO' # Input boundary points
 export DOFLD_WAV='YES' # Field data
 export DOPNT_WAV='YES' # Station data
 export DOGRB_WAV='YES' # Create grib2 files
diff --git a/parm/config/gfs/config.waveprep b/parm/config/gfs/config.waveprep
index 1c9a40c1d8..1f746eab77 100644
--- a/parm/config/gfs/config.waveprep
+++ b/parm/config/gfs/config.waveprep
@@ -21,7 +21,7 @@ export WAV_CUR_CDO_SMOOTH="NO"
 export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8}
 
 if [ "${WW3ICEINP}" = "YES" ]; then
-  export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2
+  export WAVICEFILE=${RUN}.t${cyc}z.seaice.5min.grib2
 fi
 
 echo "END: config.waveprep"
diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml
index ade83fa484..da4d587dff 100644
--- a/parm/config/gfs/yaml/defaults.yaml
+++ b/parm/config/gfs/yaml/defaults.yaml
@@ -3,15 +3,36 @@ base:
   DO_JEDIATMVAR: "NO"
   DO_JEDIATMENS: "NO"
   DO_JEDIOCNVAR: "NO"
-  DO_JEDILANDDA: "NO"
+  DO_JEDISNOWDA: "NO"
   DO_MERGENSST: "NO"
   DO_GOES: "NO"
+  DO_BUFRSND: "NO"
+  DO_GEMPAK: "NO"
+  DO_AWIPS: "NO"
+  DO_NPOESS: "NO"
+  DO_TRACKER: "YES"
+  DO_GENESIS: "YES"
+  DO_GENESIS_FSU: "NO"
+  DO_METP: "YES"
+  FHMAX_GFS: 120
+  FHMAX_HF_GFS: 0
+  DO_VRFY_OCEANDA: "NO"
+  GSI_SOILANAL: "NO"
+  EUPD_CYC: "gdas"
+  FHMAX_ENKF_GFS: 12
 
 atmanl:
+  JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2"
+  STATICB_TYPE: "gsibec"
+  LAYOUT_X_ATMANL: 8
+  LAYOUT_Y_ATMANL: 8
   IO_LAYOUT_X: 1
   IO_LAYOUT_Y: 1
 
 atmensanl:
+  JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2"
+  LAYOUT_X_ATMENSANL: 8
+  LAYOUT_Y_ATMENSANL: 8
   IO_LAYOUT_X: 1
   IO_LAYOUT_Y: 1
 
@@ -19,18 +40,17 @@ aeroanl:
   IO_LAYOUT_X: 1
   IO_LAYOUT_Y: 1
 
-landanl:
+snowanl:
   IO_LAYOUT_X: 1
   IO_LAYOUT_Y: 1
 
 ocnanal:
-  SOCA_INPUT_FIX_DIR: "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca"  # TODO: These need to go to glopara fix space. @guillaumevernieres will open an issue
-  CASE_ANL: "C48"
-  COMIN_OBS: "/scratch2/NCEPDEV/marineda/r2d2-v2-v3"  # TODO: make platform agnostic
-  SOCA_OBS_LIST: "{{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml"
+  SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca"
+  SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml"  # TODO: This is also repeated in oceanprepobs
   SOCA_NINNER: 100
-  R2D2_OBS_SRC: "gdas_marine"
-  R2D2_OBS_DUMP: "s2s_v1"
-  SABER_BLOCKS_YAML: ""
-  NICAS_RESOL: 1
-  NICAS_GRID_SIZE: 15000
+
+prepoceanobs:
+  SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca"
+  SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml"  # TODO: This is also repeated in ocnanal
+  OBSPREP_YAML: "${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml"
+  DMPDIR: "/scratch1/NCEPDEV/global/glopara/data/experimental_obs"
diff --git a/parm/config/gfs/yaml/test_ci.yaml b/parm/config/gfs/yaml/test_ci.yaml
index bb9602be59..7425d4d029 100644
--- a/parm/config/gfs/yaml/test_ci.yaml
+++ b/parm/config/gfs/yaml/test_ci.yaml
@@ -1,4 +1,4 @@
 defaults:
-  !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml
+  !INC {{ PARMgfs }}/config/gfs/yaml/defaults.yaml
 base:
   ACCOUNT: "nems"
diff --git a/parm/gdas/aero_crtm_coeff.yaml b/parm/gdas/aero_crtm_coeff.yaml
deleted file mode 100644
index 75b54c3741..0000000000
--- a/parm/gdas/aero_crtm_coeff.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-mkdir:
-- {{ DATA }}/crtm/
-copy:
-- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm/]
-- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm/]
diff --git a/parm/gdas/aero_crtm_coeff.yaml.j2 b/parm/gdas/aero_crtm_coeff.yaml.j2
new file mode 100644
index 0000000000..b48d8ff231
--- /dev/null
+++ b/parm/gdas/aero_crtm_coeff.yaml.j2
@@ -0,0 +1,13 @@
+mkdir:
+- '{{ DATA }}/crtm/'
+copy:
+- ['{{ CRTM_FIX }}/AerosolCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/CloudCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin', '{{ DATA }}/crtm/']
+- ['{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin', '{{ DATA }}/crtm/']
diff --git a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml
deleted file mode 100644
index 85a00c3c30..0000000000
--- a/parm/gdas/aero_jedi_fix.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-mkdir:
-- !ENV ${DATA}/fv3jedi
-copy:
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/akbk$(npz).nc4
-  - !ENV ${DATA}/fv3jedi/akbk.nc4
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/fmsmpp.nml
-  - !ENV ${DATA}/fv3jedi/fmsmpp.nml
-- - !ENV ${FIXgdas}/fv3jedi/fv3files/field_table_gfdl
-  - !ENV ${DATA}/fv3jedi/field_table
-- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml
-  - !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml
diff --git a/parm/gdas/aero_jedi_fix.yaml.j2 b/parm/gdas/aero_jedi_fix.yaml.j2
new file mode 100644
index 0000000000..69039baddf
--- /dev/null
+++ b/parm/gdas/aero_jedi_fix.yaml.j2
@@ -0,0 +1,7 @@
+mkdir:
+- '{{ DATA }}/fv3jedi'
+copy:
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml']
diff --git a/parm/gdas/atm_crtm_coeff.yaml b/parm/gdas/atm_crtm_coeff.yaml.j2
similarity index 100%
rename from parm/gdas/atm_crtm_coeff.yaml
rename to parm/gdas/atm_crtm_coeff.yaml.j2
diff --git a/parm/gdas/atm_jedi_fix.yaml b/parm/gdas/atm_jedi_fix.yaml
deleted file mode 100644
index 3d1ca79f33..0000000000
--- a/parm/gdas/atm_jedi_fix.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-mkdir:
-- $(DATA)/fv3jedi
-copy:
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table]
-- [$(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml, $(DATA)/fv3jedi/fv3jedi_fieldmetadata_restart.yaml]
diff --git a/parm/gdas/atm_jedi_fix.yaml.j2 b/parm/gdas/atm_jedi_fix.yaml.j2
new file mode 100644
index 0000000000..a9cb2309f0
--- /dev/null
+++ b/parm/gdas/atm_jedi_fix.yaml.j2
@@ -0,0 +1,9 @@
+mkdir:
+- '{{ DATA }}/fv3jedi'
+copy:
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_history.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_history.yaml']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_fv3inc.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_fv3inc.yaml']
diff --git a/parm/gdas/land_jedi_fix.yaml b/parm/gdas/land_jedi_fix.yaml
deleted file mode 100644
index 3d1ca79f33..0000000000
--- a/parm/gdas/land_jedi_fix.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-mkdir:
-- $(DATA)/fv3jedi
-copy:
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table]
-- [$(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml, $(DATA)/fv3jedi/fv3jedi_fieldmetadata_restart.yaml]
diff --git a/parm/gdas/snow_jedi_fix.yaml.j2 b/parm/gdas/snow_jedi_fix.yaml.j2
new file mode 100644
index 0000000000..69039baddf
--- /dev/null
+++ b/parm/gdas/snow_jedi_fix.yaml.j2
@@ -0,0 +1,7 @@
+mkdir:
+- '{{ DATA }}/fv3jedi'
+copy:
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml']
+- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml']
diff --git a/parm/gdas/staging/atm_berror_gsibec.yaml.j2 b/parm/gdas/staging/atm_berror_gsibec.yaml.j2
new file mode 100644
index 0000000000..e6c5e41609
--- /dev/null
+++ b/parm/gdas/staging/atm_berror_gsibec.yaml.j2
@@ -0,0 +1,8 @@
+{% set fname_list = ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4'] %}
+
+mkdir:
+- '{{ DATA }}/berror'
+copy:
+{% for fname in fname_list %}
+- ['{{ HOMEgfs }}/fix/gdas/gsibec/{{ CASE_ANL }}/{{ fname }}', '{{ DATA }}/berror']
+{% endfor %}
diff --git a/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2
new file mode 100644
index 0000000000..eda3dad5a7
--- /dev/null
+++ b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2
@@ -0,0 +1,32 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+{% for imem in range(1,NMEM_ENS+1) %}
+   {% set memchar = 'mem%03d' | format(imem) %}
+   {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+                         '${RUN}': RUN,
+                         '${YMD}': current_cycle | to_YMD,
+                         '${HH}': current_cycle | strftime('%H'),
+                         '${MEMDIR}': memchar }) %}
+- '{{ DATA }}/bkg/{{ memchar }}'
+- '{{ DATA }}/anl/{{ memchar }}'
+- '{{ COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}'
+{% endfor %}
+copy:
+{% for time in time_list %}
+   {% for imem in range(1,NMEM_ENS+1) %}
+      {% set memchar = 'mem%03d' | format(imem) %}
+      {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+                            '${RUN}': 'enkfgdas',
+                            '${YMD}': previous_cycle | to_YMD,
+                            '${HH}': previous_cycle | strftime('%H'),
+                            '${MEMDIR}': memchar }) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/{{ memchar }}/']
+      {% for ftype in ftype_list %}
+         {% for itile in range(1,7) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/{{ memchar }}/']
+         {% endfor %}
+      {% endfor %}
+   {% endfor %}
+{% endfor %}
diff --git a/parm/gdas/staging/atm_var_bkg.yaml.j2 b/parm/gdas/staging/atm_var_bkg.yaml.j2
new file mode 100644
index 0000000000..37af833649
--- /dev/null
+++ b/parm/gdas/staging/atm_var_bkg.yaml.j2
@@ -0,0 +1,14 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+- '{{ DATA }}/bkg'
+copy:
+{% for time in time_list %}
+- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/']
+   {% for ftype in ftype_list %}
+      {% for itile in range(1,ntiles+1) %}
+- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/']
+      {% endfor %}
+   {% endfor %}
+{% endfor %}
diff --git a/parm/gdas/staging/atm_var_fv3ens.yaml.j2 b/parm/gdas/staging/atm_var_fv3ens.yaml.j2
new file mode 100644
index 0000000000..e499c86d57
--- /dev/null
+++ b/parm/gdas/staging/atm_var_fv3ens.yaml.j2
@@ -0,0 +1,24 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+{% for imem in range(1,NMEM_ENS+1) %}
+- '{{ DATA }}/ens/{{ 'mem%03d' | format(imem) }}'
+{% endfor %}
+copy:
+{% for time in time_list %}
+   {% for imem in range(1,NMEM_ENS+1) %}
+      {% set memchar = 'mem%03d' | format(imem) %}
+      {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+                            '${RUN}': 'enkfgdas',
+                            '${YMD}': previous_cycle | to_YMD,
+                            '${HH}': previous_cycle | strftime('%H'),
+                            '${MEMDIR}': memchar }) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/ens/{{ memchar }}/']
+      {% for ftype in ftype_list %}
+         {% for itile in range(1,ntiles+1) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/ens/{{ memchar }}/']
+         {% endfor %}
+      {% endfor %}
+   {% endfor %}
+{% endfor %}
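
In the ensemble staging templates above, the replace_tmpl filter substitutes the ${ROTDIR}, ${RUN}, ${YMD}, ${HH} and ${MEMDIR} placeholders of a COM template string with the per-member values collected in tmpl_dict. A rough stand-in for that filter, with a hypothetical template and values, shows the intended expansion (the actual filter is supplied by the workflow's Jinja2 environment):

# Rough stand-in for the 'replace_tmpl' Jinja2 filter used above (assumed
# behaviour): replace each '${KEY}' placeholder in a COM template string with
# the value supplied in tmpl_dict.
def replace_tmpl(template: str, tmpl_dict: dict) -> str:
    for key, value in tmpl_dict.items():
        template = template.replace(key, value)
    return template


# Hypothetical template and values, for illustration only.
com_atmos_restart_tmpl = "${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/restart"
tmpl_dict = {
    "${ROTDIR}": "/lfs/comroot",
    "${RUN}": "enkfgdas",
    "${YMD}": "20240101",
    "${HH}": "00",
    "${MEMDIR}": "mem001",
}
print(replace_tmpl(com_atmos_restart_tmpl, tmpl_dict))
# -> /lfs/comroot/enkfgdas.20240101/00/mem001/model_data/atmos/restart
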
diff --git a/parm/post/oceanice_products.yaml b/parm/post/oceanice_products.yaml
new file mode 100644
index 0000000000..48e5a5f204
--- /dev/null
+++ b/parm/post/oceanice_products.yaml
@@ -0,0 +1,75 @@
+ocnicepost:
+  executable: "ocnicepost.x"
+  namelist:
+    debug: False
+  fix_data:
+    mkdir:
+      - "{{ DATA }}"
+    copy:
+      - ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/{{ component }}.csv", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      {% for grid in product_grids %}
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"]
+      {% endfor %}
+
+nc2grib2:
+  script: "{{ USHgfs }}/oceanice_nc2grib2.sh"
+
+ocean:
+  namelist:
+    ftype: "ocean"
+    maskvar: "temp"
+    sinvar: "sin_rot"
+    cosvar: "cos_rot"
+    angvar: ""
+    {% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %}
+    ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267, 309, 374, 467, 594, 757, 960, 1204, 1490, 1817, 2184, 2587, 3024, 3489, 3977, 4481]
+    {% elif model_grid == 'mx500' %}
+    ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
+    {% endif %}
+  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+  data_in:
+    copy:
+      - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
+  data_out:
+    mkdir:
+      - "{{ COM_OCEAN_NETCDF }}"
+      {% for grid in product_grids %}
+      - "{{ COM_OCEAN_GRIB }}/{{ grid }}"
+      {% endfor %}
+    copy:
+      - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+      {% for grid in product_grids %}
+      - ["{{ DATA }}/ocean.{{ grid }}.grib2", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+      - ["{{ DATA }}/ocean.{{ grid }}.grib2.idx", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+      {% endfor %}
+
+ice:
+  namelist:
+    ftype: "ice"
+    maskvar: "tmask"
+    sinvar: ""
+    cosvar: ""
+    angvar: "ANGLET"
+  subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'frzmlt_h', 'albsni_h', 'mlt_onset_h', 'frz_onset_h']
+  data_in:
+    copy:
+      - ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
+  data_out:
+    mkdir:
+      - "{{ COM_ICE_NETCDF }}"
+      {% for grid in product_grids %}
+      - "{{ COM_ICE_GRIB }}/{{ grid }}"
+      {% endfor %}
+    copy:
+      - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+      {% for grid in product_grids %}
+      - ["{{ DATA }}/ice.{{ grid }}.grib2", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+      - ["{{ DATA }}/ice.{{ grid }}.grib2.idx", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+      {% endfor %}
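
parm/post/oceanice_products.yaml (and its GEFS counterpart below) mixes YAML with Jinja2 control structures, so it is rendered first and parsed second. A small self-contained sketch of that two-stage handling, using a short excerpt of the file and hypothetical stand-ins for the variables the workflow passes in at run time:

import jinja2
import yaml

# Short excerpt of the file above; context values are hypothetical.
excerpt = """\
ocean:
  data_out:
    mkdir:
      - "{{ COM_OCEAN_NETCDF }}"
      {% for grid in product_grids %}
      - "{{ COM_OCEAN_GRIB }}/{{ grid }}"
      {% endfor %}
"""

context = {
    "COM_OCEAN_NETCDF": "/com/gfs.20240101/00/products/ocean/netcdf",
    "COM_OCEAN_GRIB": "/com/gfs.20240101/00/products/ocean/grib2",
    "product_grids": ["0p25", "0p50", "1p00"],
}

rendered = jinja2.Template(excerpt, trim_blocks=True, lstrip_blocks=True).render(context)
config = yaml.safe_load(rendered)
print(config["ocean"]["data_out"]["mkdir"])
# ['/com/gfs.20240101/00/products/ocean/netcdf',
#  '/com/gfs.20240101/00/products/ocean/grib2/0p25', ...]
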
diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml
new file mode 100644
index 0000000000..74c0f0653b
--- /dev/null
+++ b/parm/post/oceanice_products_gefs.yaml
@@ -0,0 +1,73 @@
+ocnicepost:
+  executable: "ocnicepost.x"
+  namelist:
+    debug: False
+  fix_data:
+    mkdir:
+      - "{{ DATA }}"
+    copy:
+      - ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/{{ component }}.csv", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"]
+      {% for grid in product_grids %}
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"]
+      - ["{{ FIXgfs }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"]
+      {% endfor %}
+
+nc2grib2:
+  script: "{{ USHgfs }}/oceanice_nc2grib2.sh"
+
+ocean:
+  namelist:
+    ftype: "ocean"
+    maskvar: "temp"
+    sinvar: "sin_rot"
+    cosvar: "cos_rot"
+    angvar: ""
+    {% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %}
+    ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267, 309, 374, 467, 594, 757, 960, 1204, 1490, 1817, 2184, 2587, 3024, 3489, 3977, 4481]
+    {% elif model_grid == 'mx500' %}
+    ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
+    {% endif %}
+  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+  data_in:
+    copy:
+      - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
+  data_out:
+    mkdir:
+      - "{{ COM_OCEAN_NETCDF }}"
+      {% for grid in product_grids %}
+      - "{{ COM_OCEAN_GRIB }}/{{ grid }}"
+      {% endfor %}
+    copy:
+      - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+      {% for grid in product_grids %}
+      - ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
+      {% endfor %}
+
+ice:
+  namelist:
+    ftype: "ice"
+    maskvar: "tmask"
+    sinvar: ""
+    cosvar: ""
+    angvar: "ANGLET"
+  subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'frzmlt_h', 'albsni_h', 'mlt_onset_h', 'frz_onset_h']
+  data_in:
+    copy:
+      - ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
+  data_out:
+    mkdir:
+      - "{{ COM_ICE_NETCDF }}"
+      {% for grid in product_grids %}
+      - "{{ COM_ICE_GRIB }}/{{ grid }}"
+      {% endfor %}
+    copy:
+      - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+      {% for grid in product_grids %}
+      - ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
+      {% endfor %}
diff --git a/parm/post/upp.yaml b/parm/post/upp.yaml
index 651f3c12a8..a39e2be877 100644
--- a/parm/post/upp.yaml
+++ b/parm/post/upp.yaml
@@ -9,16 +9,16 @@ upp:
       - "{{ DATA }}"
     copy:
       - ["{{ 'g2tmpl_ROOT' | getenv }}/share/params_grib2_tbl_new", "{{ DATA }}/params_grib2_tbl_new"]
-      - ["{{ HOMEgfs }}/parm/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"]
-      - ["{{ HOMEgfs }}/exec/upp.x", "{{ DATA }}/"]
-      - ["{{ HOMEgfs }}/parm/post/itag.jinja", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"]
+      - ["{{ EXECgfs }}/upp.x", "{{ DATA }}/"]
+      - ["{{ PARMgfs }}/post/itag.jinja", "{{ DATA }}/"]
 
 analysis:
   config:
     rdaod: True
   data_in:
     copy:
-      - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-ANL.txt", "{{ DATA }}/postxconfig-NT.txt"]
+      - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-ANL.txt", "{{ DATA }}/postxconfig-NT.txt"]
       - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmanl.nc", "{{ DATA }}/{{ atmos_filename }}"]
       - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcanl.nc", "{{ DATA }}/{{ flux_filename }}"]
   data_out:
@@ -28,26 +28,26 @@ analysis:
 
 forecast:
   config:
-    rdaod: False
+    rdaod: True
   data_in:
     copy:
       {% if forecast_hour == 0 %}
-      - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-F00-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"]
+      - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-F00-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"]
       {% else %}
-      - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"]
+      - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"]
       {% endif %}
       - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"]
       - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"]
   data_out:
     copy:
-      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.masterf{{ '%03d' % forecast_hour }}.grb2"]
-      - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxf{{ '%03d' % forecast_hour }}.grb2"]
-      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.masterf{{ '%03d' % forecast_hour }}.grb2.idx"]
-      - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxf{{ '%03d' % forecast_hour }}.grb2.idx"]
+      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.master.grb2f{{ '%03d' % forecast_hour }}"]
+      - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxgrbf{{ '%03d' % forecast_hour }}.grib2"]
+      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.master.grb2if{{ '%03d' % forecast_hour }}"]
+      - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxgrbf{{ '%03d' % forecast_hour }}.grib2.idx"]
 
 goes:
   config:
-    rdaod: True
+    rdaod: False
   data_in:
     copy:
       {% set crtm_coefficients = [
@@ -81,10 +81,10 @@ goes:
       {% endfor %}
       - ["{{ 'CRTM_FIX' | getenv }}/AerosolCoeff.bin", "{{ DATA }}/"]
       - ["{{ 'CRTM_FIX' | getenv }}/CloudCoeff.bin", "{{ DATA }}/"]
-      - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-GOES.txt", "{{ DATA }}/postxconfig-NT.txt"]
+      - ["{{ PARMgfs }}/post/postxconfig-NT-GFS-GOES.txt", "{{ DATA }}/postxconfig-NT.txt"]
       - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"]
       - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"]
   data_out:
     copy:
-      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.goesmasterf{{ '%03d' % forecast_hour }}.grb2"]
-      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.goesmasterf{{ '%03d' % forecast_hour }}.grb2.idx"]
+      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2f{{ '%03d' % forecast_hour }}"]
+      - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.special.grb2if{{ '%03d' % forecast_hour }}"]
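
The bufr_ij9km.txt table added below lists the BUFR sounding station locations on the 9 km grid; each row appears to hold a station sequence number, the model grid i and j indices, and the station latitude and longitude (column meaning inferred from the values, not documented in the patch). A sketch for reading it:

# Read the station table below (column meaning is an inference: station
# number, grid i-index, grid j-index, latitude, longitude).
rows = []
with open("parm/product/bufr_ij9km.txt") as f:
    for line in f:
        num, i, j, lat, lon = line.split()
        rows.append((int(num), int(i), int(j), float(lat), float(lon)))

print(rows[0])   # (1, 2814, 261, 69.58, -140.18)
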
diff --git a/parm/product/bufr_ij9km.txt b/parm/product/bufr_ij9km.txt
new file mode 100644
index 0000000000..321026f3d1
--- /dev/null
+++ b/parm/product/bufr_ij9km.txt
@@ -0,0 +1,2115 @@
+     1  2814   261    69.58  -140.18
+     2  2958   257    69.90  -128.97
+     3  3063   261    69.58  -120.75
+     4  2919   320    65.00  -132.00
+     5  3072   320    65.00  -120.00
+     6  3297   319    65.10  -102.43
+     7  2816   384    60.00  -140.00
+     8  3072   384    60.00  -120.00
+     9  3328   384    60.00  -100.00
+    10  2983   422    57.00  -127.00
+    11  2708   253    70.20  -148.47
+    12  3249   465    53.63  -106.20
+    13  2588   293    67.10  -157.85
+    14  3263   461    53.99  -105.12
+    15  3097   475    52.88  -118.07
+    16  3239   352    62.50  -107.00
+    17  3200   590    43.90  -110.00
+    18  3361   566    45.80   -97.45
+    19  3385   595    43.50   -95.60
+    20  3421   727    33.22   -92.80
+    21  3395   614    42.04   -94.79
+    22  3419   613    42.11   -92.92
+    23  3506   715    34.10   -86.10
+    24  3523   701    35.20   -84.80
+    25  3533   706    34.80   -84.05
+    26  3572   682    36.72   -80.97
+    27  3158   533    48.32  -113.35
+    28  3236   612    42.20  -107.20
+    29  3272   605    42.76  -104.45
+    30  3298   610    42.30  -102.40
+    31  3330   607    42.60   -99.90
+    32  3249   651    39.10  -106.20
+    33  3471   674    37.30   -88.90
+    34  3423   637    40.20   -92.60
+    35  3353   705    34.90   -98.10
+    36  3263   533    48.31  -105.10
+    37  3458   714    34.25   -89.87
+    38  3261   638    40.13  -105.24
+    39  3368   672    37.48   -96.93
+    40  3530   692    35.96   -84.29
+    41  3323   603    42.86  -100.41
+    42  3401   627    41.02   -94.36
+    43  3613   614    42.04   -77.76
+    44  3591   651    39.17   -79.52
+    45  3464   746    31.71   -89.41
+    46  3579   577    44.92   -80.42
+    47  3283   572    45.27  -103.54
+    48  3268   579    44.79  -104.73
+    49  3279   583    44.46  -103.85
+    50  3268   597    43.35  -104.69
+    51  3282   595    43.53  -103.65
+    52  3381   569    45.50   -95.90
+    53  3412   587    44.10   -93.50
+    54  3112   700    35.34  -116.88
+    55  3487   712    34.40   -87.60
+    56  3058   544    47.50  -121.10
+    57  3038   582    44.50  -122.70
+    58  3136   589    44.00  -115.00
+    59  3056   552    46.90  -121.30
+    60  3066   587    44.10  -120.50
+    61  3055   605    42.70  -121.40
+    62  3191   737    32.42  -110.73
+    63  3361   683    36.61   -97.49
+    64  3056   567    45.68  -121.27
+    65  3053   567    45.72  -121.56
+    66  3062   567    45.68  -120.82
+    67  3070   567    45.72  -120.21
+    68  3050   665    38.06  -121.77
+    69  3043   677    37.10  -122.28
+    70  3041   668    37.82  -122.47
+    71  3040   666    37.94  -122.50
+    72  3058   677    37.07  -121.12
+    73  3158   637    40.20  -113.30
+    74  3662   635    40.42   -73.98
+    75  3651   676    37.20   -74.80
+    76  3658   690    36.05   -74.25
+    77  3620   732    32.80   -77.20
+    78  3664   658    38.60   -73.75
+    79  3492   778    29.20   -87.25
+    80  3515   559    46.31   -85.46
+    81  3494   549    47.18   -87.22
+    82  3475   602    42.97   -88.55
+    83  3680   577    44.94   -72.51
+    84  3558   675    37.27   -82.10
+    85  3558   675    90.00     0.00
+    86  2604   201    74.30  -156.60
+    87  2816   192    75.00  -140.00
+    88  2390   376    60.60  -173.30
+    89  2538   385    59.90  -161.75
+    90  2691   347    62.88  -149.83
+    91  2703   374    60.79  -148.83
+    92  2691   378    60.49  -149.79
+    93  2700   372    60.95  -149.14
+    94  2705   374    60.78  -148.72
+    95  3146   778    29.22  -114.28
+    96  3563   689    36.20   -81.65
+    97  3483   607    42.59   -87.94
+    98  3100   607    42.59  -117.87
+    99  3042   549    47.08  -122.36
+   100  3020   551    46.91  -124.11
+   101  3445   542    47.66   -90.91
+   102  3440   553    46.77   -91.25
+   103  2554   444    55.31  -160.52
+   104  3615   700    35.33   -77.60
+   105  3679   582    44.53   -72.61
+   106  3582   619    41.63   -80.21
+   107  3576   657    38.69   -80.65
+   108  3102   709    34.57  -117.67
+   109  3103   709    34.63  -117.61
+   110  3506   563    45.97   -86.17
+   111  3499   558    46.42   -86.65
+   112  3461   651    39.16   -89.67
+   113  3308   588    44.05  -101.60
+   114  3505   594    43.58   -86.24
+   115  3517   596    43.43   -85.30
+   116  3528   597    43.37   -84.44
+   117  3274   712    34.38  -104.23
+   118  3994   554    46.70   -48.00
+   119  3903   548    47.20   -55.10
+   120  3506   605    42.75   -86.10
+   121  3504   609    42.41   -86.28
+   122  3108   571    45.35  -117.23
+   123  3388   620    41.59   -95.34
+   124  3484   591    43.78   -87.85
+   125  3292   756    30.90  -102.85
+   126  3434   721    33.64   -91.75
+   127  3427   688    36.22   -92.28
+   128  3284   750    31.38  -103.51
+   129  3566   814    26.42   -81.44
+   130  3572   810    26.75   -80.94
+   131  3337   753    31.18   -99.32
+   132  3320   760    30.59  -100.65
+   133  3060   652    39.08  -120.94
+   134  3072   672    37.51  -120.04
+   135  3069   671    37.58  -120.27
+   136  3081   680    36.83  -119.33
+   137  3056   693    35.85  -121.31
+   138  3049   546    47.31  -121.85
+   139  3057   659    38.49  -121.22
+   140  3062   651    39.13  -120.80
+   141  3041   631    40.72  -122.43
+   142  3656   633    40.50   -74.45
+   143  3705   602    42.99   -70.62
+   144  3667   569    45.50   -73.57
+   145  3676   626    41.10   -72.89
+   146  3048   644    39.69  -121.91
+   147  3058   651    39.17  -121.11
+   148  3076   696    35.62  -119.69
+   149  3056   695    35.66  -121.28
+   150  3031   658    38.61  -123.21
+   151  3031   659    38.51  -123.22
+   152  3034   656    38.78  -122.99
+   153  3069   677    37.11  -120.24
+   154  3046   640    39.99  -122.06
+   155  3033   653    39.00  -123.12
+   156  3035   659    38.51  -122.96
+   157  3249   723    33.50  -106.18
+   158  3243   719    33.82  -106.65
+   159  3180   632    40.58  -111.63
+   160  3546   591    43.78   -82.99
+   161  2791   409    58.00  -142.00
+   162  2778   461    54.00  -143.00
+   163  2855   435    56.00  -137.00
+   164  2624   448    55.00  -155.00
+   165  2829   473    53.00  -139.00
+   166  2722   498    51.10  -147.40
+   167  2535   486    52.00  -162.00
+   168  2641   518    49.50  -153.70
+   169  2448   522    49.20  -168.80
+   170  2880   512    50.00  -135.00
+   171  2957   659    38.50  -129.00
+   172  2996   717    34.00  -126.00
+   173  3034   774    29.50  -123.00
+   174  3072   832    25.00  -120.00
+   175  3136   832    25.00  -115.00
+   176  2806   585    44.30  -140.80
+   177  2764   541    47.70  -144.10
+   178  2855   673    37.40  -137.00
+   179  2842   550    47.00  -138.00
+   180  2874   594    43.60  -135.50
+   181  2902   644    39.70  -133.30
+   182  2951   531    48.50  -129.50
+   183  2976   581    44.60  -127.50
+   184  2799   505    50.50  -141.38
+   185  2983   633    40.50  -126.98
+   186  2176   448    55.00   170.00
+   187  2304   448    55.00   180.00
+   188  2240   384    60.00   175.00
+   189  3203   604    42.80  -109.81
+   190  3045   651    39.15  -122.15
+   191  3051   629    40.88  -121.66
+   192  3068   666    37.99  -120.38
+   193  3091   669    37.74  -118.59
+   194  3084   689    36.20  -119.10
+   195  3091   691    35.97  -118.54
+   196  3086   706    34.83  -118.95
+   197  3068   689    36.14  -120.35
+   198  3092   683    36.65  -118.48
+   199  3670   608    42.47   -73.29
+   200  3556   650    39.21   -82.23
+   201  3182   634    40.48  -111.43
+   202  3146   729    33.02  -114.24
+   203  3197   749    31.49  -110.30
+   204  3228   742    32.02  -107.87
+   205  3273   762    30.43  -104.33
+   206  3325   789    28.39  -100.29
+   207  3344   812    26.57   -98.82
+   208  3380   784    28.71   -95.96
+   209  3435   770    29.81   -91.66
+   210  3565   836    24.70   -81.51
+   211  3750   922    17.98   -67.08
+   212  3051   561    46.19  -121.70
+   213  3043   559    46.28  -122.28
+   214  3048   571    45.35  -121.94
+   215  3046   587    44.17  -122.06
+   216  3340   575    45.03   -99.11
+   217  3367   570    45.46   -96.99
+   218  3390   662    38.28   -95.22
+   219  3372   642    39.86   -96.63
+   220  3242   584    44.38  -106.72
+   221  3225   582    44.52  -108.08
+   222  3453   575    45.10   -90.30
+   223  3427   583    44.46   -92.29
+   224  3238   675    37.29  -107.06
+   225  3219   663    38.23  -108.56
+   226  3228   639    40.05  -107.89
+   227  3437   619    41.64   -91.54
+   228  3258   652    39.05  -105.51
+   229  3247   639    40.05  -106.36
+   230  3487   656    38.76   -87.61
+   231  3475   652    39.07   -88.53
+   232  3451   628    40.94   -90.43
+   233  3453   643    39.77   -90.24
+   234  3479   647    39.48   -88.28
+   235  3331   596    43.39   -99.84
+   236  3296   601    43.02  -102.52
+   237  3273   590    43.89  -104.32
+   238  3294   582    44.56  -102.66
+   239  3255   592    43.74  -105.74
+   240  3467   561    46.15   -89.21
+   241  3490   556    46.54   -87.39
+   242  3508   554    46.68   -85.97
+   243  3470   555    46.61   -88.91
+   244  3495   794    28.00   -87.00
+   245  3409   791    28.20   -93.70
+   246  3427   787    28.50   -92.30
+   247  3444   795    27.90   -91.00
+   248  3469   794    28.00   -89.00
+   249  3520   794    28.00   -85.00
+   250  3597   755    31.00   -79.00
+   251  3776   537    48.00   -65.00
+   252  3182   649    39.30  -111.46
+   253  3269   682    36.74  -104.65
+   254  3623   678    36.99   -77.00
+   255  3624   667    37.86   -76.89
+   256  3635   659    38.54   -76.03
+   257  3613   681    36.77   -77.79
+   258  3727   591    43.78   -68.86
+   259  3584   660    38.40   -80.00
+   260  3104   703    35.10  -117.56
+   261  3285   597    43.37  -103.39
+   262  3284   583    44.41  -103.48
+   263  3478   612    42.21   -88.32
+   264  3481   634    40.46   -88.10
+   265  3586   732    32.78   -79.92
+   266  3585   734    32.66   -79.93
+   267  3573   742    32.03   -80.89
+   268  3105   538    47.97  -117.43
+   269  3570   688    36.22   -81.10
+   270  3549   608    42.47   -82.76
+   271  3537   591    43.80   -83.72
+   272  3549   588    44.02   -82.79
+   273  3493   618    41.69   -87.15
+   274  3481   619    41.61   -88.10
+   275  3577   685    36.46   -80.55
+   276  3162   676    37.20  -112.99
+   277  3184   662    38.29  -111.26
+   278  3174   656    38.74  -112.10
+   279  3088   676    37.20  -118.80
+   280  3287   542    47.61  -103.26
+   281  3367   668    37.80   -97.01
+   282  3401   709    34.60   -94.30
+   283  3390   667    37.90   -95.20
+   284  3309   659    38.50  -101.50
+   285  3262   673    37.40  -105.20
+   286  3261   691    36.00  -105.30
+   287  3307   714    34.20  -101.70
+   288  3337   722    33.60   -99.30
+   289  3362   721    33.70   -97.40
+   290  3427   677    37.10   -92.30
+   291  3239   627    41.00  -107.00
+   292  3147   560    46.25  -114.15
+   293  3168   550    47.00  -112.50
+   294  3149   525    49.00  -114.00
+   295  3226   518    49.50  -108.00
+   296  3277   525    49.00  -104.00
+   297  3341   544    47.50   -99.00
+   298  3360   561    46.20   -97.50
+   299  3392   563    46.00   -95.00
+   300  3531   630    40.80   -84.20
+   301  3584   665    38.00   -80.00
+   302  3552   710    34.50   -82.50
+   303  3354   618    41.73   -98.01
+   304  3388   620    41.58   -95.34
+   305  3367   611    42.24   -96.98
+   306  3344   669    37.70   -98.75
+   307  3349   528    48.75   -98.39
+   308  3368   528    48.75   -96.94
+   309  3376   583    44.46   -96.25
+   310  3390   599    43.17   -95.21
+   311  3336   599    43.22   -99.40
+   312  3344   598    43.26   -98.76
+   313  3359   628    40.90   -97.62
+   314  3331   630    40.79   -99.78
+   315  3359   638    40.15   -97.58
+   316  3337   643    39.73   -99.32
+   317  3452   681    36.77   -90.32
+   318  3490   674    37.36   -87.40
+   319  3513   652    39.05   -85.61
+   320  3551   669    37.75   -82.64
+   321  3537   683    36.61   -83.74
+   322  3522   680    36.86   -84.86
+   323  3534   665    38.06   -83.98
+   324  3306   626    41.12  -101.77
+   325  3321   615    41.96  -100.57
+   326  3204   594    43.55  -109.69
+   327  3188   605    42.71  -110.94
+   328  3193   617    41.82  -110.56
+   329  3216   578    44.87  -108.79
+   330  3220   577    44.91  -108.45
+   331  3244   592    43.71  -106.63
+   332  3459   595    43.52   -89.77
+   333  3476   592    43.77   -88.49
+   334  3453   603    42.89   -90.24
+   335  3479   601    43.04   -88.24
+   336  3465   588    44.04   -89.31
+   337  3467   623    41.35   -89.15
+   338  3497   618    41.70   -86.82
+   339  3484   626    41.07   -87.85
+   340  3468   616    41.89   -89.08
+   341  3434   570    45.42   -91.77
+   342  3399   575    45.10   -94.51
+   343  3376   579    44.73   -96.27
+   344  3300   639    40.10  -102.24
+   345  3293   656    38.76  -102.79
+   346  3495   572    45.29   -86.98
+   347  3459   584    44.36   -89.84
+   348  3659   640    39.99   -74.17
+   349  3509   669    37.70   -85.87
+   350  3353   670    37.67   -98.12
+   351  3354   675    37.28   -98.04
+   352  3383   662    38.30   -95.72
+   353  3376   667    37.85   -96.29
+   354  3351   656    38.75   -98.23
+   355  3663   646    39.55   -73.90
+   356  3692   644    39.70   -71.60
+   357  3385   677    37.09   -95.57
+   358  3358   661    38.35   -97.69
+   359  3337   674    37.35   -99.35
+   360  3343   700    35.30   -98.90
+   361  3367   698    35.50   -97.00
+   362  3396   704    34.98   -94.69
+   363  3308   683    36.60  -101.60
+   364  3601   608    42.50   -78.68
+   365  3640   592    43.76   -75.68
+   366  3443   738    32.35   -91.03
+   367  3567   821    25.86   -81.38
+   368  3555   783    28.84   -82.33
+   369  3547   741    32.07   -82.90
+   370  3485   746    31.71   -87.78
+   371  3472   724    33.45   -88.82
+   372  3441   705    34.89   -91.20
+   373  3507   698    35.48   -86.09
+   374  3484   696    35.62   -87.84
+   375  3517   679    36.95   -85.26
+   376  3290   722    33.62  -103.02
+   377  3325   701    35.21  -100.25
+   378  3317   651    39.13  -100.87
+   379  3439   678    37.01   -91.36
+   380  3408   661    38.37   -93.79
+   381  3425   647    39.42   -92.44
+   382  3597   709    34.61   -79.06
+   383  3706   619    41.65   -70.52
+   384  3574   724    33.46   -80.85
+   385  3444   569    45.50   -91.00
+   386  3415   564    45.89   -93.27
+   387  3424   574    45.15   -92.54
+   388  3380   573    45.23   -96.00
+   389  3455   588    44.03   -90.08
+   390  3043   623    41.32  -122.32
+   391  3077   607    42.55  -119.66
+   392  3062   597    43.33  -120.84
+   393  3028   622    41.39  -123.49
+   394  3054   622    41.43  -121.46
+   395  3108   571    45.36  -117.25
+   396  3086   584    44.40  -118.96
+   397  3615   687    36.33   -77.64
+   398  3588   705    34.89   -79.76
+   399  3566   634    40.47   -81.42
+   400  3591   629    40.82   -79.53
+   401  3596   632    40.63   -79.11
+   402  3593   645    39.58   -79.34
+   403  3581   638    40.14   -80.29
+   404  3648   608    42.46   -75.06
+   405  3309   682    36.68  -101.50
+   406  3498   727    33.17   -86.77
+   407  3466   752    31.27   -89.26
+   408  3566   790    28.29   -81.44
+   409  3561   783    28.82   -81.81
+   410  3387   763    30.36   -95.41
+   411  3386   759    30.73   -95.47
+   412  3303   576    45.02  -102.02
+   413  3332   778    29.21   -99.74
+   414  3349   693    35.87   -98.42
+   415  3474   628    40.92   -88.62
+   416  3673   620    41.56   -73.05
+   417  3120   529    48.69  -116.32
+   418  3089   529    48.65  -118.73
+   419  3064   541    47.76  -120.65
+   420  3069   531    48.49  -120.24
+   421  3122   543    47.54  -116.14
+   422  3035   631    40.73  -122.94
+   423  3026   628    40.94  -123.63
+   424  3033   636    40.34  -123.07
+   425  3031   643    39.75  -123.21
+   426  3416   684    36.54   -93.20
+   427  2426   337    63.68  -170.50
+   428  2666   280    68.13  -151.73
+   429  2410   336    63.77  -171.73
+   430  2480   379    60.37  -166.27
+   431  2543   427    56.65  -161.37
+   432  2041   460    54.05   159.43
+   433  3684   577    44.89   -72.23
+   434  3309   596    43.46  -101.50
+   435  3280   589    43.99  -103.79
+   436  3533   777    29.30   -84.04
+   437  3372   795    27.90   -96.64
+   438  3528   713    34.31   -84.42
+   439  3535   713    34.27   -83.83
+   440  3528   623    41.34   -84.43
+   441  3510   624    41.28   -85.84
+   442  3515   617    41.81   -85.44
+   443  3498   620    41.57   -86.73
+   444  3286   526    48.93  -103.30
+   445  3333   526    48.88   -99.62
+   446  3285   561    46.19  -103.43
+   447  3337   563    46.02   -99.35
+   448  3353   561    46.17   -98.07
+   449  3537   563    46.01   -83.74
+   450  3465   552    46.88   -89.32
+   451  3495   568    45.58   -87.00
+   452  3103   712    34.36  -117.63
+   453  3076   705    34.94  -119.69
+   454  3070   711    34.48  -120.23
+   455  3071   709    34.61  -120.08
+   456  3186   592    43.74  -111.10
+   457  3533   706    34.85   -84.00
+   458  3280   584    44.35  -103.77
+   459  3273   583    44.41  -104.36
+   460  3395   668    37.80   -94.77
+   461  3423   670    37.64   -92.65
+   462  3414   661    38.35   -93.34
+   463  3052   646    39.49  -121.61
+   464  3478   718    33.90   -88.33
+   465  3282   709    34.64  -103.63
+   466  3643   655    38.83   -75.43
+   467  3338   735    32.54   -99.25
+   468  3402   655    38.81   -94.26
+   469  3355   687    36.34   -97.92
+   470  3342   712    34.36   -98.98
+   471  3509   631    40.72   -85.93
+   472  3362   759    30.72   -97.38
+   473  3348   636    40.32   -98.44
+   474  3515   749    31.46   -85.46
+   475  3510   735    32.54   -85.79
+   476  3311   531    48.50  -101.40
+   477  3541   689    36.17   -83.40
+   478  3643   595    43.47   -75.46
+   479  3542   734    32.68   -83.35
+   480  3491   682    36.74   -87.29
+   481  3143   750    31.40  -114.49
+   482  3164   738    32.37  -112.87
+   483  3205   732    32.82  -109.68
+   484  3129   532    48.39  -115.55
+   485  3271   528    48.76  -104.52
+   486  3034   669    37.70  -123.00
+   487  3033   661    38.32  -123.07
+   488  3274   558    46.37  -104.28
+   489  3183   583    44.42  -111.37
+   490  3171   612    42.17  -112.28
+   491  3124   646    39.50  -115.95
+   492  3083   662    38.30  -119.16
+   493  3668   580    44.65   -73.49
+   494  3644   625    41.14   -75.38
+   495  3638   640    39.98   -75.82
+   496  3295   675    37.28  -102.61
+   497  3709   568    45.64   -70.26
+   498  3722   544    47.46   -69.22
+   499  3745   569    45.56   -67.43
+   500  3672   614    42.05   -73.20
+   501  3208   613    42.11  -109.45
+   502  3228   608    42.49  -107.83
+   503  3215   608    42.48  -108.84
+   504  3195   599    43.20  -110.40
+   505  3381   632    40.61   -95.87
+   506  3372   624    41.24   -96.59
+   507  3495   621    41.45   -87.01
+   508  3477   622    41.42   -88.41
+   509  3344   643    39.76   -98.79
+   510  3241   633    40.51  -106.87
+   511  3234   647    39.43  -107.38
+   512  3250   647    39.48  -106.15
+   513  3232   633    40.50  -107.52
+   514  3464   627    41.02   -89.39
+   515  3480   656    38.72   -88.18
+   516  3352   652    39.06   -98.17
+   517  3377   677    37.13   -96.19
+   518  3373   661    38.37   -96.54
+   519  3622   606    42.64   -77.05
+   520  3630   617    41.77   -76.45
+   521  3248   631    40.73  -106.28
+   522  3629   596    43.45   -76.51
+   523  3402   600    43.08   -94.27
+   524  3396   596    43.40   -94.75
+   525  3394   635    40.35   -94.92
+   526  3539   663    38.22   -83.59
+   527  3416   656    38.71   -93.18
+   528  3416   647    39.42   -93.13
+   529  3221   592    43.71  -108.39
+   530  3187   599    43.18  -111.04
+   531  3283   568    45.59  -103.55
+   532  2654   373    60.82  -152.72
+   533  2560   269    69.00  -160.00
+   534  2273   486    52.00   177.55
+   535  2484   422    57.00  -166.00
+   536  2649   355    62.22  -153.08
+   537  2596   387    59.73  -157.26
+   538  2723   360    61.89  -147.32
+   539  2483   304    66.27  -166.05
+   540  3108   715    34.10  -117.23
+   541  3407   632    40.63   -93.90
+   542  3191   724    33.40  -110.77
+   543  3165   717    33.97  -112.74
+   544  3635   655    38.80   -76.07
+   545  3653   626    41.05   -74.63
+   546  2766   349    62.72  -143.97
+   547  2764   285    67.75  -144.11
+   548  2761   312    65.59  -144.36
+   549  2676   359    61.95  -151.00
+   550  2630   419    57.27  -154.56
+   551  2714   305    66.15  -148.03
+   552  2774   294    67.03  -143.29
+   553  2546   317    65.20  -161.15
+   554  2671   387    59.75  -151.37
+   555  2575   340    63.39  -158.83
+   556  2893   423    56.97  -134.00
+   557  2531   282    67.95  -162.31
+   558  2684   376    60.59  -150.32
+   559  2696   379    60.37  -149.41
+   560  2783   367    61.32  -142.59
+   561  2667   383    60.03  -151.66
+   562  2577   281    68.07  -158.71
+   563  2566   364    61.58  -159.54
+   564  2746   345    63.03  -145.49
+   565  2501   315    65.41  -164.66
+   566  2645   340    63.44  -153.36
+   567  2874   393    59.25  -135.52
+   568  2905   440    55.58  -133.10
+   569  2677   339    63.49  -150.88
+   570  2774   346    62.97  -143.34
+   571  2792   332    64.05  -141.93
+   572  2633   296    66.85  -154.34
+   573  2661   306    66.08  -152.17
+   574  2606   363    61.64  -156.44
+   575  2605   262    69.50  -156.50
+   576  2701   269    69.00  -149.00
+   577  2701   285    67.75  -149.00
+   578  2612   315    65.34  -155.95
+   579  2617   331    64.10  -155.56
+   580  2675   329    64.31  -151.08
+   581  2558   380    60.32  -160.20
+   582  2512   357    62.10  -163.80
+   583  2536   251    70.40  -161.90
+   584  2604   239    71.32  -156.62
+   585  3742   610    42.35   -67.70
+   586  3687   649    39.30   -72.00
+   587  3768   649    39.30   -65.70
+   588  3711   674    37.30   -70.10
+   589  3654   687    36.30   -74.60
+   590  3664   696    35.60   -73.80
+   591  3702   705    34.90   -70.80
+   592  3636   719    33.80   -76.00
+   593  3683   732    32.80   -72.30
+   594  2938   536    48.10  -130.50
+   595  2988   536    48.10  -126.60
+   596  2948   572    45.30  -129.70
+   597  3001   572    45.30  -125.60
+   598  2946   617    41.75  -129.90
+   599  2998   616    41.90  -125.80
+   600  3002   650    39.20  -125.50
+   601  3003   686    36.40  -125.40
+   602  3008   726    33.30  -125.00
+   603  3043   709    34.60  -122.30
+   604  3053   756    30.90  -121.50
+   605  3111   773    29.60  -117.00
+   606  3635   671    37.54   -76.01
+   607  3239   730    32.99  -106.97
+   608  3203   740    32.15  -109.84
+   609  3534   606    42.63   -83.98
+   610  3532   602    42.99   -84.14
+   611  3544   613    42.10   -83.16
+   612  3532   616    41.87   -84.07
+   613  3539   588    44.02   -83.54
+   614  3540   596    43.46   -83.45
+   615  3541   615    41.94   -83.43
+   616  3287   632    40.61  -103.26
+   617  3280   636    40.34  -103.80
+   618  3411   639    40.08   -93.59
+   619  3418   634    40.48   -93.01
+   620  3401   637    40.25   -94.33
+   621  3419   631    40.68   -92.90
+   622  3422   618    41.71   -92.73
+   623  3415   608    42.47   -93.27
+   624  3476   657    38.66   -88.45
+   625  3487   652    39.02   -87.65
+   626  3465   646    39.53   -89.33
+   627  3211   598    43.31  -109.19
+   628  3226   619    41.67  -107.98
+   629  3188   595    43.50  -110.96
+   630  3352   647    39.47   -98.13
+   631  3401   667    37.85   -94.31
+   632  3473   615    41.93   -88.71
+   633  3193   673    37.44  -110.56
+   634  3242   634    40.45  -106.75
+   635  3362   674    37.32   -97.39
+   636  3241   673    37.45  -106.80
+   637  3233   588    44.03  -107.45
+   638  3188   613    42.08  -110.96
+   639  3225   655    38.79  -108.06
+   640  3243   635    40.35  -106.70
+   641  3471   777    29.30   -88.84
+   642  4508   741    32.13    -7.88
+   643  3678   616    41.89   -72.71
+   644   143   381    60.20    11.10
+   645   230   388    59.67    17.93
+   646   230   392    59.35    17.95
+   647   320   380    60.32    24.97
+   648  4593   382    60.13    -1.18
+   649  4550   429    56.50    -4.58
+   650  3879  1574   -33.00   -57.00
+   651  4586   490    51.68    -1.78
+   652  4588   489    51.75    -1.58
+   653  4605   495    51.29    -0.27
+   654  4527   483    52.25    -6.33
+   655  4319   333    63.97   -22.60
+   656  4522   358    62.02    -6.76
+   657   161   438    55.77    12.53
+   658    56   486    52.03     4.35
+   659    58   500    50.90     4.47
+   660    89   553    46.82     6.95
+   661    31   528    48.73     2.40
+   662  4501   597    43.37    -8.42
+   663  4595   619    41.67    -1.02
+   664  4537   676    37.17    -5.62
+   665  4315   938    16.75   -22.95
+   666   184   291    67.27    14.37
+   667  2323   243    70.97  -178.53
+   668  2311   270    68.92  -179.48
+   669  2435   305    66.17  -169.83
+   670  2182   324    64.68   170.42
+   671  2272   323    64.73   177.50
+   672  2296   345    63.05   179.32
+   673   814   471    53.21    63.55
+   674  4528   407    58.22    -6.32
+   675    17   478    52.63     1.32
+   676   856   509    50.22    66.83
+   677   937   514    49.80    73.15
+   678  4541   511    50.08    -5.25
+   679  4529   452    54.65    -6.22
+   680  3674   705    34.90   -73.00
+   681  3646   738    32.30   -75.20
+   682  3596   736    32.50   -79.07
+   683  3618   777    29.30   -77.40
+   684  3573   750    31.40   -80.87
+   685  3604   782    28.90   -78.50
+   686  3589   732    32.80   -79.62
+   687  3579   739    32.28   -80.41
+   688  3460   820    25.90   -89.70
+   689  3410   820    25.90   -93.60
+   690  3509   820    25.90   -85.90
+   691  3392   795    27.90   -95.00
+   692  3373   806    27.00   -96.50
+   693  3704   659    38.50   -70.70
+   694  3730   607    42.60   -68.60
+   695  3712   595    43.53   -70.14
+   696  3720   633    40.50   -69.40
+   697  3652   659    38.50   -74.70
+   698  3756   626    41.10   -66.60
+   699  3651   683    36.60   -74.80
+   700  3686   631    40.70   -72.10
+   701  3672   636    40.30   -73.20
+   702  3747   585    44.30   -67.30
+   703  3665   635    40.37   -73.70
+   704  3644   669    37.76   -75.33
+   705  3485   537    48.06   -87.78
+   706  3503   572    45.33   -86.42
+   707  3549   571    45.35   -82.84
+   708  3501   543    47.56   -86.55
+   709  3554   618    41.68   -82.40
+   710  3458   546    47.32   -89.87
+   711  3495   606    42.67   -87.02
+   712  3555   584    44.28   -82.42
+   713  3618   594    43.62   -77.41
+   714  3569   608    42.47   -81.22
+   715  3592   596    43.40   -79.45
+   716  3593   605    42.74   -79.35
+   717  3486   612    42.14   -87.66
+   718  2712   431    56.30  -148.20
+   719  2938   608    42.50  -130.50
+   720  2613   488    51.90  -155.90
+   721  2932   562    46.10  -131.00
+   722  2848   628    40.90  -137.50
+   723  3021   650    39.20  -124.00
+   724  3011   605    42.75  -124.82
+   725  3019   561    46.20  -124.20
+   726  3015   635    40.40  -124.50
+   727  2334   422    57.00  -177.70
+   728  3079   737    32.40  -119.50
+   729  3098   736    32.49  -118.03
+   730  1267   912    18.77    98.96
+   731  1316   941    16.47   102.78
+   732  1282   950    15.77   100.14
+   733  1343   957    15.25   104.87
+   734  1288   977    13.67   100.61
+   735  1288  1060     7.19   100.61
+   736  2531   852    23.40  -162.30
+   737  2589   932    17.20  -157.80
+   738  2550   905    19.30  -160.80
+   739  2656   929    17.40  -152.50
+   740   110   441    55.52     8.55
+   741    61   482    52.31     4.76
+   742    89   483    52.28     6.89
+   743  2674   387    59.77  -151.17
+   744    80   525    48.98     6.25
+   745  3354   772    29.70   -98.01
+   746  3500   688    36.25   -86.57
+   747  3385   689    36.18   -95.56
+   748  3085   701    35.24  -119.03
+   749  3454   658    38.62   -90.18
+   750  3275   660    38.46  -104.18
+   751  3564   621    41.50   -81.60
+   752  3166   600    43.11  -112.68
+   753  3126   544    47.47  -115.80
+   754    27   624    41.28     2.07
+   755  4554   635    40.42    -4.25
+   756  4602   646    39.50    -0.47
+   757  3318   685    36.50  -100.80
+   758  3307   764    30.30  -101.70
+   759  3296   797    27.70  -102.50
+   760  3340   762    30.50   -99.10
+   761  3345   788    28.40   -98.70
+   762  3344   815    26.30   -98.80
+   763  3362   840    24.40   -97.40
+   764  3389   820    25.90   -95.30
+   765  3400   838    24.50   -94.40
+   766  3385   854    23.30   -95.60
+   767  3403   918    18.30   -94.20
+   768  3417   878    21.40   -93.10
+   769  3436   854    23.30   -91.60
+   770  3473   849    23.70   -88.70
+   771  3520   858    23.00   -85.00
+   772  3354   851    23.50   -98.00
+   773  3438   563    46.02   -91.45
+   774  3549   655    38.83   -82.80
+   775  3603   654    38.88   -78.52
+   776  3600   645    39.62   -78.76
+   777  3623   645    39.61   -77.01
+   778   113   473    53.05     8.79
+   779   172   480    52.47    13.40
+   780    92   501    50.87     7.15
+   781   110   511    50.05     8.58
+   782   119   529    48.68     9.22
+   783   151   533    48.35    11.78
+   784   182   535    48.23    14.19
+   785   213   536    48.12    16.57
+   786   183   511    50.10    14.26
+   787   237   456    54.38    18.47
+   788   269   484    52.17    20.97
+   789   217   498    51.10    16.89
+   790   246   545    47.43    19.18
+   791   263   579    44.78    20.53
+   792   300   605    42.69    23.41
+   793   353   607    42.57    27.52
+   794   357   599    43.23    27.83
+   795   319   613    42.07    24.86
+   796   334   581    44.57    26.09
+   797   158   569    45.50    12.33
+   798   162   617    41.80    12.60
+   799   157   617    41.80    12.23
+   800   310   697    35.53    24.15
+   801   373   627    40.97    29.08
+   802   423   638    40.13    33.00
+   803   515   669    37.75    40.20
+   804   426   702    35.15    33.28
+   805  2391   327    64.43  -173.23
+   806   387   389    59.58    30.18
+   807   481   438    55.75    37.57
+   808  1062   447    55.03    82.90
+   809  1689   600    43.12   131.90
+   810   985   599    43.23    76.93
+   811   576   618    41.68    44.95
+   812   954   601    43.07    74.47
+   813   887   624    41.27    69.27
+   814   461   743    31.98    35.98
+   815   451   744    31.87    35.22
+   816   599   833    24.88    46.77
+   817   599   836    24.72    46.72
+   818   615   778    29.22    47.98
+   819   553   687    36.32    43.15
+   820   567   727    33.22    44.23
+   821   612   761    30.57    47.78
+   822   628   794    28.00    49.00
+   823   640   806    27.00    50.00
+   824   657   695    35.68    51.32
+   825   787   754    31.05    61.47
+   826   903   711    34.42    70.47
+   827   843   751    31.31    65.85
+   828   661   829    25.25    51.57
+   829   709   829    25.25    55.33
+   830   700   839    24.42    54.65
+   831  3728   172    76.53   -68.75
+   832   989   786    28.58    77.20
+   833  1067   925    17.72    83.30
+   834  1372   599    43.20   107.17
+   835  1093   797    27.70    85.37
+   836  1462   866    22.32   114.17
+   837  1556   832    25.03   121.52
+   838  1483  1015    10.72   115.83
+   839  1624   671    37.55   126.80
+   840  1651   702    35.18   128.93
+   841  1810   631    40.70   141.37
+   842  1753   701    35.25   136.93
+   843  1790   697    35.55   139.78
+   844  1797   694    35.76   140.38
+   845  1735   708    34.68   135.53
+   846  1284  1084     5.30   100.27
+   847  1303  1117     2.75   101.72
+   848  1328  1134     1.38   103.72
+   849  1344   860    22.82   104.97
+   850  1355   883    21.02   105.80
+   851  1366  1013    10.82   106.67
+   852  1622   490    51.72   126.65
+   853  1491   642    39.80   116.47
+   854  1541   690    36.07   120.33
+   855  1332   759    30.67   104.02
+   856  1395   713    34.30   108.93
+   857  1458   710    34.52   113.83
+   858  1555   753    31.17   121.43
+   859  4412   794    27.93   -15.38
+   860  4510   722    33.57    -7.67
+   861  4506   747    31.62    -8.03
+   862    71   860    22.82     5.47
+   863    28   979    13.48     2.17
+   864  4570   938    16.72    -3.00
+   865  4384   963    14.73   -17.50
+   866  4393   981    13.35   -16.80
+   867   927  1245    -7.30    72.42
+   868   299   842    24.22    23.30
+   869   349   751    31.33    27.22
+   870   472  1168    -1.28    36.83
+   871   475  1196    -3.42    37.07
+   872   490  1215    -4.92    38.23
+   873   198  1208    -4.38    15.45
+   874    43  1068     6.58     3.33
+   875  4606  1080     5.60    -0.17
+   876  4512  1058     7.38    -7.53
+   877  4558  1085     5.25    -3.93
+   878  4476  1072     6.23   -10.37
+   879   170  1265    -8.85    13.23
+   880   398  1464   -24.37    31.05
+   881   239  1587   -33.97    18.60
+   882  2602   239    71.30  -156.78
+   883  2770   254    70.13  -143.63
+   884  2482   270    68.88  -166.13
+   885  2527   296    66.87  -162.63
+   886  2661   264    69.37  -152.13
+   887  2669   295    66.92  -151.52
+   888  2662   318    65.17  -152.10
+   889  2749   300    66.57  -145.27
+   890  2491   326    64.50  -165.43
+   891  2550   334    63.88  -160.80
+   892  2483   361    61.78  -166.03
+   893  2537   374    60.78  -161.80
+   894  2600   323    64.73  -156.93
+   895  2617   346    62.97  -155.62
+   896  2612   347    62.90  -155.98
+   897  2617   370    61.10  -155.58
+   898  2687   354    62.30  -150.10
+   899  2673   359    61.97  -151.18
+   900  2672   377    60.57  -151.25
+   901  2716   322    64.82  -147.87
+   902  2726   324    64.67  -147.10
+   903  2744   333    63.97  -145.70
+   904  2743   333    64.00  -145.73
+   905  2747   356    62.15  -145.45
+   906  2691   368    61.25  -149.80
+   907  2688   369    61.17  -150.02
+   908  2700   363    61.60  -149.08
+   909  2735   369    61.13  -146.35
+   910  2696   382    60.12  -149.45
+   911  2802   323    64.78  -141.15
+   912  2792   346    62.97  -141.93
+   913  2746   377    60.50  -145.50
+   914  2534   401    58.65  -162.07
+   915  2430   420    57.15  -170.22
+   916  2526   445    55.20  -162.73
+   917  2579   396    59.05  -158.52
+   918  2603   401    58.68  -156.65
+   919  2626   387    59.75  -154.92
+   920  2669   389    59.63  -151.50
+   921  2735   391    59.43  -146.33
+   922  2656   413    57.75  -152.50
+   923  2821   390    59.52  -139.67
+   924  2877   391    59.47  -135.30
+   925  2871   404    58.42  -135.73
+   926  2876   421    57.07  -135.35
+   927  2886   405    58.37  -134.58
+   928  2906   425    56.82  -132.97
+   929  2914   429    56.48  -132.37
+   930  2923   443    55.35  -131.70
+   931  2924   447    55.03  -131.57
+   932  2229   477    52.72   174.12
+   933  2347   488    51.88  -176.65
+   934  2447   474    52.95  -168.85
+   935  2477   462    53.90  -166.55
+   936  2931   487    51.93  -131.02
+   937  2986   316    65.28  -126.75
+   938  3005   230    72.00  -125.28
+   939  3105   432    56.23  -117.43
+   940  3081   176    76.23  -119.33
+   941  3315   424    56.87  -101.08
+   942  3356   438    55.75   -97.87
+   943  3568   271    68.78   -81.25
+   944  3811    96    82.50   -62.33
+   945  3042   524    49.03  -122.37
+   946  2978   503    50.68  -127.37
+   947  3148   466    53.55  -114.10
+   948  3155   470    53.30  -113.58
+   949  3219   459    54.13  -108.52
+   950  3185   508    50.27  -111.18
+   951  3277   469    53.33  -104.00
+   952  3693   552    46.90   -71.50
+   953  3851   543    47.57   -59.17
+   954  3080   513    49.95  -119.40
+   955  2944   404    58.42  -130.00
+   956  3585   599    43.17   -79.93
+   957  3580   595    43.47   -80.38
+   958  3796   577    44.88   -63.50
+   959  3778   576    44.98   -64.92
+   960  3773   592    43.72   -65.25
+   961  3184   310    65.77  -111.25
+   962  3546   611    42.27   -82.97
+   963  3840   590    43.93   -60.02
+   964  3796   581    44.63   -63.50
+   965  3762   590    43.87   -66.10
+   966  3765   572    45.32   -65.88
+   967  3570   601    43.03   -81.15
+   968  3589   593    43.67   -79.63
+   969  3619   564    45.95   -77.32
+   970  3664   570    45.47   -73.75
+   971  3661   567    45.68   -74.03
+   972  3640   572    45.32   -75.67
+   973  3605   586    44.23   -78.37
+   974  3593   576    44.97   -79.30
+   975  3570   579    44.75   -81.10
+   976  3758   565    45.83   -66.43
+   977  3781   562    46.12   -64.68
+   978  3800   559    46.28   -63.13
+   979  3840   561    46.17   -60.05
+   980  3695   553    46.80   -71.40
+   981  3636   558    46.38   -75.97
+   982  3613   537    48.05   -77.78
+   983  3592   558    46.37   -79.42
+   984  3523   538    47.97   -84.78
+   985  3567   530    48.57   -81.37
+   986  3484   483    52.23   -87.88
+   987  3465   533    48.37   -89.32
+   988  3029   529    48.65  -123.43
+   989  3934   542    47.62   -52.73
+   990  3910   525    48.95   -54.57
+   991  3874   522    49.22   -57.40
+   992  3760   509    50.22   -66.27
+   993  3859   531    48.53   -58.55
+   994  3835   469    53.32   -60.42
+   995  3655   515    49.77   -74.53
+   996  3666   464    53.75   -73.67
+   997  3496   515    49.78   -86.93
+   998  3576   496    51.27   -80.65
+   999  3577   495    51.28   -80.60
+  1000  3454   493    51.45   -90.20
+  1001  3458   463    53.83   -89.87
+  1002  3401   515    49.78   -94.37
+  1003  3364   513    49.90   -97.23
+  1004  3333   515    49.78   -99.65
+  1005  3307   452    54.68  -101.68
+  1006  3269   506    50.43  -104.67
+  1007  3243   484    52.17  -106.68
+  1008  3314   461    53.97  -101.10
+  1009  3256   471    53.22  -105.68
+  1010  3191   512    50.02  -110.72
+  1011  3165   517    49.63  -112.80
+  1012  3149   498    51.12  -114.02
+  1013  3127   517    49.62  -115.78
+  1014  3103   521    49.30  -117.63
+  1015  3032   522    49.18  -123.17
+  1016  3010   515    49.72  -124.90
+  1017  3038   462    53.88  -122.68
+  1018  3733   408    58.10   -68.42
+  1019  3609   404    58.45   -78.12
+  1020  3731   336    63.75   -68.53
+  1021  3404   400    58.75   -94.07
+  1022  3541   330    64.20   -83.37
+  1023  3509   128    79.98   -85.93
+  1024  3237   418    57.35  -107.13
+  1025  3393   195    74.72   -94.95
+  1026  3263   267    69.10  -105.12
+  1027  3380   329    64.30   -96.00
+  1028  3175   451    54.77  -112.02
+  1029  3176   384    60.02  -111.95
+  1030  3144   352    62.50  -114.40
+  1031  3039   399    58.83  -122.58
+  1032  3057   361    61.80  -121.20
+  1033  2960   382    60.12  -128.82
+  1034  2899   277    68.32  -133.53
+  1035  2880   375    60.72  -135.07
+  1036  2828   332    64.05  -139.13
+  1037  2819   287    67.57  -139.82
+  1038  3562   838    24.55   -81.75
+  1039  3571   835    24.73   -81.05
+  1040  3581   821    25.82   -80.28
+  1041  3581   820    25.90   -80.28
+  1042  3583   818    26.07   -80.15
+  1043  3579   824    25.65   -80.43
+  1044  3583   810    26.68   -80.12
+  1045  3582   817    26.20   -80.17
+  1046  3576   792    28.10   -80.65
+  1047  3579   798    27.65   -80.42
+  1048  3568   788    28.43   -81.32
+  1049  3571   778    29.18   -81.05
+  1050  3563   762    30.50   -81.69
+  1051  3560   765    30.22   -81.88
+  1052  3569   741    32.13   -81.19
+  1053  3576   739    32.22   -80.70
+  1054  3584   731    32.90   -80.03
+  1055  3554   797    27.70   -82.38
+  1056  3580   800    27.50   -80.37
+  1057  3561   812    26.58   -81.87
+  1058  3562   812    26.53   -81.75
+  1059  3552   794    27.97   -82.53
+  1060  3552   801    27.40   -82.55
+  1061  3550   795    27.92   -82.68
+  1062  3559   794    27.99   -82.02
+  1063  3545   773    29.62   -83.10
+  1064  3554   752    31.25   -82.40
+  1065  3566   752    31.25   -81.47
+  1066  3567   753    31.15   -81.37
+  1067  3529   763    30.38   -84.37
+  1068  3555   772    29.68   -82.27
+  1069  3526   725    33.36   -84.57
+  1070  3531   748    31.53   -84.18
+  1071  3543   758    30.78   -83.28
+  1072  3538   733    32.70   -83.65
+  1073  3559   725    33.37   -81.97
+  1074  3528   721    33.65   -84.42
+  1075  3527   720    33.78   -84.52
+  1076  3529   718    33.88   -84.30
+  1077  3521   771    29.73   -84.98
+  1078  3509   761    30.56   -85.92
+  1079  3493   762    30.47   -87.18
+  1080  3479   759    30.68   -88.25
+  1081  3481   760    30.63   -88.07
+  1082  3512   765    30.22   -85.68
+  1083  3523   738    32.33   -84.83
+  1084  3521   736    32.52   -84.93
+  1085  3503   738    32.30   -86.40
+  1086  3515   751    31.32   -85.45
+  1087  3498   722    33.57   -86.75
+  1088  3487   727    33.22   -87.62
+  1089  3510   722    33.58   -85.85
+  1090  3492   731    32.90   -87.25
+  1091  3414   771    29.78   -93.30
+  1092  3498   727    33.17   -86.77
+  1093  3454   779    29.10   -90.20
+  1094  3453   768    29.98   -90.25
+  1095  3432   768    30.03   -91.88
+  1096  3456   767    30.05   -90.03
+  1097  3442   761    30.53   -91.15
+  1098  3464   776    29.33   -89.40
+  1099  3459   764    30.33   -89.82
+  1100  3472   738    32.33   -88.75
+  1101  3465   749    31.47   -89.33
+  1102  3455   738    32.32   -90.08
+  1103  3444   723    33.48   -90.98
+  1104  3450   753    31.18   -90.47
+  1105  3455   723    33.50   -90.08
+  1106  3416   754    31.05   -93.20
+  1107  3415   766    30.12   -93.22
+  1108  3431   765    30.20   -91.98
+  1109  3405   769    29.95   -94.02
+  1110  3395   777    29.30   -94.80
+  1111  3388   768    29.97   -95.35
+  1112  3389   772    29.65   -95.28
+  1113  3375   760    30.58   -96.37
+  1114  3396   752    31.23   -94.75
+  1115  3387   738    32.34   -95.40
+  1116  3397   738    32.34   -94.65
+  1117  3408   736    32.47   -93.82
+  1118  3431   736    32.52   -92.03
+  1119  3427   750    31.40   -92.30
+  1120  3363   732    32.83   -97.30
+  1121  3361   820    25.90   -97.43
+  1122  3359   816    26.23   -97.65
+  1123  3351   817    26.18   -98.23
+  1124  3360   796    27.77   -97.50
+  1125  3354   797    27.73   -98.03
+  1126  3335   799    27.55   -99.47
+  1127  3386   779    29.12   -95.47
+  1128  3348   774    29.53   -98.47
+  1129  3358   764    30.30   -97.70
+  1130  3368   783    28.85   -96.92
+  1131  3364   747    31.62   -97.22
+  1132  3356   754    31.07   -97.83
+  1133  3369   732    32.84   -96.85
+  1134  3387   721    33.63   -95.45
+  1135  3367   731    32.90   -97.03
+  1136  3096   703    35.07  -118.15
+  1137  3362   732    32.82   -97.37
+  1138  3352   739    32.22   -98.18
+  1139  3317   776    29.37  -100.92
+  1140  3267   744    31.83  -104.80
+  1141  3322   750    31.37  -100.50
+  1142  3296   691    36.02  -102.55
+  1143  3277   763    30.37  -104.02
+  1144  3301   743    31.95  -102.18
+  1145  3288   745    31.78  -103.20
+  1146  3333   737    32.41   -99.68
+  1147  3305   721    33.65  -101.82
+  1148  3271   726    33.30  -104.53
+  1149  3274   738    32.33  -104.27
+  1150  3287   734    32.68  -103.22
+  1151  3249   739    32.24  -106.22
+  1152  3240   739    32.28  -106.92
+  1153  3247   745    31.80  -106.40
+  1154  3235   727    33.23  -107.27
+  1155  3230   739    32.27  -107.72
+  1156  3196   748    31.57  -110.33
+  1157  3206   749    31.47  -109.60
+  1158  3189   741    32.12  -110.93
+  1159  3175   724    33.43  -112.02
+  1160  3142   734    32.65  -114.60
+  1161  3103   716    34.05  -117.60
+  1162  3117   719    33.83  -116.50
+  1163  3094   714    34.20  -118.35
+  1164  3092   714    34.22  -118.48
+  1165  3064   701    35.23  -120.63
+  1166  3109   733    32.73  -117.17
+  1167  3111   735    32.57  -116.98
+  1168  3080   726    33.25  -119.45
+  1169  3093   724    33.40  -118.42
+  1170  3107   728    33.13  -117.28
+  1171  3109   731    32.85  -117.12
+  1172  3093   719    33.93  -118.40
+  1173  3096   719    33.82  -118.15
+  1174  3100   721    33.68  -117.87
+  1175  3611   713    34.27   -77.90
+  1176  3597   702    35.17   -79.02
+  1177  3599   704    34.98   -78.87
+  1178  3641   700    35.27   -75.55
+  1179  3625   707    34.78   -76.87
+  1180  3600   693    35.87   -78.78
+  1181  3618   696    35.64   -77.39
+  1182  3610   700    35.33   -77.97
+  1183  3611   693    35.84   -77.90
+  1184  3615   706    34.82   -77.61
+  1185  3686   681    36.82   -72.10
+  1186  3633   680    36.90   -76.19
+  1187  3629   677    37.13   -76.50
+  1188  3624   705    34.90   -76.88
+  1189  3622   703    35.07   -77.05
+  1190  3570   717    33.95   -81.12
+  1191  3588   714    34.18   -79.72
+  1192  3542   717    33.95   -83.32
+  1193  3554   706    34.84   -82.35
+  1194  3556   705    34.90   -82.22
+  1195  3550   710    34.50   -82.72
+  1196  3573   701    35.22   -80.93
+  1197  3567   695    35.73   -81.37
+  1198  3552   698    35.43   -82.55
+  1199  3585   690    36.08   -79.94
+  1200  3579   676    37.21   -80.41
+  1201  3554   685    36.48   -82.40
+  1202  3582   689    36.13   -80.22
+  1203  3518   712    34.35   -85.16
+  1204  3498   708    34.65   -86.77
+  1205  3487   707    34.75   -87.62
+  1206  3518   704    35.03   -85.20
+  1207  3534   693    35.82   -83.98
+  1208  3519   692    35.95   -85.08
+  1209  3499   689    36.13   -86.68
+  1210  3472   713    34.27   -88.77
+  1211  3456   703    35.05   -90.00
+  1212  3470   696    35.59   -88.92
+  1213  3428   706    34.83   -92.25
+  1214  3428   707    34.73   -92.23
+  1215  3448   693    35.83   -90.65
+  1216  3417   711    34.48   -93.10
+  1217  3432   714    34.18   -91.93
+  1218  3406   724    33.45   -93.98
+  1219  3421   727    33.22   -92.80
+  1220  3401   700    35.33   -94.37
+  1221  3403   691    36.00   -94.17
+  1222  3416   688    36.27   -93.15
+  1223  3425   689    36.20   -92.47
+  1224  3435   695    35.73   -91.65
+  1225  3432   680    36.88   -91.90
+  1226  3462   675    37.23   -89.57
+  1227  3405   679    36.91   -94.02
+  1228  3399   676    37.15   -94.50
+  1229  3348   717    33.98   -98.50
+  1230  3341   704    34.98   -99.05
+  1231  3339   700    35.33   -99.20
+  1232  3331   687    36.30   -99.77
+  1233  3359   699    35.40   -97.60
+  1234  3366   682    36.73   -97.10
+  1235  3349   709    34.60   -98.40
+  1236  3367   713    34.30   -97.02
+  1237  3381   689    36.20   -95.90
+  1238  3380   681    36.76   -96.01
+  1239  3383   705    34.88   -95.78
+  1240  3361   701    35.23   -97.47
+  1241  3288   685    36.45  -103.15
+  1242  3325   711    34.43  -100.28
+  1243  3216   697    35.52  -108.78
+  1244  3307   701    35.23  -101.70
+  1245  3243   744    31.87  -106.70
+  1246  3244   703    35.05  -106.62
+  1247  3251   696    35.62  -106.08
+  1248  3223   681    36.75  -108.23
+  1249  3282   702    35.18  -103.60
+  1250  3263   696    35.65  -105.15
+  1251  3150   700    35.27  -113.95
+  1252  3182   679    36.93  -111.45
+  1253  3169   710    34.53  -112.47
+  1254  3170   708    34.65  -112.42
+  1255  3191   704    35.02  -110.73
+  1256  3200   713    34.27  -110.00
+  1257  3208   710    34.51  -109.38
+  1258  3179   702    35.13  -111.67
+  1259  3177   701    35.23  -111.82
+  1260  3173   692    35.95  -112.15
+  1261  3141   707    34.77  -114.62
+  1262  3099   705    34.92  -117.90
+  1263  3114   706    34.84  -116.78
+  1264  3095   707    34.73  -118.22
+  1265  3097   709    34.63  -118.08
+  1266  3085   698    35.43  -119.05
+  1267  3134   690    36.08  -115.17
+  1268  3123   683    36.62  -116.02
+  1269  3076   681    36.77  -119.72
+  1270  3084   715    34.12  -119.12
+  1271  3075   711    34.43  -119.83
+  1272  3083   714    34.21  -119.20
+  1273  3065   707    34.75  -120.57
+  1274  3067   705    34.90  -120.45
+  1275  3064   695    35.66  -120.63
+  1276  3619   672    37.50   -77.33
+  1277  3604   664    38.13   -78.44
+  1278  3605   674    37.35   -78.43
+  1279  3642   666    37.93   -75.48
+  1280  3617   653    38.95   -77.44
+  1281  3617   662    38.27   -77.45
+  1282  3631   662    38.28   -76.40
+  1283  3642   661    38.33   -75.51
+  1284  3623   655    38.84   -77.03
+  1285  3627   650    39.18   -76.67
+  1286  3614   644    39.70   -77.73
+  1287  3630   648    39.33   -76.42
+  1288  3654   647    39.45   -74.57
+  1289  3648   648    39.37   -75.07
+  1290  3645   641    39.88   -75.25
+  1291  3648   639    40.08   -75.01
+  1292  3641   644    39.68   -75.60
+  1293  3651   636    40.28   -74.82
+  1294  3654   640    40.02   -74.60
+  1295  3656   630    40.80   -74.42
+  1296  3595   674    37.33   -79.19
+  1297  3593   684    36.57   -79.33
+  1298  3585   674    37.32   -79.97
+  1299  3587   666    37.95   -79.83
+  1300  3570   668    37.78   -81.12
+  1301  3569   674    37.30   -81.19
+  1302  3579   667    37.87   -80.40
+  1303  3564   661    38.37   -81.60
+  1304  3586   654    38.88   -79.85
+  1305  3582   649    39.30   -80.23
+  1306  3586   644    39.65   -79.92
+  1307  3610   648    39.40   -77.98
+  1308  3525   652    39.05   -84.67
+  1309  3526   665    38.03   -84.60
+  1310  3511   663    38.18   -85.73
+  1311  3542   671    37.59   -83.32
+  1312  3493   669    37.75   -87.16
+  1313  3508   667    37.91   -85.97
+  1314  3532   677    37.08   -84.08
+  1315  3552   661    38.37   -82.55
+  1316  3535   647    39.42   -83.83
+  1317  3496   662    38.25   -86.95
+  1318  3566   648    39.34   -81.43
+  1319  3576   638    40.18   -80.65
+  1320  3548   640    40.00   -82.88
+  1321  3547   642    39.82   -82.93
+  1322  3560   641    39.95   -81.90
+  1323  3531   641    39.90   -84.20
+  1324  3528   652    39.09   -84.42
+  1325  3488   665    38.05   -87.53
+  1326  3470   657    38.65   -88.97
+  1327  3466   668    37.78   -89.25
+  1328  3452   656    38.75   -90.37
+  1329  3448   657    38.66   -90.65
+  1330  3472   677    37.07   -88.77
+  1331  3525   678    37.05   -84.61
+  1332  3491   647    39.45   -87.32
+  1333  3500   651    39.15   -86.62
+  1334  3504   643    39.73   -86.27
+  1335  3496   635    40.41   -86.93
+  1336  3461   642    39.84   -89.67
+  1337  3441   641    39.95   -91.20
+  1338  3470   634    40.48   -88.92
+  1339  3487   638    40.12   -87.60
+  1340  3413   675    37.23   -93.38
+  1341  3428   655    38.82   -92.22
+  1342  3451   668    37.77   -90.43
+  1343  3434   664    38.13   -91.77
+  1344  3424   664    38.10   -92.55
+  1345  3396   649    39.32   -94.72
+  1346  3398   651    39.12   -94.60
+  1347  3394   655    38.83   -94.89
+  1348  3394   643    39.77   -94.92
+  1349  3361   670    37.65   -97.43
+  1350  3356   665    38.07   -97.87
+  1351  3386   670    37.66   -95.48
+  1352  3363   665    38.06   -97.28
+  1353  3329   668    37.77   -99.97
+  1354  3319   666    37.93  -100.72
+  1355  3316   678    37.04  -100.97
+  1356  3343   661    38.34   -98.86
+  1357  3338   655    38.85   -99.27
+  1358  3347   675    37.27   -98.55
+  1359  3371   651    39.13   -96.67
+  1360  3377   661    38.33   -96.19
+  1361  3385   652    39.07   -95.62
+  1362  3384   653    38.95   -95.67
+  1363  3359   646    39.55   -97.65
+  1364  3344   654    38.87   -98.82
+  1365  3359   655    38.80   -97.65
+  1366  3304   678    37.01  -101.88
+  1367  3253   673    37.45  -105.87
+  1368  3229   676    37.15  -107.75
+  1369  3227   666    37.95  -107.90
+  1370  3283   665    38.05  -103.52
+  1371  3294   665    38.07  -102.68
+  1372  3271   662    38.28  -104.52
+  1373  3307   648    39.37  -101.70
+  1374  3331   648    39.38   -99.83
+  1375  3268   655    38.82  -104.72
+  1376  3266   645    39.57  -104.85
+  1377  3240   644    39.65  -106.92
+  1378  3241   650    39.22  -106.87
+  1379  3240   659    38.53  -106.93
+  1380  3267   657    38.70  -104.77
+  1381  3266   643    39.75  -104.87
+  1382  3287   638    40.17  -103.22
+  1383  3263   641    39.91  -105.12
+  1384  3191   645    39.62  -110.75
+  1385  3207   670    37.62  -109.47
+  1386  3191   661    38.37  -110.72
+  1387  3156   678    37.04  -113.50
+  1388  3161   669    37.70  -113.10
+  1389  3173   669    37.70  -112.15
+  1390  3219   651    39.12  -108.53
+  1391  3227   659    38.50  -107.90
+  1392  3218   674    37.30  -108.67
+  1393  3269   634    40.43  -104.63
+  1394  3264   634    40.45  -105.01
+  1395  3198   653    39.00  -110.17
+  1396  3204   656    38.76  -109.75
+  1397  3167   648    39.33  -112.58
+  1398  3177   617    41.78  -111.85
+  1399  3093   674    37.37  -118.37
+  1400  3066   675    37.28  -120.52
+  1401  3053   659    38.52  -121.50
+  1402  3056   658    38.55  -121.30
+  1403  3052   657    38.70  -121.58
+  1404  3110   665    38.05  -117.08
+  1405  3090   658    38.55  -118.63
+  1406  3138   649    39.28  -114.85
+  1407  3143   670    37.62  -114.52
+  1408  3075   646    39.50  -119.78
+  1409  3075   645    39.57  -119.79
+  1410  3049   684    36.58  -121.85
+  1411  3052   683    36.66  -121.60
+  1412  3056   667    37.90  -121.25
+  1413  3049   669    37.70  -121.82
+  1414  3044   669    37.73  -122.22
+  1415  3042   670    37.62  -122.38
+  1416  3048   674    37.37  -121.93
+  1417  3036   659    38.52  -122.82
+  1418  3676   629    40.87   -72.86
+  1419  3659   631    40.70   -74.17
+  1420  3660   629    40.84   -74.07
+  1421  3663   630    40.77   -73.90
+  1422  3673   630    40.80   -73.10
+  1423  3663   619    41.63   -73.87
+  1424  3665   626    41.07   -73.69
+  1425  3660   621    41.50   -74.10
+  1426  3673   625    41.17   -73.12
+  1427  3676   624    41.27   -72.87
+  1428  3686   623    41.33   -72.05
+  1429  3692   625    41.17   -71.58
+  1430  3706   619    41.65   -70.52
+  1431  3712   625    41.25   -70.07
+  1432  3703   615    41.92   -70.73
+  1433  3700   618    41.68   -70.97
+  1434  3709   619    41.67   -70.28
+  1435  3694   618    41.73   -71.43
+  1436  3694   619    41.60   -71.42
+  1437  3678   615    41.93   -72.68
+  1438  3685   618    41.73   -72.18
+  1439  3683   607    42.57   -72.27
+  1440  3679   618    41.73   -72.65
+  1441  3701   607    42.58   -70.92
+  1442  3699   610    42.37   -71.03
+  1443  3689   611    42.27   -71.87
+  1444  3636   635    40.38   -75.97
+  1445  3626   637    40.20   -76.76
+  1446  3632   638    40.12   -76.29
+  1447  3599   625    41.18   -78.90
+  1448  3606   636    40.30   -78.32
+  1449  3599   636    40.32   -78.83
+  1450  3612   629    40.84   -77.85
+  1451  3639   623    41.33   -75.73
+  1452  3624   624    41.25   -76.92
+  1453  3651   618    41.70   -74.80
+  1454  3636   611    42.22   -75.98
+  1455  3630   608    42.48   -76.44
+  1456  3624   612    42.17   -76.90
+  1457  3675   595    43.53   -72.95
+  1458  3643   632    40.65   -75.43
+  1459  3664   605    42.75   -73.80
+  1460  3666   597    43.33   -73.62
+  1461  3642   580    44.68   -75.47
+  1462  3634   600    43.12   -76.12
+  1463  3644   600    43.15   -75.37
+  1464  3582   633    40.50   -80.22
+  1465  3579   630    40.77   -80.40
+  1466  3585   636    40.34   -79.93
+  1467  3592   636    40.28   -79.40
+  1468  3566   628    40.91   -81.43
+  1469  3594   612    42.15   -79.26
+  1470  3561   622    41.42   -81.87
+  1471  3552   629    40.82   -82.52
+  1472  3576   624    41.27   -80.67
+  1473  3582   613    42.08   -80.18
+  1474  3602   617    41.80   -78.62
+  1475  3586   622    41.38   -79.87
+  1476  3601   602    42.93   -78.73
+  1477  3598   600    43.10   -78.94
+  1478  3614   600    43.12   -77.67
+  1479  3483   615    41.98   -87.90
+  1480  3479   615    41.92   -88.25
+  1481  3479   640    40.03   -88.28
+  1482  3471   642    39.83   -88.87
+  1483  3461   631    40.66   -89.68
+  1484  3461   618    41.74   -89.68
+  1485  3518   627    41.00   -85.20
+  1486  3515   637    40.25   -85.40
+  1487  3490   619    41.62   -87.42
+  1488  3485   617    41.78   -87.75
+  1489  3487   616    41.87   -87.60
+  1490  3484   609    42.42   -87.87
+  1491  3504   618    41.70   -86.32
+  1492  3536   619    41.60   -83.80
+  1493  3538   627    41.02   -83.67
+  1494  3542   611    42.23   -83.33
+  1495  3546   609    42.42   -83.02
+  1496  3552   603    42.92   -82.53
+  1497  3483   545    47.45   -87.90
+  1498  3526   604    42.77   -84.60
+  1499  3527   611    42.27   -84.47
+  1500  3517   610    42.30   -85.25
+  1501  3468   612    42.20   -89.10
+  1502  3450   621    41.45   -90.52
+  1503  3435   616    41.88   -91.70
+  1504  3442   630    40.78   -91.13
+  1505  3410   620    41.53   -93.65
+  1506  3425   626    41.10   -92.45
+  1507  3387   630    40.75   -95.41
+  1508  3448   609    42.40   -90.70
+  1509  3434   598    43.28   -91.74
+  1510  3426   607    42.55   -92.40
+  1511  3414   600    43.15   -93.33
+  1512  3403   607    42.55   -94.20
+  1513  3390   607    42.60   -95.23
+  1514  3381   623    41.30   -95.90
+  1515  3370   629    40.84   -96.75
+  1516  3370   636    40.30   -96.75
+  1517  3350   627    40.97   -98.32
+  1518  3342   619    41.62   -98.95
+  1519  3341   631    40.73   -99.00
+  1520  3377   617    41.76   -96.18
+  1521  3385   639    40.08   -95.60
+  1522  3333   621    41.44   -99.64
+  1523  3361   615    41.98   -97.43
+  1524  3363   621    41.45   -97.34
+  1525  3345   608    42.47   -98.69
+  1526  3375   609    42.40   -96.38
+  1527  3375   623    41.32   -96.37
+  1528  3290   626    41.10  -102.98
+  1529  3320   625    41.13  -100.68
+  1530  3308   633    40.51  -101.62
+  1531  3337   634    40.45   -99.33
+  1532  3293   614    42.05  -102.80
+  1533  3289   604    42.83  -103.10
+  1534  3267   625    41.15  -104.82
+  1535  3256   623    41.32  -105.67
+  1536  3269   642    39.87  -104.67
+  1537  3282   616    41.87  -103.60
+  1538  3246   603    42.92  -106.47
+  1539  3207   634    40.43  -109.52
+  1540  3236   634    40.48  -107.22
+  1541  3230   646    39.53  -107.73
+  1542  3175   630    40.78  -111.97
+  1543  3178   637    40.22  -111.72
+  1544  3212   619    41.60  -109.07
+  1545  3236   617    41.80  -107.20
+  1546  3175   625    41.20  -112.02
+  1547  3217   604    42.82  -108.73
+  1548  3220   601    43.07  -108.47
+  1549  3187   624    41.28  -111.03
+  1550  3191   594    43.60  -110.73
+  1551  3167   603    42.92  -112.60
+  1552  3174   595    43.52  -112.07
+  1553  3122   577    44.88  -116.10
+  1554  3091   639    40.07  -118.57
+  1555  3149   631    40.73  -114.03
+  1556  3127   629    40.87  -115.73
+  1557  3127   629    40.83  -115.78
+  1558  3127   619    41.67  -115.78
+  1559  3101   628    40.90  -117.80
+  1560  3065   635    40.38  -120.57
+  1561  3064   649    39.28  -120.70
+  1562  3071   649    39.32  -120.13
+  1563  3072   654    38.90  -120.00
+  1564  3145   595    43.50  -114.30
+  1565  3143   608    42.48  -114.48
+  1566  3152   607    42.55  -113.77
+  1567  3050   612    42.15  -121.73
+  1568  3032   651    39.13  -123.20
+  1569  3044   638    40.15  -122.25
+  1570  3043   633    40.50  -122.30
+  1571  3020   627    40.98  -124.10
+  1572  3018   617    41.78  -124.23
+  1573  3066   621    41.50  -120.53
+  1574  3036   610    42.37  -122.87
+  1575  3693   599    43.20   -71.50
+  1576  3702   600    43.08   -70.82
+  1577  3708   593    43.65   -70.32
+  1578  3703   596    43.40   -70.72
+  1579  3733   583    44.45   -68.37
+  1580  3724   588    44.07   -69.10
+  1581  3751   577    44.92   -67.00
+  1582  3734   547    47.28   -68.32
+  1583  3727   578    44.80   -68.83
+  1584  3680   597    43.35   -72.52
+  1585  3683   593    43.63   -72.30
+  1586  3693   584    44.36   -71.55
+  1587  3687   583    44.42   -72.02
+  1588  3680   586    44.20   -72.57
+  1589  3694   594    43.57   -71.42
+  1590  3697   581    44.58   -71.18
+  1591  3683   603    42.90   -72.27
+  1592  3672   583    44.47   -73.15
+  1593  3706   582    44.53   -70.53
+  1594  3709   588    44.05   -70.28
+  1595  3715   585    44.32   -69.80
+  1596  3718   570    45.47   -69.58
+  1597  3729   568    45.65   -68.68
+  1598  3668   580    44.65   -73.47
+  1599  3650   577    44.93   -74.85
+  1600  3636   589    44.00   -76.01
+  1601  3659   584    44.38   -74.19
+  1602  3540   605    42.70   -83.47
+  1603  3524   577    44.90   -84.72
+  1604  3514   603    42.88   -85.52
+  1605  3502   612    42.14   -86.44
+  1606  3513   611    42.23   -85.55
+  1607  3504   599    43.17   -86.25
+  1608  3536   602    42.97   -83.75
+  1609  3541   606    42.67   -83.42
+  1610  3532   595    43.53   -84.08
+  1611  3525   584    44.36   -84.67
+  1612  3515   585    44.28   -85.42
+  1613  3504   585    44.28   -86.25
+  1614  3513   579    44.73   -85.58
+  1615  3539   575    45.07   -83.57
+  1616  3541   583    44.45   -83.40
+  1617  3483   602    42.95   -87.90
+  1618  3465   600    43.13   -89.33
+  1619  3469   606    42.62   -89.04
+  1620  3454   599    43.21   -90.18
+  1621  3440   590    43.87   -91.25
+  1622  3438   578    44.87   -91.48
+  1623  3424   590    43.92   -92.50
+  1624  3480   583    44.48   -88.13
+  1625  3486   587    44.13   -87.68
+  1626  3475   589    43.98   -88.55
+  1627  3461   577    44.93   -89.63
+  1628  3461   579    44.78   -89.67
+  1629  3494   567    45.73   -87.08
+  1630  3487   574    45.12   -87.63
+  1631  3411   578    44.85   -93.57
+  1632  3370   594    43.58   -96.73
+  1633  3369   585    44.31   -96.82
+  1634  3362   603    42.92   -97.38
+  1635  3337   591    43.80   -99.32
+  1636  3351   584    44.38   -98.22
+  1637  3354   592    43.77   -98.03
+  1638  3365   577    44.92   -97.15
+  1639  3404   569    45.55   -94.07
+  1640  3404   558    46.40   -94.13
+  1641  3391   582    44.55   -95.08
+  1642  3387   565    45.87   -95.40
+  1643  3382   583    44.45   -95.82
+  1644  3399   585    44.32   -94.50
+  1645  3401   564    45.95   -94.35
+  1646  3415   577    44.88   -93.22
+  1647  3417   577    44.95   -93.07
+  1648  3406   586    44.22   -93.91
+  1649  3400   593    43.65   -94.42
+  1650  3413   593    43.68   -93.37
+  1651  3349   570    45.45   -98.43
+  1652  3289   588    44.06  -103.05
+  1653  3258   584    44.35  -105.53
+  1654  3239   579    44.77  -106.97
+  1655  3195   582    44.54  -110.42
+  1656  3227   589    43.97  -107.95
+  1657  3259   570    45.45  -105.40
+  1658  3267   549    47.13  -104.80
+  1659  3323   569    45.55  -100.41
+  1660  3325   584    44.38  -100.28
+  1661  3301   564    45.93  -102.17
+  1662  3213   582    44.52  -109.02
+  1663  3199   607    42.58  -110.11
+  1664  3186   580    44.68  -111.12
+  1665  3219   566    45.80  -108.53
+  1666  3207   550    47.05  -109.47
+  1667  3168   564    45.95  -112.50
+  1668  3168   573    45.25  -112.55
+  1669  3186   566    45.78  -111.15
+  1670  3195   567    45.70  -110.45
+  1671  3121   594    43.57  -116.22
+  1672  3086   594    43.58  -118.95
+  1673  3058   585    44.25  -121.15
+  1674  3151   574    45.12  -113.88
+  1675  3122   564    45.95  -116.13
+  1676  3087   567    45.68  -118.85
+  1677  3100   578    44.83  -117.82
+  1678  3030   599    43.23  -123.35
+  1679  3018   596    43.42  -124.25
+  1680  3031   587    44.12  -123.22
+  1681  3034   577    44.92  -123.00
+  1682  3021   581    44.58  -124.06
+  1683  3039   568    45.60  -122.60
+  1684  3042   569    45.55  -122.40
+  1685  3035   569    45.53  -122.95
+  1686  3058   568    45.62  -121.17
+  1687  3741   562    46.12   -67.80
+  1688  3719   555    46.62   -69.53
+  1689  3738   552    46.87   -68.01
+  1690  3737   554    46.68   -68.05
+  1691  3529   557    46.47   -84.37
+  1692  3527   560    46.25   -84.47
+  1693  3523   569    45.57   -84.80
+  1694  3463   568    45.63   -89.47
+  1695  3488   556    46.53   -87.55
+  1696  3490   559    46.35   -87.40
+  1697  3481   565    45.82   -88.12
+  1698  3476   548    47.17   -88.50
+  1699  3455   556    46.53   -90.13
+  1700  3429   552    46.83   -92.18
+  1701  3420   545    47.38   -92.83
+  1702  3381   552    46.83   -95.89
+  1703  3433   540    47.82   -91.83
+  1704  3413   530    48.57   -93.38
+  1705  3397   528    48.73   -94.62
+  1706  3369   552    46.90   -96.80
+  1707  3345   551    46.93   -98.68
+  1708  3393   544    47.50   -94.93
+  1709  3388   526    48.93   -95.33
+  1710  3365   538    47.95   -97.18
+  1711  3343   536    48.10   -98.87
+  1712  3319   553    46.77  -100.75
+  1713  3293   553    46.80  -102.80
+  1714  3282   535    48.18  -103.63
+  1715  3312   534    48.27  -101.28
+  1716  3310   542    47.65  -101.43
+  1717  3244   535    48.22  -106.62
+  1718  3240   546    47.33  -106.93
+  1719  3257   536    48.10  -105.58
+  1720  3275   541    47.70  -104.20
+  1721  3175   555    46.60  -112.00
+  1722  3148   551    46.92  -114.08
+  1723  3183   544    47.48  -111.37
+  1724  3203   530    48.55  -109.77
+  1725  3146   534    48.30  -114.27
+  1726  3170   530    48.60  -112.37
+  1727  3066   556    46.57  -120.53
+  1728  3055   547    47.28  -121.33
+  1729  3072   545    47.40  -120.02
+  1730  3070   545    47.40  -120.20
+  1731  3079   546    47.30  -119.52
+  1732  3081   548    47.20  -119.32
+  1733  3111   558    46.38  -117.02
+  1734  3113   540    47.77  -116.82
+  1735  3082   559    46.32  -119.27
+  1736  3084   560    46.27  -119.12
+  1737  3095   562    46.10  -118.28
+  1738  3104   542    47.63  -117.53
+  1739  3107   542    47.68  -117.32
+  1740  3109   553    46.75  -117.12
+  1741  3103   541    47.70  -117.60
+  1742  3100   530    48.55  -117.88
+  1743  3066   550    47.03  -120.53
+  1744  3028   536    48.12  -123.50
+  1745  3079   532    48.42  -119.53
+  1746  3023   561    46.15  -123.88
+  1747  3035   551    46.97  -122.90
+  1748  3022   551    46.97  -123.93
+  1749  3035   562    46.12  -122.94
+  1750  3043   545    47.45  -122.30
+  1751  3044   544    47.50  -122.22
+  1752  3043   543    47.53  -122.30
+  1753  3043   539    47.90  -122.28
+  1754  3039   547    47.27  -122.58
+  1755  3567   798    27.65   -81.33
+  1756  3014   538    47.95  -124.55
+  1757  3040   527    48.80  -122.53
+  1758  3163   638    40.17  -112.93
+  1759  3253   558    46.43  -105.87
+  1760  3387   580    44.67   -95.45
+  1761  3450   599    43.22   -90.53
+  1762  3639   588    44.05   -75.73
+  1763  3709   590    43.90   -70.25
+  1764  3694   602    42.93   -71.43
+  1765  3249   616    41.90  -106.19
+  1766  3320   639    40.09  -100.65
+  1767  3307   603    42.91  -101.69
+  1768  3363   639    40.10   -97.34
+  1769  3357   612    42.21   -97.79
+  1770  3409   616    41.90   -93.70
+  1771  3449   619    41.61   -90.57
+  1772  3533   642    39.82   -84.03
+  1773  3476   617    41.77   -88.48
+  1774  3494   630    40.81   -87.05
+  1775  3515   655    38.83   -85.42
+  1776  3664   632    40.65   -73.78
+  1777  3696   608    42.47   -71.28
+  1778  3698   605    42.72   -71.12
+  1779  3680   612    42.20   -72.53
+  1780  3678   612    42.15   -72.72
+  1781  3714   619    41.67   -69.97
+  1782  3662   603    42.85   -73.93
+  1783  3301   668    37.77  -102.18
+  1784  3267   653    38.97  -104.82
+  1785  3281   649    39.26  -103.70
+  1786  3268   638    40.18  -104.72
+  1787  3340   670    37.65   -99.09
+  1788  3384   673    37.38   -95.63
+  1789  3363   662    38.31   -97.30
+  1790  3422   672    37.52   -92.70
+  1791  3403   645    39.58   -94.19
+  1792  3450   644    39.66   -90.48
+  1793  3465   638    40.15   -89.33
+  1794  3650   652    39.02   -74.92
+  1795  3102   695    35.68  -117.68
+  1796  3247   737    32.41  -106.35
+  1797  3227   680    36.84  -107.91
+  1798  3338   690    36.07   -99.22
+  1799  3335   686    36.43   -99.53
+  1800  3361   682    36.69   -97.48
+  1801  3381   695    35.68   -95.86
+  1802  3360   704    34.98   -97.52
+  1803  3457   680    36.88   -89.97
+  1804  3502   679    36.97   -86.42
+  1805  3633   688    36.27   -76.18
+  1806  3129   732    32.83  -115.58
+  1807  3122   721    33.63  -116.17
+  1808  3140   722    33.62  -114.72
+  1809  3282   703    35.08  -103.61
+  1810  3250   728    33.08  -106.12
+  1811  3247   731    32.90  -106.40
+  1812  3316   729    33.02  -100.98
+  1813  3331   762    30.50   -99.77
+  1814  3383   745    31.78   -95.71
+  1815  3402   715    34.11   -94.29
+  1816  3421   744    31.90   -92.78
+  1817  3468   763    30.40   -89.07
+  1818  3471   716    34.09   -88.86
+  1819  3564   744    31.90   -81.63
+  1820  3546   757    30.89   -83.01
+  1821  3578   717    33.97   -80.47
+  1822  3598   721    33.68   -78.93
+  1823  3601   719    33.82   -78.72
+  1824  3577   788    28.47   -80.55
+  1825  3111   735    32.55  -116.97
+  1826  3133   734    32.63  -115.24
+  1827  3246   747    31.63  -106.43
+  1828  3094   782    28.88  -118.30
+  1829  3188   780    29.07  -110.97
+  1830  3252   785    28.70  -105.97
+  1831  3171   802    27.32  -112.30
+  1832  3189   794    27.97  -110.93
+  1833  3190   794    27.95  -110.80
+  1834  3334   801    27.43   -99.57
+  1835  3351   819    26.02   -98.23
+  1836  3284   825    25.53  -103.45
+  1837  3326   821    25.87  -100.20
+  1838  3327   822    25.78  -100.10
+  1839  3360   822    25.77   -97.53
+  1840  3195   843    24.17  -110.42
+  1841  3196   844    24.07  -110.37
+  1842  3204   856    23.15  -109.70
+  1843  3234   834    24.82  -107.40
+  1844  3271   843    24.13  -104.53
+  1845  3246   855    23.20  -106.42
+  1846  3248   855    23.17  -106.27
+  1847  3340   848    23.73   -99.13
+  1848  3342   848    23.72   -98.97
+  1849  3294   859    22.90  -102.68
+  1850  3316   868    22.15  -100.98
+  1851  3356   867    22.28   -97.87
+  1852  3299   872    21.88  -102.30
+  1853  3497   883    21.03   -86.87
+  1854  3261   887    20.68  -105.25
+  1855  3286   889    20.52  -103.32
+  1856  3461   883    20.98   -89.65
+  1857  3496   889    20.53   -86.93
+  1858  3270   907    19.15  -104.57
+  1859  3315   898    19.85  -101.03
+  1860  3334   904    19.35   -99.57
+  1861  3340   903    19.43   -99.10
+  1862  3377   907    19.15   -96.18
+  1863  3398   920    18.10   -94.58
+  1864  3433   913    18.65   -91.80
+  1865  3310   927    17.60  -101.47
+  1866  3330   937    16.83   -99.92
+  1867  3332   937    16.77   -99.75
+  1868  3376   950    15.78   -96.27
+  1869  3426   963    14.78   -92.38
+  1870  3781   738    32.37   -64.68
+  1871  3598   810    26.70   -78.97
+  1872  3601   812    26.55   -78.69
+  1873  3593   823    25.73   -79.30
+  1874  3617   831    25.05   -77.47
+  1875  3639   851    23.50   -75.76
+  1876  3549   873    21.83   -82.78
+  1877  3554   858    22.98   -82.40
+  1878  3568   856    23.13   -81.28
+  1879  3612   878    21.42   -77.85
+  1880  3621   892    20.33   -77.12
+  1881  3628   891    20.40   -76.62
+  1882  3638   896    19.96   -75.85
+  1883  3647   895    20.08   -75.15
+  1884  3655   891    20.35   -74.50
+  1885  3650   888    20.65   -74.92
+  1886  3565   875    21.62   -81.55
+  1887  3600   873    21.78   -78.78
+  1888  3624   884    20.95   -76.94
+  1889  3647   897    19.90   -75.12
+  1890  3567   905    19.28   -81.35
+  1891  3611   915    18.50   -77.92
+  1892  3626   922    17.93   -76.78
+  1893  3685   899    19.75   -72.18
+  1894  3683   914    18.57   -72.30
+  1895  3705   899    19.75   -70.55
+  1896  3704   903    19.46   -70.69
+  1897  3733   914    18.57   -68.37
+  1898  3717   916    18.43   -69.67
+  1899  3714   916    18.47   -69.88
+  1900  3749   915    18.50   -67.12
+  1901  3749   918    18.27   -67.15
+  1902  3756   921    18.02   -66.57
+  1903  3764   916    18.43   -66.00
+  1904  3777   917    18.33   -64.97
+  1905  3779   925    17.70   -64.80
+  1906  3783   916    18.45   -64.53
+  1907  3478   928    17.53   -88.30
+  1908  3458   935    16.92   -89.88
+  1909  3438   956    15.32   -91.47
+  1910  3474   951    15.72   -88.60
+  1911  3450   965    14.58   -90.52
+  1912  3446   974    13.92   -90.82
+  1913  3459   978    13.57   -89.83
+  1914  3468   977    13.70   -89.12
+  1915  3469   980    13.43   -89.05
+  1916  3486   982    13.28   -87.67
+  1917  3509   941    16.46   -85.92
+  1918  3501   943    16.32   -86.53
+  1919  3497   951    15.73   -86.87
+  1920  3489   951    15.72   -87.48
+  1921  3493   958    15.17   -87.12
+  1922  3483   954    15.45   -87.93
+  1923  3536   957    15.22   -83.80
+  1924  3509   961    14.90   -85.93
+  1925  3472   963    14.78   -88.78
+  1926  3480   969    14.33   -88.17
+  1927  3492   972    14.05   -87.22
+  1928  3493   982    13.30   -87.18
+  1929  3541   972    14.05   -83.37
+  1930  3506   996    12.15   -86.17
+  1931  3523  1024     9.97   -84.78
+  1932  3530  1024    10.00   -84.22
+  1933  3531  1025     9.95   -84.15
+  1934  3545  1024    10.00   -83.05
+  1935  3513  1016    10.60   -85.55
+  1936  3552  1031     9.43   -82.52
+  1937  3593  1036     9.05   -79.37
+  1938  3554  1045     8.39   -82.42
+  1939  3556  1032     9.35   -82.25
+  1940  3572  1049     8.08   -80.94
+  1941  3591  1037     8.97   -79.51
+  1942  3801   919    18.20   -63.05
+  1943  3813   938    16.75   -62.17
+  1944  3806   931    17.29   -62.68
+  1945  3807   932    17.20   -62.58
+  1946  3818   933    17.12   -61.78
+  1947  3801   921    18.04   -63.12
+  1948  3802   928    17.48   -62.98
+  1949  3804   923    17.90   -62.85
+  1950  3821   944    16.27   -61.52
+  1951  3824   953    15.53   -61.30
+  1952  3823   953    15.53   -61.40
+  1953  3823   956    15.30   -61.40
+  1954  3847   985    13.07   -59.48
+  1955  3712   992    12.50   -70.01
+  1956  3726   996    12.20   -68.97
+  1957  3735   996    12.15   -68.28
+  1958  3562   991    12.58   -81.72
+  1959  3658  1010    11.13   -74.23
+  1960  3642  1018    10.45   -75.52
+  1961  3651  1012    10.90   -74.77
+  1962  3672  1061     7.10   -73.20
+  1963  3641  1072     6.22   -75.60
+  1964  3643  1073     6.18   -75.43
+  1965  3638  1090     4.82   -75.80
+  1966  3660  1092     4.70   -74.13
+  1967  3690  1017    10.57   -71.73
+  1968  3751  1016    10.60   -66.98
+  1969  3832  1188    -2.83   -60.70
+  1970  3988  1170    -1.43   -48.48
+  1971  3841  1192    -3.15   -59.98
+  1972  4115  1200    -3.78   -38.53
+  1973  3979  1223    -5.53   -49.15
+  1974  4060  1217    -5.05   -42.82
+  1975  4157  1228    -5.92   -35.25
+  1976  4095  1253    -7.88   -40.08
+  1977  3791  1263    -8.70   -63.90
+  1978  3978  1258    -8.27   -49.28
+  1979  4041  1268    -9.07   -44.37
+  1980  3989  1289   -10.70   -48.40
+  1981  4115  1318   -13.00   -38.52
+  1982  3890  1352   -15.65   -56.10
+  1983  4005  1446   -23.00   -47.13
+  1984  3980  1438   -22.32   -49.07
+  1985  4056  1445   -22.90   -43.17
+  1986  4011  1454   -23.62   -46.65
+  1987  3953  1537   -30.08   -51.18
+  1988  3461  1164    -0.90   -89.62
+  1989  3606  1154    -0.12   -78.35
+  1990  3586  1180    -2.15   -79.88
+  1991  3596  1255    -8.08   -79.12
+  1992  3623  1306   -12.02   -77.03
+  1993  3779  1341   -14.75   -64.80
+  1994  3736  1363   -16.50   -68.17
+  1995  3703  1579   -33.38   -70.78
+  1996  3697  1600   -34.97   -71.22
+  1997  3874  1474   -25.16   -57.38
+  1998  3853  1503   -27.45   -59.05
+  1999  3831  1573   -32.92   -60.78
+  2000  3859  1598   -34.82   -58.53
+  2001  3745  1738   -45.78   -67.45
+  2002  2569   871    21.98  -159.35
+  2003  2585   879    21.32  -158.07
+  2004  2587   879    21.35  -157.93
+  2005  2598   881    21.15  -157.10
+  2006  2606   884    20.90  -156.43
+  2007  1854   981    13.35   144.80
+  2008  1866   958    15.12   145.73
+  2009  2134   905    19.28   166.65
+  2010  2624   900    19.72  -155.07
+  2011  1944  1056     7.47   151.85
+  2012  2026  1063     6.97   158.22
+  2013  2087  1084     5.33   163.03
+  2014  2147  1040     8.73   167.73
+  2015  2194  1061     7.08   171.38
+  2016  1722  1058     7.33   134.48
+  2017  1768  1031     9.48   138.08
+  2018  1859   970    14.20   145.20
+  2019  1864   960    15.00   145.60
+  2020  1865   911    18.80   145.70
+  2021  1725  1048     8.10   134.70
+  2022  1693  1084     5.30   132.20
+  2023  1760  1046     8.30   137.50
+  2024  1790  1024    10.00   139.80
+  2025  1799  1027     9.80   140.50
+  2026  1727  1057     7.40   134.90
+  2027  1851  1042     8.60   144.60
+  2028  1883  1057     7.40   147.10
+  2029  1910  1057     7.40   149.20
+  2030  1917  1042     8.60   149.70
+  2031  1945  1042     8.60   151.90
+  2032  1955  1064     6.90   152.70
+  2033  1969  1082     5.50   153.80
+  2034  1984  1103     3.80   155.00
+  2035  2014  1078     5.80   157.30
+  2036  2022  1062     7.00   157.90
+  2037  2046  1065     6.80   159.80
+  2038  2057  1073     6.20   160.70
+  2039  2080  1001    11.80   162.50
+  2040  2121  1038     8.90   165.70
+  2041  2161  1059     7.30   168.80
+  2042  2171  1076     5.90   169.60
+  2043  2174  1009    11.20   169.80
+  2044  2179  1030     9.50   170.20
+  2045  2200  1074     6.10   171.80
+  2046  2804  1383   -18.07  -140.95
+  2047  2238  1626   -37.02   174.80
+  2048  1676  1311   -12.42   130.87
+  2049  1955  1506   -27.63   152.72
+  2050  1485  1561   -31.92   115.97
+  2051  1936  1587   -33.95   151.18
+  2052  1854  1634   -37.67   144.83
+  2053  1907  1605   -35.40   148.98
+  2054  1888  1700   -42.83   147.50
+  2055  1368  1231    -6.15   106.85
+  2056  3494   750    31.42   -87.05
+  2057  1544   958    15.18   120.57
+  2058  1549   966    14.52   121.00
+  2059  1563  1064     6.90   122.07
+  2060  3615   723    33.49   -77.59
+  2061  3143   776    29.37  -114.47
+  2062  3156   636    40.33  -113.50
+  2063  3591   702    35.17   -79.50
+  2064  3307   527    48.83  -101.67
+  2065  4600   586    44.22    -0.67
+  2066   276  1510   -28.00    21.50
+  2067  3738  1671   -40.50   -68.00
+  2068  3847  1502   -27.33   -59.50
+  2069  1728  1513   -28.23   134.98
+  2070  4262   657    38.70   -27.10
+  2071  2691   368    61.20  -149.80
+  2072  4424  1253    -7.90   -14.40
+  2073  1784   695    35.70   139.30
+  2074  3911   526    48.90   -54.50
+  2075    63   595    43.50     4.90
+  2076  4495   477    52.70    -8.90
+  2077  1694  1338   -14.50   132.30
+  2078  4507   664    38.10    -7.90
+  2079  1544   842    24.20   120.60
+  2080  1293   989    12.70   101.00
+  2081   533   892    20.30    41.60
+  2082  2424  1335   -14.30  -170.70
+  2083  3721  1813   -51.60   -69.30
+  2084  3208  1500   -27.20  -109.40
+  2085  1133   863    22.60    88.50
+  2086  4162  1256    -8.10   -34.90
+  2087  4013  1452   -23.40   -46.50
+  2088  1023  1060     7.20    79.90
+  2089  3751  1016    10.60   -67.00
+  2090  3938  1091     4.80   -52.40
+  2091   932   907    19.10    72.80
+  2092   608  1393   -18.80    47.50
+  2093  3189   740    32.20  -110.90
+  2094  3289   587    44.10  -103.10
+  2095  2727   325    64.60  -147.00
+  2096  3065   706    34.80  -120.60
+  2097  3097   709    34.60  -118.10
+  2098  3156   637    40.19  -113.47
+  2099   887   705    34.95    69.27
+  2100  4592   476    52.83    -1.32
+  2101    93  1037     9.01     7.26
+  2102  3118   850    23.61  -116.48
+  2103   814   563    46.00    63.56
+  2104   867   542    47.67    67.73
+  2105  2611   899    19.73  -156.05
+  2106  2875   394    59.23  -135.43
+  2107  2736   369    61.13  -146.25
+  2108  2600   886    20.78  -156.95
+  2109  2604   883    21.02  -156.63
+  2110  3757   923    17.85   -66.52
+  2111  3749   919    18.17   -67.15
+  2112  3417   701    35.25   -93.09
+  2113  1641  1351   -15.51   128.15
+  2114  4514   416    57.48    -7.36
+  2115   130   456    54.38    10.13
diff --git a/parm/product/gefs.0p25.f000.paramlist.a.txt b/parm/product/gefs.0p25.f000.paramlist.a.txt
new file mode 100644
index 0000000000..4fdb8f9713
--- /dev/null
+++ b/parm/product/gefs.0p25.f000.paramlist.a.txt
@@ -0,0 +1,39 @@
+:HGT:surface:
+:PRMSL:mean sea level:
+:PRES:surface:
+:TMP:2 m above ground:
+:TMAX:2 m above ground:
+:TMIN:2 m above ground:
+:RH:2 m above ground:
+:DPT:2 m above ground:
+:UGRD:10 m above ground:
+:VGRD:10 m above ground:
+:APCP:surface:
+:CRAIN:surface:
+:CSNOW:surface:
+:CFRZR:surface:
+:CICEP:surface:
+:PWAT:entire atmosphere (considered as a single layer):
+:CAPE:180-0 mb above ground:
+:CAPE:surface:
+:CIN:180-0 mb above ground:
+:CIN:surface:
+:CPOFP:surface:
+:HLCY:3000-0 m above ground:
+:TCDC:entire atmosphere:
+:WEASD:surface:
+:SNOD:surface:
+:ULWRF:top of atmosphere:
+:DSWRF:surface:
+:DLWRF:surface:
+:USWRF:surface:
+:ULWRF:surface:
+:GUST:surface:
+:SHTFL:surface:
+:LHTFL:surface:
+:ICETK:surface:
+:TSOIL:0-0.1
+:SOILW:0-0.1
+:MSLET:mean sea level:
+:VIS:surface:
+:HGT:cloud ceiling:
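The paramlist files added above hold one fixed match string per line in the form `:VAR:level:` (lines beginning with `#` or `;` are comments). A minimal sketch of how such a list is typically applied is shown below: it keeps only those records of a GRIB2 inventory whose inventory line contains one of the listed strings. The input file names are assumptions for illustration only, not paths used by the workflow.

```python
# Sketch: filter a GRIB2 inventory (one record per line, wgrib2-style text)
# down to the parameters named in a paramlist file such as the one above.
# All file names here are hypothetical examples.
from pathlib import Path


def load_paramlist(path: str) -> list[str]:
    """Return the non-empty, non-comment match strings from a paramlist file."""
    lines = Path(path).read_text().splitlines()
    return [ln.strip() for ln in lines
            if ln.strip() and not ln.lstrip().startswith(("#", ";"))]


def filter_inventory(inventory_lines: list[str], patterns: list[str]) -> list[str]:
    """Keep inventory lines containing any paramlist string (fixed-string match)."""
    return [ln for ln in inventory_lines if any(p in ln for p in patterns)]


if __name__ == "__main__":
    patterns = load_paramlist("gefs.0p25.f000.paramlist.a.txt")
    inventory = Path("gefs_inventory.txt").read_text().splitlines()  # assumed input
    for line in filter_inventory(inventory, patterns):
        print(line)
```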
diff --git a/parm/product/gefs.0p25.f000.paramlist.b.txt b/parm/product/gefs.0p25.f000.paramlist.b.txt
new file mode 100644
index 0000000000..b94b4ab8a3
--- /dev/null
+++ b/parm/product/gefs.0p25.f000.paramlist.b.txt
@@ -0,0 +1,522 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:BRTMP:top of atmosphere:
+:CAPE:255-0 mb above ground:
+:CIN:255-0 mb above ground:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:DPT:30-0 mb above ground:
+:FLDCP:surface:
+:FRICV:surface:
+:HGT:0C isotherm:
+:HGT:1000 mb:
+:HGT:100 mb:
+:HGT:10 mb:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:200 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:500 mb:
+:HGT:50 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:700 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:850 mb:
+:HGT:900 mb:
+:HGT:925 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:MNTSF:320 K isentropic level:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRES:80 m above ground:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:1000 mb:
+:RH:100 mb:
+:RH:10 mb:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:200 mb:
+:RH:20 mb:
+:RH:250 mb:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:500 mb:
+:RH:50 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:700 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:850 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:925 mb:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SNOHF:surface:
+:SNOWC:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TMP:0.995 sigma level:
+:TMP:1000 mb:
+:TMP:100 m above ground:
+:TMP:100 mb:
+:TMP:10 mb:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:200 mb:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:250 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 mb:
+:TMP:450 K isentropic level:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:500 mb:
+:TMP:50 mb:
+:TMP:5 mb:
+:TMP:550 mb:
+:TMP:550 K isentropic level:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 mb:
+:TMP:650 K isentropic level:
+:TMP:700 mb:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:850 mb:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:925 mb:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:max wind:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UGRD:0.995 sigma level:
+:UGRD:1000 mb:
+:UGRD:100 m above ground:
+:UGRD:100 mb:
+:UGRD:10 mb:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:200 mb:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:250 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:300 mb:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:400 mb:
+:UGRD:450 mb:
+:UGRD:450 K isentropic level:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:500 mb:
+:UGRD:50 mb:
+:UGRD:5 mb:
+:UGRD:550 mb:
+:UGRD:550 K isentropic level:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 mb:
+:UGRD:650 K isentropic level:
+:UGRD:700 mb:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:850 mb:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:925 mb:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:USTM:6000-0 m above ground:
+:APTMP:2 m above ground:
+:VGRD:0.995 sigma level:
+:VGRD:1000 mb:
+:VGRD:100 m above ground:
+:VGRD:100 mb:
+:VGRD:10 mb:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:200 mb:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:250 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:300 mb:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:400 mb:
+:VGRD:450 mb:
+:VGRD:450 K isentropic level:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:500 mb:
+:VGRD:50 mb:
+:VGRD:5 mb:
+:VGRD:550 mb:
+:VGRD:550 K isentropic level:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 mb:
+:VGRD:650 K isentropic level:
+:VGRD:700 mb:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:850 mb:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:925 mb:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:850 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WILT:surface:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.0p25.fFFF.paramlist.a.txt b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
new file mode 100644
index 0000000000..a4a3ace385
--- /dev/null
+++ b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
@@ -0,0 +1,38 @@
+:PRMSL:mean sea level:
+:PRES:surface:
+:TMP:2 m above ground:
+:TMAX:2 m above ground:
+:TMIN:2 m above ground:
+:RH:2 m above ground:
+:DPT:2 m above ground:
+:UGRD:10 m above ground:
+:VGRD:10 m above ground:
+:APCP:surface:
+:CRAIN:surface:
+:CSNOW:surface:
+:CFRZR:surface:
+:CICEP:surface:
+:PWAT:entire atmosphere (considered as a single layer):
+:CAPE:180-0 mb above ground:
+:CAPE:surface:
+:CIN:180-0 mb above ground:
+:CIN:surface:
+:HLCY:3000-0 m above ground:
+:TCDC:entire atmosphere:
+:WEASD:surface:
+:SNOD:surface:
+:ULWRF:top of atmosphere:
+:DSWRF:surface:
+:CPOFP:surface:
+:DLWRF:surface:
+:USWRF:surface:
+:ULWRF:surface:
+:GUST:surface:
+:SHTFL:surface:
+:LHTFL:surface:
+:ICETK:surface:
+:TSOIL:0-0.1 m below ground:
+:SOILW:0-0.1 m below ground:
+:MSLET:mean sea level:
+:VIS:surface:
+:HGT:cloud ceiling:
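The paramlist files added above and below are plain-text match lists: each non-comment line is a :PARAM:level: string that selects one GRIB2 record, and the closing comment warns against a trailing blank line presumably because an empty pattern, when the list is fed to grep as a fixed-string pattern file, would match every record. The sketch below shows the usual wgrib2 subsetting idiom such lists are written for; the product script that actually consumes these files is not part of this diff, so the exact invocation is an assumption and the input/output file names are placeholders.

    # Sketch only (assumed usage): subset a full GEFS pgrb2 file down to the
    # records named in one of the paramlists added by this PR.
    # IN.grib2 and OUT.grib2 are placeholder paths, not names from this PR.
    wgrib2 IN.grib2 -s | \
      grep -F -f parm/product/gefs.0p25.fFFF.paramlist.a.txt | \
      wgrib2 -i IN.grib2 -grib OUT.grib2

The same pattern applies to the .b lists and to the coarser grids; note that the 1p00 lists further down are added as symlinks to the 0p50 lists, so both resolutions share a single set of match patterns.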
diff --git a/parm/product/gefs.0p25.fFFF.paramlist.b.txt b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
new file mode 100644
index 0000000000..f7fdb73ddf
--- /dev/null
+++ b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
@@ -0,0 +1,554 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:ACPCP:surface:
+:ALBDO:surface:
+:BRTMP:top of atmosphere:
+:CAPE:255-0 mb above ground:
+:CDUVB:surface:
+:CIN:255-0 mb above ground:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CPRAT:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:CWORK:entire atmosphere (considered as a single layer):
+:DPT:30-0 mb above ground:
+:DUVB:surface:
+:FLDCP:surface:
+:FRICV:surface:
+:GFLUX:surface:
+:HGT:0C isotherm:
+:HGT:1000 mb:
+:HGT:100 mb:
+:HGT:10 mb:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:200 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:500 mb:
+:HGT:50 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:700 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:850 mb:
+:HGT:900 mb:
+:HGT:925 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:MNTSF:320 K isentropic level:
+:NCPCP:surface:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRATE:surface:
+:PRES:80 m above ground:
+:PRES:convective cloud bottom level:
+:PRES:convective cloud top level:
+:PRES:high cloud bottom level:
+:PRES:high cloud top level:
+:PRES:low cloud bottom level:
+:PRES:low cloud top level:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:middle cloud bottom level:
+:PRES:middle cloud top level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:1000 mb:
+:RH:100 mb:
+:RH:10 mb:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:200 mb:
+:RH:20 mb:
+:RH:250 mb:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:500 mb:
+:RH:50 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:700 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:850 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:925 mb:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SNOWC:surface:
+:SNOHF:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TCDC:boundary layer cloud layer:
+:TCDC:convective cloud layer:
+:TCDC:high cloud layer:
+:TCDC:low cloud layer:
+:TCDC:middle cloud layer:
+:TMP:0.995 sigma level:
+:TMP:1000 mb:
+:TMP:100 m above ground:
+:TMP:100 mb:
+:TMP:10 mb:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:200 mb:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:250 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 K isentropic level:
+:TMP:450 mb:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:500 mb:
+:TMP:50 mb:
+:TMP:5 mb:
+:TMP:550 K isentropic level:
+:TMP:550 mb:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 K isentropic level:
+:TMP:650 mb:
+:TMP:700 mb:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:850 mb:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:925 mb:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:high cloud top level:
+:TMP:low cloud top level:
+:TMP:max wind:
+:TMP:middle cloud top level:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UFLX:surface:
+:UGRD:0.995 sigma level:
+:UGRD:1000 mb:
+:UGRD:100 m above ground:
+:UGRD:100 mb:
+:UGRD:10 mb:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:200 mb:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:250 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:300 mb:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:400 mb:
+:UGRD:450 K isentropic level:
+:UGRD:450 mb:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:500 mb:
+:UGRD:50 mb:
+:UGRD:5 mb:
+:UGRD:550 K isentropic level:
+:UGRD:550 mb:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 K isentropic level:
+:UGRD:650 mb:
+:UGRD:700 mb:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:850 mb:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:925 mb:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:U-GWD:surface:
+:USTM:6000-0 m above ground:
+:USWRF:top of atmosphere:
+:APTMP:2 m above ground:
+:VFLX:surface:
+:VGRD:0.995 sigma level:
+:VGRD:1000 mb:
+:VGRD:100 m above ground:
+:VGRD:100 mb:
+:VGRD:10 mb:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:200 mb:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:250 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:300 mb:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:400 mb:
+:VGRD:450 K isentropic level:
+:VGRD:450 mb:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:500 mb:
+:VGRD:50 mb:
+:VGRD:5 mb:
+:VGRD:550 K isentropic level:
+:VGRD:550 mb:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 K isentropic level:
+:VGRD:650 mb:
+:VGRD:700 mb:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:850 mb:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:925 mb:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:V-GWD:surface:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:850 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WATR:surface:
+:WILT:surface:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.0p50.f000.paramlist.a.txt b/parm/product/gefs.0p50.f000.paramlist.a.txt
new file mode 100644
index 0000000000..ab8e73f552
--- /dev/null
+++ b/parm/product/gefs.0p50.f000.paramlist.a.txt
@@ -0,0 +1,80 @@
+############################# sorted pgrb2a 201408
+:CAPE:180-0 mb above ground:
+:CIN:180-0 mb above ground:
+:DLWRF:surface:
+:DSWRF:surface:
+:HGT:10 mb:
+:HGT:100 mb:
+:HGT:1000 mb:
+:HGT:200 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:50 mb:
+:HGT:500 mb:
+:HGT:700 mb:
+:HGT:850 mb:
+:HGT:925 mb:
+:HGT:surface:
+:ICETK:surface:
+:LHTFL:surface:
+:PRES:surface:
+:PRMSL:mean sea level:
+:PWAT:entire atmosphere (considered as a single layer):
+:RH:10 mb:
+:RH:100 mb:
+:RH:1000 mb:
+:RH:2 m above ground:
+:RH:200 mb:
+:RH:250 mb:
+:RH:50 mb:
+:RH:500 mb:
+:RH:700 mb:
+:RH:850 mb:
+:RH:925 mb:
+:SHTFL:surface:
+:SNOD:surface:
+:SOILW:0-0.1 m below ground:
+:TMP:10 mb:
+:TMP:100 mb:
+:TMP:1000 mb:
+:TMP:2 m above ground:
+:TMP:200 mb:
+:TMP:250 mb:
+:TMP:50 mb:
+:TMP:500 mb:
+:TMP:700 mb:
+:TMP:850 mb:
+:TMP:925 mb:
+:TSOIL:0-0.1 m below ground:
+:UGRD:10 m above ground:
+:UGRD:10 mb:
+:UGRD:100 mb:
+:UGRD:1000 mb:
+:UGRD:200 mb:
+:UGRD:250 mb:
+:UGRD:300 mb:
+:UGRD:400 mb:
+:UGRD:50 mb:
+:UGRD:500 mb:
+:UGRD:700 mb:
+:UGRD:850 mb:
+:UGRD:925 mb:
+:ULWRF:surface:
+:ULWRF:top of atmosphere:
+:USWRF:surface:
+:VGRD:10 m above ground:
+:VGRD:10 mb:
+:VGRD:100 mb:
+:VGRD:1000 mb:
+:VGRD:200 mb:
+:VGRD:250 mb:
+:VGRD:300 mb:
+:VGRD:400 mb:
+:VGRD:50 mb:
+:VGRD:500 mb:
+:VGRD:700 mb:
+:VGRD:850 mb:
+:VGRD:925 mb:
+:VVEL:850 mb:
+:WEASD:surface:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.0p50.f000.paramlist.b.txt b/parm/product/gefs.0p50.f000.paramlist.b.txt
new file mode 100644
index 0000000000..8fd65468ae
--- /dev/null
+++ b/parm/product/gefs.0p50.f000.paramlist.b.txt
@@ -0,0 +1,474 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:BRTMP:top of atmosphere:
+:CAPE:255-0 mb above ground:
+:CAPE:surface:
+:CIN:255-0 mb above ground:
+:CIN:surface:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CPOFP:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:DPT:2 m above ground:
+:DPT:30-0 mb above ground:
+:FLDCP:surface:
+:FRICV:surface:
+:GUST:surface:
+:HGT:0C isotherm:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:900 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HLCY:3000-0 m above ground:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:MNTSF:320 K isentropic level:
+:MSLET:mean sea level:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRES:80 m above ground:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:20 mb:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SNOHF:surface:
+:SNOWC:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TMP:0.995 sigma level:
+:TMP:100 m above ground:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 mb:
+:TMP:450 K isentropic level:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:5 mb:
+:TMP:550 mb:
+:TMP:550 K isentropic level:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 mb:
+:TMP:650 K isentropic level:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:max wind:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UGRD:0.995 sigma level:
+:UGRD:100 m above ground:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:450 mb:
+:UGRD:450 K isentropic level:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:5 mb:
+:UGRD:550 mb:
+:UGRD:550 K isentropic level:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 mb:
+:UGRD:650 K isentropic level:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:USTM:6000-0 m above ground:
+:APTMP:2 m above ground:
+:VGRD:0.995 sigma level:
+:VGRD:100 m above ground:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:450 mb:
+:VGRD:450 K isentropic level:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:5 mb:
+:VGRD:550 mb:
+:VGRD:550 K isentropic level:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 mb:
+:VGRD:650 K isentropic level:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:VIS:surface:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WILT:surface:
+:HGT:cloud ceiling:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
diff --git a/parm/product/gefs.0p50.fFFF.paramlist.a.txt b/parm/product/gefs.0p50.fFFF.paramlist.a.txt
new file mode 100644
index 0000000000..dde635408c
--- /dev/null
+++ b/parm/product/gefs.0p50.fFFF.paramlist.a.txt
@@ -0,0 +1,87 @@
+############################# sorted pgrb2a 201408
+:APCP:surface:
+:CAPE:180-0 mb above ground:
+:CFRZR:surface:
+:CICEP:surface:
+:CIN:180-0 mb above ground:
+:CRAIN:surface:
+:CSNOW:surface:
+:DLWRF:surface:
+:DSWRF:surface:
+:HGT:10 mb:
+:HGT:100 mb:
+:HGT:1000 mb:
+:HGT:200 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:50 mb:
+:HGT:500 mb:
+:HGT:700 mb:
+:HGT:850 mb:
+:HGT:925 mb:
+:LHTFL:surface:
+:ICETK:surface:
+:PRES:surface:
+:PRMSL:mean sea level:
+:PWAT:entire atmosphere (considered as a single layer):
+:RH:10 mb:
+:RH:100 mb:
+:RH:1000 mb:
+:RH:2 m above ground:
+:RH:200 mb:
+:RH:250 mb:
+:RH:50 mb:
+:RH:500 mb:
+:RH:700 mb:
+:RH:850 mb:
+:RH:925 mb:
+:SHTFL:surface:
+:SNOD:surface:
+:SOILW:0-0.1 m below ground:
+:TCDC:entire atmosphere:
+:TMAX:2 m above ground:
+:TMIN:2 m above ground:
+:TMP:10 mb:
+:TMP:100 mb:
+:TMP:1000 mb:
+:TMP:2 m above ground:
+:TMP:200 mb:
+:TMP:250 mb:
+:TMP:50 mb:
+:TMP:500 mb:
+:TMP:700 mb:
+:TMP:850 mb:
+:TMP:925 mb:
+:TSOIL:0-0.1 m below ground:
+:UGRD:10 m above ground:
+:UGRD:10 mb:
+:UGRD:100 mb:
+:UGRD:1000 mb:
+:UGRD:200 mb:
+:UGRD:250 mb:
+:UGRD:300 mb:
+:UGRD:400 mb:
+:UGRD:50 mb:
+:UGRD:500 mb:
+:UGRD:700 mb:
+:UGRD:850 mb:
+:UGRD:925 mb:
+:ULWRF:surface:
+:ULWRF:top of atmosphere:
+:USWRF:surface:
+:VGRD:10 m above ground:
+:VGRD:10 mb:
+:VGRD:100 mb:
+:VGRD:1000 mb:
+:VGRD:200 mb:
+:VGRD:250 mb:
+:VGRD:300 mb:
+:VGRD:400 mb:
+:VGRD:50 mb:
+:VGRD:500 mb:
+:VGRD:700 mb:
+:VGRD:850 mb:
+:VGRD:925 mb:
+:VVEL:850 mb:
+:WEASD:surface:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.0p50.fFFF.paramlist.b.txt b/parm/product/gefs.0p50.fFFF.paramlist.b.txt
new file mode 100644
index 0000000000..28b98db7d5
--- /dev/null
+++ b/parm/product/gefs.0p50.fFFF.paramlist.b.txt
@@ -0,0 +1,506 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:ACPCP:surface:
+:ALBDO:surface:
+:BRTMP:top of atmosphere:
+:CAPE:255-0 mb above ground:
+:CAPE:surface:
+:CDUVB:surface:
+:CIN:255-0 mb above ground:
+:CIN:surface:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CPOFP:surface:
+:CPRAT:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:CWORK:entire atmosphere (considered as a single layer):
+:DPT:2 m above ground:
+:DPT:30-0 mb above ground:
+:DUVB:surface:
+:FLDCP:surface:
+:FRICV:surface:
+:GFLUX:surface:
+:GUST:surface:
+:HGT:0C isotherm:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:900 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HLCY:3000-0 m above ground:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:MNTSF:320 K isentropic level:
+:MSLET:mean sea level:
+:NCPCP:surface:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRATE:surface:
+:PRES:80 m above ground:
+:PRES:convective cloud bottom level:
+:PRES:convective cloud top level:
+:PRES:high cloud bottom level:
+:PRES:high cloud top level:
+:PRES:low cloud bottom level:
+:PRES:low cloud top level:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:middle cloud bottom level:
+:PRES:middle cloud top level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:20 mb:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SNOWC:surface:
+:SNOHF:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TCDC:boundary layer cloud layer:
+:TCDC:convective cloud layer:
+:TCDC:high cloud layer:
+:TCDC:low cloud layer:
+:TCDC:middle cloud layer:
+:TMP:0.995 sigma level:
+:TMP:100 m above ground:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 K isentropic level:
+:TMP:450 mb:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:5 mb:
+:TMP:550 K isentropic level:
+:TMP:550 mb:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 K isentropic level:
+:TMP:650 mb:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:high cloud top level:
+:TMP:low cloud top level:
+:TMP:max wind:
+:TMP:middle cloud top level:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UFLX:surface:
+:UGRD:0.995 sigma level:
+:UGRD:100 m above ground:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:450 K isentropic level:
+:UGRD:450 mb:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:5 mb:
+:UGRD:550 K isentropic level:
+:UGRD:550 mb:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 K isentropic level:
+:UGRD:650 mb:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:U-GWD:surface:
+:USTM:6000-0 m above ground:
+:USWRF:top of atmosphere:
+:APTMP:2 m above ground:
+:VFLX:surface:
+:VGRD:0.995 sigma level:
+:VGRD:100 m above ground:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:450 K isentropic level:
+:VGRD:450 mb:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:5 mb:
+:VGRD:550 K isentropic level:
+:VGRD:550 mb:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 K isentropic level:
+:VGRD:650 mb:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:V-GWD:surface:
+:VIS:surface:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WATR:surface:
+:WILT:surface:
+:HGT:cloud ceiling:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
diff --git a/parm/product/gefs.1p00.f000.paramlist.a.txt b/parm/product/gefs.1p00.f000.paramlist.a.txt
new file mode 120000
index 0000000000..69265297d3
--- /dev/null
+++ b/parm/product/gefs.1p00.f000.paramlist.a.txt
@@ -0,0 +1 @@
+gefs.0p50.f000.paramlist.a.txt
\ No newline at end of file
diff --git a/parm/product/gefs.1p00.f000.paramlist.b.txt b/parm/product/gefs.1p00.f000.paramlist.b.txt
new file mode 120000
index 0000000000..a51f2079e2
--- /dev/null
+++ b/parm/product/gefs.1p00.f000.paramlist.b.txt
@@ -0,0 +1 @@
+gefs.0p50.f000.paramlist.b.txt
\ No newline at end of file
diff --git a/parm/product/gefs.1p00.fFFF.paramlist.a.txt b/parm/product/gefs.1p00.fFFF.paramlist.a.txt
new file mode 120000
index 0000000000..c131b24c02
--- /dev/null
+++ b/parm/product/gefs.1p00.fFFF.paramlist.a.txt
@@ -0,0 +1 @@
+gefs.0p50.fFFF.paramlist.a.txt
\ No newline at end of file
diff --git a/parm/product/gefs.1p00.fFFF.paramlist.b.txt b/parm/product/gefs.1p00.fFFF.paramlist.b.txt
new file mode 120000
index 0000000000..0f2fb179cb
--- /dev/null
+++ b/parm/product/gefs.1p00.fFFF.paramlist.b.txt
@@ -0,0 +1 @@
+gefs.0p50.fFFF.paramlist.b.txt
\ No newline at end of file
diff --git a/parm/product/gefs.2p50.f000.paramlist.a.txt b/parm/product/gefs.2p50.f000.paramlist.a.txt
new file mode 100644
index 0000000000..4d2219ce8c
--- /dev/null
+++ b/parm/product/gefs.2p50.f000.paramlist.a.txt
@@ -0,0 +1,23 @@
+############################# sorted pgrb2a 201408
+:HGT:surface:
+:HGT:1000 mb:
+:HGT:500 mb:
+:PRMSL:mean sea level:
+:RH:700 mb:
+:TMP:2 m above ground:
+:TMP:850 mb:
+:UGRD:10 m above ground:
+:UGRD:200 mb:
+:UGRD:250 mb:
+:UGRD:850 mb:
+:VGRD:10 m above ground:
+:VGRD:200 mb:
+:VGRD:250 mb:
+:VGRD:850 mb:
+:APCP:surface:
+:CSNOW:surface:
+:CRAIN:surface:
+:CICEP:surface:
+:CFRZR:surface:
+:ULWRF:top of atmosphere:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.2p50.f000.paramlist.b.txt b/parm/product/gefs.2p50.f000.paramlist.b.txt
new file mode 100644
index 0000000000..f2610c5f77
--- /dev/null
+++ b/parm/product/gefs.2p50.f000.paramlist.b.txt
@@ -0,0 +1,530 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:BRTMP:top of atmosphere:
+:CAPE:180-0 mb above ground:
+:CAPE:255-0 mb above ground:
+:CAPE:surface:
+:CIN:180-0 mb above ground:
+:CIN:255-0 mb above ground:
+:CIN:surface:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CPOFP:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:DPT:2 m above ground:
+:DPT:30-0 mb above ground:
+:FLDCP:surface:
+:FRICV:surface:
+:GUST:surface:
+:HGT:0C isotherm:
+:HGT:100 mb:
+:HGT:10 mb:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:200 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:50 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:700 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:850 mb:
+:HGT:900 mb:
+:HGT:925 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HLCY:3000-0 m above ground:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICETK:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:MNTSF:320 K isentropic level:
+:MSLET:mean sea level:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRES:80 m above ground:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:PWAT:entire atmosphere (considered as a single layer):
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:1000 mb:
+:RH:100 mb:
+:RH:10 mb:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:200 mb:
+:RH:20 mb:
+:RH:250 mb:
+:RH:2 m above ground:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:500 mb:
+:RH:50 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:850 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:925 mb:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SNOD:surface:
+:SNOHF:surface:
+:SNOWC:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0-0.1 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TMP:0.995 sigma level:
+:TMP:1000 mb:
+:TMP:100 m above ground:
+:TMP:100 mb:
+:TMP:10 mb:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:200 mb:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:250 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 mb:
+:TMP:450 K isentropic level:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:500 mb:
+:TMP:50 mb:
+:TMP:5 mb:
+:TMP:550 mb:
+:TMP:550 K isentropic level:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 mb:
+:TMP:650 K isentropic level:
+:TMP:700 mb:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:925 mb:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:max wind:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0-0.1 m below ground:
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UGRD:0.995 sigma level:
+:UGRD:1000 mb:
+:UGRD:100 m above ground:
+:UGRD:100 mb:
+:UGRD:10 mb:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:300 mb:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:400 mb:
+:UGRD:450 mb:
+:UGRD:450 K isentropic level:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:500 mb:
+:UGRD:50 mb:
+:UGRD:5 mb:
+:UGRD:550 mb:
+:UGRD:550 K isentropic level:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 mb:
+:UGRD:650 K isentropic level:
+:UGRD:700 mb:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:925 mb:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:USTM:6000-0 m above ground:
+:APTMP:2 m above ground:
+:VGRD:0.995 sigma level:
+:VGRD:1000 mb:
+:VGRD:100 m above ground:
+:VGRD:100 mb:
+:VGRD:10 mb:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:300 mb:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:400 mb:
+:VGRD:450 mb:
+:VGRD:450 K isentropic level:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:500 mb:
+:VGRD:50 mb:
+:VGRD:5 mb:
+:VGRD:550 mb:
+:VGRD:550 K isentropic level:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 mb:
+:VGRD:650 K isentropic level:
+:VGRD:700 mb:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:925 mb:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:VIS:surface:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:850 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WEASD:surface:
+:WILT:surface:
+:HGT:cloud ceiling:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
diff --git a/parm/product/gefs.2p50.fFFF.paramlist.a.txt b/parm/product/gefs.2p50.fFFF.paramlist.a.txt
new file mode 100644
index 0000000000..11b6a8aef3
--- /dev/null
+++ b/parm/product/gefs.2p50.fFFF.paramlist.a.txt
@@ -0,0 +1,22 @@
+############################# sorted pgrb2a 201408
+:HGT:1000 mb:
+:HGT:500 mb:
+:PRMSL:mean sea level:
+:RH:700 mb:
+:TMP:2 m above ground:
+:TMP:850 mb:
+:UGRD:10 m above ground:
+:UGRD:200 mb:
+:UGRD:250 mb:
+:UGRD:850 mb:
+:VGRD:10 m above ground:
+:VGRD:200 mb:
+:VGRD:250 mb:
+:VGRD:850 mb:
+:APCP:surface:
+:CSNOW:surface:
+:CRAIN:surface:
+:CICEP:surface:
+:CFRZR:surface:
+:ULWRF:top of atmosphere:
+;############################ do not leave a blank line at the end
diff --git a/parm/product/gefs.2p50.fFFF.paramlist.b.txt b/parm/product/gefs.2p50.fFFF.paramlist.b.txt
new file mode 100644
index 0000000000..8c05d49271
--- /dev/null
+++ b/parm/product/gefs.2p50.fFFF.paramlist.b.txt
@@ -0,0 +1,571 @@
+############################# sorted pgrb2a + pgrb2b 201502
+:4LFTX:surface:
+:5WAVH:500 mb:
+:ABSV:1000 mb:
+:ABSV:100 mb:
+:ABSV:10 mb:
+:ABSV:150 mb:
+:ABSV:200 mb:
+:ABSV:20 mb:
+:ABSV:250 mb:
+:ABSV:300 mb:
+:ABSV:30 mb:
+:ABSV:350 mb:
+:ABSV:400 mb:
+:ABSV:450 mb:
+:ABSV:500 mb:
+:ABSV:50 mb:
+:ABSV:550 mb:
+:ABSV:600 mb:
+:ABSV:650 mb:
+:ABSV:700 mb:
+:ABSV:70 mb:
+:ABSV:750 mb:
+:ABSV:800 mb:
+:ABSV:850 mb:
+:ABSV:900 mb:
+:ABSV:925 mb:
+:ABSV:950 mb:
+:ABSV:975 mb:
+:ACPCP:surface:
+:ALBDO:surface:
+:BRTMP:top of atmosphere:
+:CAPE:180-0 mb above ground:
+:CAPE:255-0 mb above ground:
+:CAPE:surface:
+:CDUVB:surface:
+:CIN:180-0 mb above ground:
+:CIN:255-0 mb above ground:
+:CIN:surface:
+:CLWMR:1000 mb:
+:CLWMR:100 mb:
+:CLWMR:10 mb:
+:CLWMR:150 mb:
+:CLWMR:200 mb:
+:CLWMR:20 mb:
+:CLWMR:250 mb:
+:CLWMR:300 mb:
+:CLWMR:30 mb:
+:CLWMR:350 mb:
+:CLWMR:400 mb:
+:CLWMR:450 mb:
+:CLWMR:500 mb:
+:CLWMR:50 mb:
+:CLWMR:550 mb:
+:CLWMR:600 mb:
+:CLWMR:650 mb:
+:CLWMR:700 mb:
+:CLWMR:70 mb:
+:CLWMR:750 mb:
+:CLWMR:800 mb:
+:CLWMR:850 mb:
+:CLWMR:900 mb:
+:CLWMR:925 mb:
+:CLWMR:950 mb:
+:CLWMR:975 mb:
+:CNWAT:surface:
+:CPOFP:surface:
+:CPRAT:surface:
+:CWAT:entire atmosphere (considered as a single layer):
+:CWORK:entire atmosphere (considered as a single layer):
+:DLWRF:surface:
+:DPT:2 m above ground:
+:DPT:30-0 mb above ground:
+:DSWRF:surface:
+:DUVB:surface:
+:FLDCP:surface:
+:FRICV:surface:
+:GFLUX:surface:
+:GUST:surface:
+:HGT:0C isotherm:
+:HGT:100 mb:
+:HGT:10 mb:
+:HGT:1 mb:
+:HGT:150 mb:
+:HGT:200 mb:
+:HGT:20 mb:
+:HGT:2 mb:
+:HGT:250 mb:
+:HGT:300 mb:
+:HGT:30 mb:
+:HGT:3 mb:
+:HGT:350 mb:
+:HGT:400 mb:
+:HGT:450 mb:
+:HGT:50 mb:
+:HGT:5 mb:
+:HGT:550 mb:
+:HGT:600 mb:
+:HGT:650 mb:
+:HGT:700 mb:
+:HGT:70 mb:
+:HGT:7 mb:
+:HGT:750 mb:
+:HGT:800 mb:
+:HGT:850 mb:
+:HGT:900 mb:
+:HGT:925 mb:
+:HGT:950 mb:
+:HGT:975 mb:
+:HGT:highest tropospheric freezing level:
+:HGT:max wind:
+:HGT:PV=-1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=1.5e-06 (Km^2/kg/s) surface:
+:HGT:PV=-1e-06 (Km^2/kg/s) surface:
+:HGT:PV=1e-06 (Km^2/kg/s) surface:
+:HGT:PV=-2e-06 (Km^2/kg/s) surface:
+:HGT:PV=2e-06 (Km^2/kg/s) surface:
+:HGT:PV=-5e-07 (Km^2/kg/s) surface:
+:HGT:PV=5e-07 (Km^2/kg/s) surface:
+:HGT:surface:
+:HGT:tropopause:
+:HINDEX:surface:
+:HLCY:3000-0 m above ground:
+:HPBL:surface:
+:ICAHT:max wind:
+:ICAHT:tropopause:
+:ICEC:surface:
+:ICETK:surface:
+:ICIP:300 mb:
+:ICIP:400 mb:
+:ICIP:500 mb:
+:ICIP:600 mb:
+:ICIP:700 mb:
+:ICIP:800 mb:
+:ICSEV:300 mb:
+:ICSEV:400 mb:
+:ICSEV:500 mb:
+:ICSEV:600 mb:
+:ICSEV:700 mb:
+:ICSEV:800 mb:
+:LAND:surface:
+:LFTX:surface:
+:LHTFL:surface:
+:MNTSF:320 K isentropic level:
+:MSLET:mean sea level:
+:NCPCP:surface:
+:O3MR:100 mb:
+:O3MR:10 mb:
+:O3MR:125 mb:
+:O3MR:150 mb:
+:O3MR:1 mb:
+:O3MR:200 mb:
+:O3MR:20 mb:
+:O3MR:250 mb:
+:O3MR:2 mb:
+:O3MR:300 mb:
+:O3MR:30 mb:
+:O3MR:350 mb:
+:O3MR:3 mb:
+:O3MR:400 mb:
+:O3MR:50 mb:
+:O3MR:5 mb:
+:O3MR:70 mb:
+:O3MR:7 mb:
+:PEVPR:surface:
+:PLI:30-0 mb above ground:
+:PLPL:255-0 mb above ground:
+:POT:0.995 sigma level:
+:PRATE:surface:
+:PRES:80 m above ground:
+:PRES:convective cloud bottom level:
+:PRES:convective cloud top level:
+:PRES:high cloud bottom level:
+:PRES:high cloud top level:
+:PRES:low cloud bottom level:
+:PRES:low cloud top level:
+:PRES:max wind:
+:PRES:mean sea level:
+:PRES:middle cloud bottom level:
+:PRES:middle cloud top level:
+:PRES:PV=-1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=1.5e-06 (Km^2/kg/s) surface:
+:PRES:PV=-1e-06 (Km^2/kg/s) surface:
+:PRES:PV=1e-06 (Km^2/kg/s) surface:
+:PRES:PV=-2e-06 (Km^2/kg/s) surface:
+:PRES:PV=2e-06 (Km^2/kg/s) surface:
+:PRES:PV=-5e-07 (Km^2/kg/s) surface:
+:PRES:PV=5e-07 (Km^2/kg/s) surface:
+:PRES:surface:
+:PRES:tropopause:
+:PVORT:310 K isentropic level:
+:PVORT:320 K isentropic level:
+:PVORT:350 K isentropic level:
+:PVORT:450 K isentropic level:
+:PVORT:550 K isentropic level:
+:PVORT:650 K isentropic level:
+:PWAT:30-0 mb above ground:
+:PWAT:entire atmosphere (considered as a single layer):
+:RH:0.33-1 sigma layer:
+:RH:0.44-0.72 sigma layer:
+:RH:0.44-1 sigma layer:
+:RH:0.72-0.94 sigma layer:
+:RH:0.995 sigma level:
+:RH:0C isotherm:
+:RH:1000 mb:
+:RH:100 mb:
+:RH:10 mb:
+:RH:120-90 mb above ground:
+:RH:150-120 mb above ground:
+:RH:150 mb:
+:RH:180-150 mb above ground:
+:RH:200 mb:
+:RH:20 mb:
+:RH:250 mb:
+:RH:2 m above ground:
+:RH:300 mb:
+:RH:30-0 mb above ground:
+:RH:30 mb:
+:RH:350 mb:
+:RH:400 mb:
+:RH:450 mb:
+:RH:500 mb:
+:RH:50 mb:
+:RH:550 mb:
+:RH:600 mb:
+:RH:60-30 mb above ground:
+:RH:650 mb:
+:RH:70 mb:
+:RH:750 mb:
+:RH:800 mb:
+:RH:850 mb:
+:RH:900 mb:
+:RH:90-60 mb above ground:
+:RH:925 mb:
+:RH:950 mb:
+:RH:975 mb:
+:RH:entire atmosphere (considered as a single layer):
+:RH:highest tropospheric freezing level:
+:SFCR:surface:
+:SHTFL:surface:
+:SNOD:surface:
+:SNOWC:surface:
+:SNOHF:surface:
+:SOILL:0-0.1 m below ground:
+:SOILL:0.1-0.4 m below ground:
+:SOILL:0.4-1 m below ground:
+:SOILL:1-2 m below ground:
+:SOILW:0-0.1 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:SPFH:1000 mb:
+:SPFH:100 mb:
+:SPFH:10 mb:
+:SPFH:1 mb:
+:SPFH:120-90 mb above ground:
+:SPFH:150-120 mb above ground:
+:SPFH:150 mb:
+:SPFH:180-150 mb above ground:
+:SPFH:200 mb:
+:SPFH:20 mb:
+:SPFH:2 mb:
+:SPFH:250 mb:
+:SPFH:2 m above ground:
+:SPFH:300 mb:
+:SPFH:30-0 mb above ground:
+:SPFH:30 mb:
+:SPFH:3 mb:
+:SPFH:350 mb:
+:SPFH:400 mb:
+:SPFH:450 mb:
+:SPFH:500 mb:
+:SPFH:50 mb:
+:SPFH:5 mb:
+:SPFH:550 mb:
+:SPFH:600 mb:
+:SPFH:60-30 mb above ground:
+:SPFH:650 mb:
+:SPFH:700 mb:
+:SPFH:70 mb:
+:SPFH:7 mb:
+:SPFH:750 mb:
+:SPFH:800 mb:
+:SPFH:80 m above ground:
+:SPFH:850 mb:
+:SPFH:900 mb:
+:SPFH:90-60 mb above ground:
+:SPFH:925 mb:
+:SPFH:950 mb:
+:SPFH:975 mb:
+:SUNSD:surface:
+:TCDC:475 mb:
+:TCDC:boundary layer cloud layer:
+:TCDC:convective cloud layer:
+:TCDC:entire atmosphere:
+:TCDC:high cloud layer:
+:TCDC:low cloud layer:
+:TCDC:middle cloud layer:
+:TMAX:2 m above ground:
+:TMIN:2 m above ground:
+:TMP:0.995 sigma level:
+:TMP:1000 mb:
+:TMP:100 m above ground:
+:TMP:100 mb:
+:TMP:10 mb:
+:TMP:1 mb:
+:TMP:120-90 mb above ground:
+:TMP:150-120 mb above ground:
+:TMP:150 mb:
+:TMP:180-150 mb above ground:
+:TMP:1829 m above mean sea level:
+:TMP:200 mb:
+:TMP:20 mb:
+:TMP:2 mb:
+:TMP:250 mb:
+:TMP:2743 m above mean sea level:
+:TMP:300 mb:
+:TMP:30-0 mb above ground:
+:TMP:305 m above mean sea level:
+:TMP:30 mb:
+:TMP:3 mb:
+:TMP:320 K isentropic level:
+:TMP:350 mb:
+:TMP:3658 m above mean sea level:
+:TMP:400 mb:
+:TMP:450 K isentropic level:
+:TMP:450 mb:
+:TMP:4572 m above mean sea level:
+:TMP:457 m above mean sea level:
+:TMP:500 mb:
+:TMP:50 mb:
+:TMP:5 mb:
+:TMP:550 K isentropic level:
+:TMP:550 mb:
+:TMP:600 mb:
+:TMP:60-30 mb above ground:
+:TMP:610 m above mean sea level:
+:TMP:650 K isentropic level:
+:TMP:650 mb:
+:TMP:700 mb:
+:TMP:70 mb:
+:TMP:7 mb:
+:TMP:750 mb:
+:TMP:800 mb:
+:TMP:80 m above ground:
+:TMP:900 mb:
+:TMP:90-60 mb above ground:
+:TMP:914 m above mean sea level:
+:TMP:925 mb:
+:TMP:950 mb:
+:TMP:975 mb:
+:TMP:high cloud top level:
+:TMP:low cloud top level:
+:TMP:max wind:
+:TMP:middle cloud top level:
+:TMP:PV=-1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=1.5e-06 (Km^2/kg/s) surface:
+:TMP:PV=-1e-06 (Km^2/kg/s) surface:
+:TMP:PV=1e-06 (Km^2/kg/s) surface:
+:TMP:PV=-2e-06 (Km^2/kg/s) surface:
+:TMP:PV=2e-06 (Km^2/kg/s) surface:
+:TMP:PV=-5e-07 (Km^2/kg/s) surface:
+:TMP:PV=5e-07 (Km^2/kg/s) surface:
+:TMP:surface:
+:TMP:tropopause:
+:TOZNE:entire atmosphere (considered as a single layer):
+:TSOIL:0-0.1 m below ground:
+:TSOIL:0.1-0.4 m below ground:
+:TSOIL:0.4-1 m below ground:
+:TSOIL:1-2 m below ground:
+:UFLX:surface:
+:UGRD:0.995 sigma level:
+:UGRD:1000 mb:
+:UGRD:100 m above ground:
+:UGRD:100 mb:
+:UGRD:10 mb:
+:UGRD:1 mb:
+:UGRD:120-90 mb above ground:
+:UGRD:150-120 mb above ground:
+:UGRD:150 mb:
+:UGRD:180-150 mb above ground:
+:UGRD:1829 m above mean sea level:
+:UGRD:20 mb:
+:UGRD:2 mb:
+:UGRD:2743 m above mean sea level:
+:UGRD:300 mb:
+:UGRD:30-0 mb above ground:
+:UGRD:305 m above mean sea level:
+:UGRD:30 mb:
+:UGRD:3 mb:
+:UGRD:320 K isentropic level:
+:UGRD:350 mb:
+:UGRD:3658 m above mean sea level:
+:UGRD:400 mb:
+:UGRD:450 K isentropic level:
+:UGRD:450 mb:
+:UGRD:4572 m above mean sea level:
+:UGRD:457 m above mean sea level:
+:UGRD:500 mb:
+:UGRD:50 mb:
+:UGRD:5 mb:
+:UGRD:550 K isentropic level:
+:UGRD:550 mb:
+:UGRD:600 mb:
+:UGRD:60-30 mb above ground:
+:UGRD:610 m above mean sea level:
+:UGRD:650 K isentropic level:
+:UGRD:650 mb:
+:UGRD:700 mb:
+:UGRD:70 mb:
+:UGRD:7 mb:
+:UGRD:750 mb:
+:UGRD:800 mb:
+:UGRD:80 m above ground:
+:UGRD:900 mb:
+:UGRD:90-60 mb above ground:
+:UGRD:914 m above mean sea level:
+:UGRD:925 mb:
+:UGRD:950 mb:
+:UGRD:975 mb:
+:UGRD:max wind:
+:UGRD:planetary boundary layer:
+:UGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=1e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=2e-06 (Km^2/kg/s) surface:
+:UGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:UGRD:PV=5e-07 (Km^2/kg/s) surface:
+:UGRD:tropopause:
+:U-GWD:surface:
+:ULWRF:surface:
+:USTM:6000-0 m above ground:
+:USWRF:surface:
+:USWRF:top of atmosphere:
+:APTMP:2 m above ground:
+:VFLX:surface:
+:VGRD:0.995 sigma level:
+:VGRD:1000 mb:
+:VGRD:100 m above ground:
+:VGRD:100 mb:
+:VGRD:10 mb:
+:VGRD:1 mb:
+:VGRD:120-90 mb above ground:
+:VGRD:150-120 mb above ground:
+:VGRD:150 mb:
+:VGRD:180-150 mb above ground:
+:VGRD:1829 m above mean sea level:
+:VGRD:20 mb:
+:VGRD:2 mb:
+:VGRD:2743 m above mean sea level:
+:VGRD:300 mb:
+:VGRD:30-0 mb above ground:
+:VGRD:305 m above mean sea level:
+:VGRD:30 mb:
+:VGRD:3 mb:
+:VGRD:320 K isentropic level:
+:VGRD:350 mb:
+:VGRD:3658 m above mean sea level:
+:VGRD:400 mb:
+:VGRD:450 K isentropic level:
+:VGRD:450 mb:
+:VGRD:4572 m above mean sea level:
+:VGRD:457 m above mean sea level:
+:VGRD:500 mb:
+:VGRD:50 mb:
+:VGRD:5 mb:
+:VGRD:550 K isentropic level:
+:VGRD:550 mb:
+:VGRD:600 mb:
+:VGRD:60-30 mb above ground:
+:VGRD:610 m above mean sea level:
+:VGRD:650 K isentropic level:
+:VGRD:650 mb:
+:VGRD:700 mb:
+:VGRD:70 mb:
+:VGRD:7 mb:
+:VGRD:750 mb:
+:VGRD:800 mb:
+:VGRD:80 m above ground:
+:VGRD:900 mb:
+:VGRD:90-60 mb above ground:
+:VGRD:914 m above mean sea level:
+:VGRD:925 mb:
+:VGRD:950 mb:
+:VGRD:975 mb:
+:VGRD:max wind:
+:VGRD:planetary boundary layer:
+:VGRD:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1.5e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=1e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=2e-06 (Km^2/kg/s) surface:
+:VGRD:PV=-5e-07 (Km^2/kg/s) surface:
+:VGRD:PV=5e-07 (Km^2/kg/s) surface:
+:VGRD:tropopause:
+:V-GWD:surface:
+:VIS:surface:
+:VRATE:planetary boundary layer:
+:VSTM:6000-0 m above ground:
+:VVEL:0.995 sigma level:
+:VVEL:1 mb:
+:VVEL:2 mb:
+:VVEL:3 mb:
+:VVEL:5 mb:
+:VVEL:7 mb:
+:VVEL:10 mb:
+:VVEL:20 mb:
+:VVEL:30 mb:
+:VVEL:50 mb:
+:VVEL:70 mb:
+:VVEL:1000 mb:
+:VVEL:100 mb:
+:VVEL:150 mb:
+:VVEL:200 mb:
+:VVEL:250 mb:
+:VVEL:300 mb:
+:VVEL:350 mb:
+:VVEL:400 mb:
+:VVEL:450 mb:
+:VVEL:500 mb:
+:VVEL:550 mb:
+:VVEL:600 mb:
+:VVEL:650 mb:
+:VVEL:700 mb:
+:VVEL:750 mb:
+:VVEL:800 mb:
+:VVEL:850 mb:
+:VVEL:900 mb:
+:VVEL:925 mb:
+:VVEL:950 mb:
+:VVEL:975 mb:
+:VWSH:PV=-1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1.5e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=1e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=2e-06 (Km^2/kg/s) surface:
+:VWSH:PV=-5e-07 (Km^2/kg/s) surface:
+:VWSH:PV=5e-07 (Km^2/kg/s) surface:
+:VWSH:tropopause:
+:WATR:surface:
+:WEASD:surface:
+:WILT:surface:
+:HGT:cloud ceiling:
+:PRES:1 hybrid level:
+:HGT:1 hybrid level:
+:TMP:1 hybrid level:
+:RH:1 hybrid level:
+:UGRD:1 hybrid level:
+:VGRD:1 hybrid level:
+:PRES:2 hybrid level:
+:HGT:2 hybrid level:
+:TMP:2 hybrid level:
+:RH:2 hybrid level:
+:UGRD:2 hybrid level:
+:VGRD:2 hybrid level:
+:PRES:3 hybrid level:
+:HGT:3 hybrid level:
+:TMP:3 hybrid level:
+:RH:3 hybrid level:
+:UGRD:3 hybrid level:
+:VGRD:3 hybrid level:
+:PRES:4 hybrid level:
+:HGT:4 hybrid level:
+:TMP:4 hybrid level:
+:RH:4 hybrid level:
+:UGRD:4 hybrid level:
+:VGRD:4 hybrid level:
diff --git a/parm/product/gefs_ice_shortparmlist.parm b/parm/product/gefs_ice_shortparmlist.parm
new file mode 100644
index 0000000000..07db948fe3
--- /dev/null
+++ b/parm/product/gefs_ice_shortparmlist.parm
@@ -0,0 +1,10 @@
+aice_h
+hi_h
+Tsfc_h
+uvel_h
+vvel_h
+hs_h
+albsni_h
+melts_h
+meltb_h
+frzmlt_h
diff --git a/parm/product/gefs_ocn_shortparmlist.parm b/parm/product/gefs_ocn_shortparmlist.parm
new file mode 100644
index 0000000000..6673ddb16e
--- /dev/null
+++ b/parm/product/gefs_ocn_shortparmlist.parm
@@ -0,0 +1,9 @@
+temp
+SST
+SSH
+SSS
+MLD_003
+taux
+tauy
+SSU
+SSV
diff --git a/parm/product/gefs_shortparmlist_2d.parm b/parm/product/gefs_shortparmlist_2d.parm
new file mode 100644
index 0000000000..bc13101926
--- /dev/null
+++ b/parm/product/gefs_shortparmlist_2d.parm
@@ -0,0 +1,38 @@
+:PRES:surface:
+:WEASD:surface:
+:TMP:2 m above ground:
+:TMP:surface:
+:RH:2 m above ground:
+:TMAX:2 m above ground:
+:TMIN:2 m above ground:
+:UGRD:10 m above ground:
+:VGRD:10 m above ground:
+:APCP:surface:
+:CSNOW:surface:
+:CICEP:surface:
+:CFRZR:surface:
+:CRAIN:surface:
+:PWAT:entire atmosphere (considered as a single layer):
+:TCDC:entire atmosphere (considered as a single layer):
+:DSWRF:surface:
+:DLWRF:surface:
+:ULWRF:top of atmosphere:
+:HLCY:3000-0 m above ground:
+:CAPE:180-0 mb above ground:
+:CIN:180-0 mb above ground:
+:PRMSL:mean sea level:
+:USWRF:surface:
+:ULWRF:surface:
+:TSOIL:0-0.1 m below ground:
+:TSOIL:0.1-0.4 m below ground:
+:SOILW:0-0.1 m below ground:
+:SOILW:0.1-0.4 m below ground:
+:SOILW:0.4-1 m below ground:
+:SOILW:1-2 m below ground:
+:PEVPR:surface:
+:LHTFL:surface:
+:SHTFL:surface:
+:WATR:surface:
+:TSNOWP:surface:
+:FDNSSTMP:surface:
+:HGT:highest tropospheric freezing level:
diff --git a/parm/product/gefs_shortparmlist_3d_d.parm b/parm/product/gefs_shortparmlist_3d_d.parm
new file mode 100644
index 0000000000..37a2678826
--- /dev/null
+++ b/parm/product/gefs_shortparmlist_3d_d.parm
@@ -0,0 +1,34 @@
+:UGRD:1 mb:
+:UGRD:2 mb:
+:UGRD:3 mb:
+:UGRD:5 mb:
+:UGRD:7 mb:
+:UGRD:10 mb:
+:UGRD:20 mb:
+:UGRD:30 mb:
+:UGRD:50 mb:
+:UGRD:70 mb:
+:VGRD:1 mb:
+:VGRD:2 mb:
+:VGRD:3 mb:
+:VGRD:5 mb:
+:VGRD:7 mb:
+:VGRD:10 mb:
+:VGRD:20 mb:
+:VGRD:30 mb:
+:VGRD:50 mb:
+:VGRD:70 mb:
+:TMP:1 mb:
+:TMP:2 mb:
+:TMP:3 mb:
+:TMP:5 mb:
+:TMP:7 mb:
+:TMP:10 mb:
+:TMP:20 mb:
+:TMP:30 mb:
+:TMP:50 mb:
+:TMP:70 mb:
+:HGT:10 mb:
+:HGT:50 mb:
+:O3MR:10 mb:
+:O3MR:50 mb:
diff --git a/parm/product/gefs_shortparmlist_3d_h.parm b/parm/product/gefs_shortparmlist_3d_h.parm
new file mode 100644
index 0000000000..d7241f633c
--- /dev/null
+++ b/parm/product/gefs_shortparmlist_3d_h.parm
@@ -0,0 +1,45 @@
+:HGT:100 mb:
+:TMP:100 mb:
+:UGRD:100 mb:
+:VGRD:100 mb:
+:O3MR:100 mb:
+:HGT:200 mb:
+:TMP:200 mb:
+:RH:200 mb:
+:UGRD:200 mb:
+:VGRD:200 mb:
+:HGT:250 mb:
+:TMP:250 mb:
+:RH:250 mb:
+:UGRD:250 mb:
+:VGRD:250 mb:
+:HGT:500 mb:
+:TMP:500 mb:
+:RH:500 mb:
+:UGRD:500 mb:
+:VGRD:500 mb:
+:HGT:700 mb:
+:TMP:700 mb:
+:RH:700 mb:
+:UGRD:700 mb:
+:VGRD:700 mb:
+:HGT:850 mb:
+:TMP:850 mb:
+:RH:850 mb:
+:VVEL:850 mb:
+:UGRD:850 mb:
+:VGRD:850 mb:
+:HGT:925 mb:
+:TMP:925 mb:
+:RH:925 mb:
+:UGRD:925 mb:
+:VGRD:925 mb:
+:TMP:1000 mb:
+:RH:1000 mb:
+:UGRD:1000 mb:
+:VGRD:1000 mb:
+:HGT:1000 mb:
+:TMP:0.995 sigma level:
+:RH:0.995 sigma level:
+:UGRD:0.995 sigma level:
+:VGRD:0.995 sigma level:
diff --git a/parm/product/gefs_wav_shortparmlist.parm b/parm/product/gefs_wav_shortparmlist.parm
new file mode 100644
index 0000000000..a45e023c85
--- /dev/null
+++ b/parm/product/gefs_wav_shortparmlist.parm
@@ -0,0 +1,3 @@
+:UGRD:surface:
+:VGRD:surface:
+:HTSGW:surface:
diff --git a/parm/post/global_1x1_paramlist_g2.anl b/parm/product/gfs.anl.paramlist.a.txt
similarity index 99%
rename from parm/post/global_1x1_paramlist_g2.anl
rename to parm/product/gfs.anl.paramlist.a.txt
index dd340636d6..3c7ba28bb9 100644
--- a/parm/post/global_1x1_paramlist_g2.anl
+++ b/parm/product/gfs.anl.paramlist.a.txt
@@ -152,7 +152,6 @@ HLCY:3000-0 m above ground
 ICAHT:max wind
 ICAHT:tropopause
 ICETK:surface
-ICETMP:surface
 ICMR:50 mb
 ICMR:1000 mb
 ICMR:100 mb
diff --git a/parm/post/global_1x1_paramlist_g2.f000 b/parm/product/gfs.f000.paramlist.a.txt
similarity index 99%
rename from parm/post/global_1x1_paramlist_g2.f000
rename to parm/product/gfs.f000.paramlist.a.txt
index b1dcb09ccd..34d84ac3c1 100644
--- a/parm/post/global_1x1_paramlist_g2.f000
+++ b/parm/product/gfs.f000.paramlist.a.txt
@@ -166,7 +166,6 @@ ICAHT:max wind
 ICAHT:tropopause
 ICEC:surface
 ICETK:surface
-ICETMP:surface
 ICMR:50 mb
 ICMR:1000 mb
 ICMR:100 mb
@@ -697,4 +696,3 @@ WEASD:surface
 WILT:surface
 SNOD:surface
 VIS:surface
-ICEG:10 m above mean sea level
diff --git a/parm/post/global_1x1_paramlist_g2 b/parm/product/gfs.fFFF.paramlist.a.txt
similarity index 98%
rename from parm/post/global_1x1_paramlist_g2
rename to parm/product/gfs.fFFF.paramlist.a.txt
index 350a715bac..cb2f82f02a 100644
--- a/parm/post/global_1x1_paramlist_g2
+++ b/parm/product/gfs.fFFF.paramlist.a.txt
@@ -45,7 +45,6 @@ ACPCP:surface
 ALBDO:surface
 APCP:surface
 APTMP:2 m above ground
-var discipline=0 master_table=2 parmcat=0 parm=21:2 m above ground
 AOTK:entire atmosphere
 CAPE:180-0 mb above ground
 CAPE:255-0 mb above ground
@@ -177,7 +176,6 @@ ICAHT:max wind
 ICAHT:tropopause
 ICEC:surface
 ICETK:surface
-ICETMP:surface
 ICSEV:100 mb
 ICSEV:150 mb
 ICSEV:200 mb
@@ -758,4 +756,3 @@ WEASD:surface
 WILT:surface
 SNOD:surface
 VIS:surface
-ICEG:10 m above mean sea level
diff --git a/parm/post/global_master-catchup_parmlist_g2 b/parm/product/gfs.fFFF.paramlist.b.txt
similarity index 100%
rename from parm/post/global_master-catchup_parmlist_g2
rename to parm/product/gfs.fFFF.paramlist.b.txt
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_05.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_05.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_05.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_05.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_10.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_10.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_10.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_10.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_15.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_15.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_15.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_15.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_20.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_20.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_20.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_20.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_25.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_25.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_25.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_25.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_30.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_30.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_30.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_30.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_35.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_35.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_35.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_35.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_40.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_40.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_40.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_40.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_45.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_45.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_45.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_45.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_50.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_50.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_50.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_50.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_55.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_55.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_55.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_55.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_60.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_60.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_60.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_60.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_65.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_65.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_65.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_65.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_70.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_70.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_70.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_70.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_75.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_75.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_75.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_75.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_80.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_80.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_80.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_80.list
diff --git a/parm/transfer/transfer_gdas_enkf_enkf_misc.list b/parm/transfer/transfer_gfs_enkfgdas_enkf_misc.list
similarity index 100%
rename from parm/transfer/transfer_gdas_enkf_enkf_misc.list
rename to parm/transfer/transfer_gfs_enkfgdas_enkf_misc.list
diff --git a/parm/transfer/transfer_gdas_1a.list b/parm/transfer/transfer_gfs_gdas_gdas_1a.list
similarity index 100%
rename from parm/transfer/transfer_gdas_1a.list
rename to parm/transfer/transfer_gfs_gdas_gdas_1a.list
diff --git a/parm/transfer/transfer_gdas_1b.list b/parm/transfer/transfer_gfs_gdas_gdas_1b.list
similarity index 100%
rename from parm/transfer/transfer_gdas_1b.list
rename to parm/transfer/transfer_gfs_gdas_gdas_1b.list
diff --git a/parm/transfer/transfer_gdas_1c.list b/parm/transfer/transfer_gfs_gdas_gdas_1c.list
similarity index 100%
rename from parm/transfer/transfer_gdas_1c.list
rename to parm/transfer/transfer_gfs_gdas_gdas_1c.list
diff --git a/parm/transfer/transfer_gdas_misc.list b/parm/transfer/transfer_gfs_gdas_gdas_misc.list
similarity index 100%
rename from parm/transfer/transfer_gdas_misc.list
rename to parm/transfer/transfer_gfs_gdas_gdas_misc.list
diff --git a/parm/transfer/transfer_gfs_1.list b/parm/transfer/transfer_gfs_gfs_1.list
similarity index 100%
rename from parm/transfer/transfer_gfs_1.list
rename to parm/transfer/transfer_gfs_gfs_1.list
diff --git a/parm/transfer/transfer_gfs_10a.list b/parm/transfer/transfer_gfs_gfs_10a.list
similarity index 100%
rename from parm/transfer/transfer_gfs_10a.list
rename to parm/transfer/transfer_gfs_gfs_10a.list
diff --git a/parm/transfer/transfer_gfs_10b.list b/parm/transfer/transfer_gfs_gfs_10b.list
similarity index 100%
rename from parm/transfer/transfer_gfs_10b.list
rename to parm/transfer/transfer_gfs_gfs_10b.list
diff --git a/parm/transfer/transfer_gfs_2.list b/parm/transfer/transfer_gfs_gfs_2.list
similarity index 100%
rename from parm/transfer/transfer_gfs_2.list
rename to parm/transfer/transfer_gfs_gfs_2.list
diff --git a/parm/transfer/transfer_gfs_3.list b/parm/transfer/transfer_gfs_gfs_3.list
similarity index 100%
rename from parm/transfer/transfer_gfs_3.list
rename to parm/transfer/transfer_gfs_gfs_3.list
diff --git a/parm/transfer/transfer_gfs_4.list b/parm/transfer/transfer_gfs_gfs_4.list
similarity index 100%
rename from parm/transfer/transfer_gfs_4.list
rename to parm/transfer/transfer_gfs_gfs_4.list
diff --git a/parm/transfer/transfer_gfs_5.list b/parm/transfer/transfer_gfs_gfs_5.list
similarity index 100%
rename from parm/transfer/transfer_gfs_5.list
rename to parm/transfer/transfer_gfs_gfs_5.list
diff --git a/parm/transfer/transfer_gfs_6.list b/parm/transfer/transfer_gfs_gfs_6.list
similarity index 100%
rename from parm/transfer/transfer_gfs_6.list
rename to parm/transfer/transfer_gfs_gfs_6.list
diff --git a/parm/transfer/transfer_gfs_7.list b/parm/transfer/transfer_gfs_gfs_7.list
similarity index 100%
rename from parm/transfer/transfer_gfs_7.list
rename to parm/transfer/transfer_gfs_gfs_7.list
diff --git a/parm/transfer/transfer_gfs_8.list b/parm/transfer/transfer_gfs_gfs_8.list
similarity index 100%
rename from parm/transfer/transfer_gfs_8.list
rename to parm/transfer/transfer_gfs_gfs_8.list
diff --git a/parm/transfer/transfer_gfs_9a.list b/parm/transfer/transfer_gfs_gfs_9a.list
similarity index 100%
rename from parm/transfer/transfer_gfs_9a.list
rename to parm/transfer/transfer_gfs_gfs_9a.list
diff --git a/parm/transfer/transfer_gfs_9b.list b/parm/transfer/transfer_gfs_gfs_9b.list
similarity index 100%
rename from parm/transfer/transfer_gfs_9b.list
rename to parm/transfer/transfer_gfs_gfs_9b.list
diff --git a/parm/transfer/transfer_gfs_gempak.list b/parm/transfer/transfer_gfs_gfs_gempak.list
similarity index 100%
rename from parm/transfer/transfer_gfs_gempak.list
rename to parm/transfer/transfer_gfs_gfs_gempak.list
diff --git a/parm/transfer/transfer_gfs_misc.list b/parm/transfer/transfer_gfs_gfs_misc.list
similarity index 100%
rename from parm/transfer/transfer_gfs_misc.list
rename to parm/transfer/transfer_gfs_gfs_misc.list
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_1.list
similarity index 91%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_1.list
index aae14dc120..1f2bece3d8 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_1.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /00/
 + /00/atmos/
 + /00/atmos/mem???/
@@ -37,7 +37,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /00/
 + /00/atmos/
 + /00/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_2.list
similarity index 91%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_2.list
index 1cf3b8f5e4..5ac1ca136d 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_2.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /06/
 + /06/atmos/
 + /06/atmos/mem???/
@@ -37,7 +37,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /06/
 + /06/atmos/
 + /06/atmos/mem???/
@@ -47,4 +47,3 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._
 E
 # This directory is a good candidate for compression
 #Z
-
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_3.list
similarity index 84%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_3.list
index ee0dae4c34..1b0469e9fa 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_3.list
@@ -27,7 +27,8 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+#_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /12/
 + /12/atmos/
 + /12/atmos/mem???/
@@ -37,7 +38,8 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+#_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /12/
 + /12/atmos/
 + /12/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_4.list
similarity index 84%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_4.list
index 29f1a601d1..45ca7cfa7c 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_4.list
@@ -27,7 +27,8 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+#_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /18/
 + /18/atmos/
 + /18/atmos/mem???/
@@ -37,7 +38,8 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+#_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /18/
 + /18/atmos/
 + /18/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_5.list
similarity index 92%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_5.list
index 7d1dd9ff6a..11b24839ea 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_5.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /00/
 + /00/atmos/
 + /00/atmos/mem???/
@@ -38,7 +38,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /00/
 + /00/atmos/
 + /00/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_6.list
similarity index 92%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_6.list
index 124dbe3aad..43bb4b968b 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_6.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /06/
 + /06/atmos/
 + /06/atmos/mem???/
@@ -38,7 +38,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /06/
 + /06/atmos/
 + /06/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_7.list
similarity index 92%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_7.list
index 58ff55b5d6..2b26623122 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_7.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /12/
 + /12/atmos/
 + /12/atmos/mem???/
@@ -38,7 +38,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /12/
 + /12/atmos/
 + /12/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_8.list
similarity index 92%
rename from parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_8.list
index 99d3de2843..3c1d814355 100644
--- a/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_enkf_enkf_8.list
@@ -27,7 +27,7 @@
 # This directory is a good candidate for compression
 #Z
 
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDYm1_/
 + /18/
 + /18/atmos/
 + /18/atmos/mem???/
@@ -38,7 +38,7 @@ _COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/enkfgdas._PDY_/
 + /18/
 + /18/atmos/
 + /18/atmos/mem???/
diff --git a/parm/transfer/transfer_rdhpcs_gdas.list b/parm/transfer/transfer_rdhpcs_gfs_gdas_gdas.list
similarity index 89%
rename from parm/transfer/transfer_rdhpcs_gdas.list
rename to parm/transfer/transfer_rdhpcs_gfs_gdas_gdas.list
index a154b022ed..f0c2954786 100644
--- a/parm/transfer/transfer_rdhpcs_gdas.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gdas_gdas.list
@@ -24,7 +24,8 @@
 # directory are included, so if no exclude patterns match that file, it will be
 # transferred.
 
-_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/
+#_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/
+_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gdas._PDYm1_/
 + /??/
 + /??/atmos/
 + /??/atmos/gdas.t??z*tcvitals*
@@ -45,7 +46,8 @@ _COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_
 E
 # This directory is a good candidate for compression
 #Z
-_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/
+#_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/
+_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gdas._PDY_/
 + /??/
 + /??/atmos/
 + /??/atmos/gdas.t??z*tcvitals*
diff --git a/parm/transfer/transfer_rdhpcs_gfs_nawips.list b/parm/transfer/transfer_rdhpcs_gfs_gempak.list
similarity index 96%
rename from parm/transfer/transfer_rdhpcs_gfs_nawips.list
rename to parm/transfer/transfer_rdhpcs_gfs_gempak.list
index 3465d3c360..ada61f33ca 100644
--- a/parm/transfer/transfer_rdhpcs_gfs_nawips.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gempak.list
@@ -24,6 +24,7 @@
 # directory are included, so if no exclude patterns match that file, it will be
 # transferred.
 
+#_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/  _REMOTEPATH_/com/nawips/_ENVIR_/gfs._PDY_/
 _COMROOT_/gfs/_SHORTVER_/gfs._PDY_/  _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gfs._PDY_/
 + /??/
 + /??/atmos/
diff --git a/parm/transfer/transfer_rdhpcs_gfs.list b/parm/transfer/transfer_rdhpcs_gfs_gfs.list
similarity index 94%
rename from parm/transfer/transfer_rdhpcs_gfs.list
rename to parm/transfer/transfer_rdhpcs_gfs_gfs.list
index 78eedd1f24..1bbe0e8fc4 100644
--- a/parm/transfer/transfer_rdhpcs_gfs.list
+++ b/parm/transfer/transfer_rdhpcs_gfs_gfs.list
@@ -24,7 +24,8 @@
 # directory are included, so if no exclude patterns match that file, it will be
 # transferred.
 
-_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/
+#_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/
+_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gfs._PDY_/
 + /??/
 + /??/atmos/
 + /??/atmos/gfs.t??z.*bufr*
diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml
index 7d901fe17b..374c26873e 100644
--- a/parm/ufs/fix/gfs/atmos.fixed_files.yaml
+++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml
@@ -28,58 +28,58 @@ copy:
   - [$(FIXugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc]
 
   # CO2 climatology
-  - [$(FIXam)/co2monthlycyc.txt,                              $(DATA)/co2monthlycyc.txt]
-  - [$(FIXam)/global_co2historicaldata_glob.txt,              $(DATA)/co2historicaldata_glob.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt]
-  - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt]
+  - [$(FIXgfs)/am/co2monthlycyc.txt,                              $(DATA)/co2monthlycyc.txt]
+  - [$(FIXgfs)/am/global_co2historicaldata_glob.txt,              $(DATA)/co2historicaldata_glob.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt]
+  - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt]
 
-  # FIXam files
-  - [$(FIXam)/global_climaeropac_global.txt,                 $(DATA)/aerosol.dat]
-  - [$(FIXam)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77,    $(DATA)/global_o3prdlos.f77]
-  - [$(FIXam)/global_h2o_pltc.f77,                           $(DATA)/global_h2oprdlos.f77]
-  - [$(FIXam)/global_glacier.2x2.grb,                        $(DATA)/global_glacier.2x2.grb]
-  - [$(FIXam)/global_maxice.2x2.grb,                         $(DATA)/global_maxice.2x2.grb]
-  - [$(FIXam)/global_snoclim.1.875.grb,                      $(DATA)/global_snoclim.1.875.grb]
-  - [$(FIXam)/global_slmask.t1534.3072.1536.grb,             $(DATA)/global_slmask.t1534.3072.1536.grb]
-  - [$(FIXam)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb]
-  - [$(FIXam)/global_solarconstant_noaa_an.txt,              $(DATA)/solarconstant_noaa_an.txt]
-  - [$(FIXam)/global_sfc_emissivity_idx.txt,                 $(DATA)/sfc_emissivity_idx.txt]
-  - [$(FIXam)/RTGSST.1982.2012.monthly.clim.grb,             $(DATA)/RTGSST.1982.2012.monthly.clim.grb]
-  - [$(FIXam)/IMS-NIC.blended.ice.monthly.clim.grb,          $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb]
+  # FIXgfs/am files
+  - [$(FIXgfs)/am/global_climaeropac_global.txt,                 $(DATA)/aerosol.dat]
+  - [$(FIXgfs)/am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77,    $(DATA)/global_o3prdlos.f77]
+  - [$(FIXgfs)/am/global_h2o_pltc.f77,                           $(DATA)/global_h2oprdlos.f77]
+  - [$(FIXgfs)/am/global_glacier.2x2.grb,                        $(DATA)/global_glacier.2x2.grb]
+  - [$(FIXgfs)/am/global_maxice.2x2.grb,                         $(DATA)/global_maxice.2x2.grb]
+  - [$(FIXgfs)/am/global_snoclim.1.875.grb,                      $(DATA)/global_snoclim.1.875.grb]
+  - [$(FIXgfs)/am/global_slmask.t1534.3072.1536.grb,             $(DATA)/global_slmask.t1534.3072.1536.grb]
+  - [$(FIXgfs)/am/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb]
+  - [$(FIXgfs)/am/global_solarconstant_noaa_an.txt,              $(DATA)/solarconstant_noaa_an.txt]
+  - [$(FIXgfs)/am/global_sfc_emissivity_idx.txt,                 $(DATA)/sfc_emissivity_idx.txt]
+  - [$(FIXgfs)/am/RTGSST.1982.2012.monthly.clim.grb,             $(DATA)/RTGSST.1982.2012.monthly.clim.grb]
+  - [$(FIXgfs)/am/IMS-NIC.blended.ice.monthly.clim.grb,          $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb]
 
   # MERRA2 Aerosol Climatology
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc]
-  - [$(FIXaer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc]
+  - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc]
 
   # Optical depth
-  - [$(FIXlut)/optics_BC.v1_3.dat,  $(DATA)/optics_BC.dat]
-  - [$(FIXlut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat]
-  - [$(FIXlut)/optics_OC.v1_3.dat,  $(DATA)/optics_OC.dat]
-  - [$(FIXlut)/optics_SS.v3_3.dat,  $(DATA)/optics_SS.dat]
-  - [$(FIXlut)/optics_SU.v1_3.dat,  $(DATA)/optics_SU.dat]
+  - [$(FIXgfs)/lut/optics_BC.v1_3.dat,  $(DATA)/optics_BC.dat]
+  - [$(FIXgfs)/lut/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat]
+  - [$(FIXgfs)/lut/optics_OC.v1_3.dat,  $(DATA)/optics_OC.dat]
+  - [$(FIXgfs)/lut/optics_SS.v3_3.dat,  $(DATA)/optics_SS.dat]
+  - [$(FIXgfs)/lut/optics_SU.v1_3.dat,  $(DATA)/optics_SU.dat]
 
   # fd_ufs.yaml file
   - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml, $(DATA)/]
diff --git a/parm/ufs/fv3/data_table b/parm/ufs/fv3/data_table
deleted file mode 100644
index 4ca9128415..0000000000
--- a/parm/ufs/fv3/data_table
+++ /dev/null
@@ -1 +0,0 @@
-"OCN", "runoff", "runoff", "./INPUT/@[FRUNOFF]", "none" ,  1.0
diff --git a/parm/ufs/fv3/diag_table b/parm/ufs/fv3/diag_table
index b972b3470c..dad8b6fac6 100644
--- a/parm/ufs/fv3/diag_table
+++ b/parm/ufs/fv3/diag_table
@@ -1,80 +1,60 @@
 "fv3_history",    0,  "hours",  1,  "hours",  "time"
 "fv3_history2d",  0,  "hours",  1,  "hours",  "time"
-"ocn%4yr%2mo%2dy%2hr",    6,  "hours",  1,  "hours",  "time",  6,  "hours",  "1901 1 1 0 0 0"
-"ocn_daily%4yr%2mo%2dy",  1,  "days",   1,  "days",   "time",  1,  "days",   "1901 1 1 0 0 0"
+"@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", @[FHOUT_OCN],  "hours",  1,  "hours",  "time",  @[FHOUT_OCN],  "hours",  "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR] 0 0"
 
 ##############
 # Ocean fields
 ##############
 # static fields
-"ocean_model", "geolon",      "geolon",      "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolat",      "geolat",      "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolon_c",    "geolon_c",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolat_c",    "geolat_c",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolon_u",    "geolon_u",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolat_u",    "geolat_u",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolon_v",    "geolon_v",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "geolat_v",    "geolat_v",    "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-# "ocean_model", "depth_ocean", "depth_ocean", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-# "ocean_model", "wet",         "wet",         "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "wet_c",       "wet_c",       "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "wet_u",       "wet_u",       "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "wet_v",       "wet_v",       "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "sin_rot",      "sin_rot",      "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
-"ocean_model", "cos_rot",      "cos_rot",      "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon",      "geolon",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat",      "geolat",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_c",    "geolon_c",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_c",    "geolat_c",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_u",    "geolon_u",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_u",    "geolat_u",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_v",    "geolon_v",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_v",    "geolat_v",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "depth_ocean", "depth_ocean", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "wet",         "wet",         "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_c",       "wet_c",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_u",       "wet_u",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_v",       "wet_v",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "sin_rot",     "sin_rot",     "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "cos_rot",     "cos_rot",     "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
 
 # ocean output TSUV and others
-"ocean_model", "SSH",        "SSH",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "SST",        "SST",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "SSS",        "SSS",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "speed",      "speed",     "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "SSU",        "SSU",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "SSV",        "SSV",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "frazil",     "frazil",    "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "ePBL_h_ML",  "ePBL",      "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "MLD_003",    "MLD_003",   "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "MLD_0125",   "MLD_0125",  "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model", "tob",        "tob",       "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "SSH",        "SSH",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "SST",        "SST",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "SSS",        "SSS",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "speed",      "speed",     "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "SSU",        "SSU",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "SSV",        "SSV",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "frazil",     "frazil",    "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "ePBL_h_ML",  "ePBL",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "MLD_003",    "MLD_003",   "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "MLD_0125",   "MLD_0125",  "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model", "tob",        "tob",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
 
 # Z-Space Fields Provided for CMIP6 (CMOR Names):
-"ocean_model_z", "uo",    "uo",    "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model_z", "vo",    "vo",    "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model_z", "so",    "so",    "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
-"ocean_model_z", "temp",  "temp",  "ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model_z", "uo",       "uo",        "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model_z", "vo",       "vo",        "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model_z", "so",       "so",        "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
+"ocean_model_z", "temp",     "temp",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr",  "all",  .true.,  "none",  2
 
 # forcing
-"ocean_model", "taux",      "taux",          "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "tauy",      "tauy",          "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "latent",    "latent",        "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "sensible",  "sensible",      "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "SW",        "SW",            "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "LW",        "LW",            "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "evap",      "evap",          "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "lprec",     "lprec",         "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "lrunoff",   "lrunoff",       "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-# "ocean_model", "frunoff",   "frunoff",       "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "fprec",     "fprec",         "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "LwLatSens", "LwLatSens",     "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-"ocean_model", "Heat_PmE",  "Heat_PmE",      "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
-
-# Daily fields
-"ocean_model", "geolon",     "geolon",     "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolat",     "geolat",     "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolon_c",   "geolon_c",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolat_c",   "geolat_c",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolon_u",   "geolon_u",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolat_u",   "geolat_u",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolon_v",   "geolon_v",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "geolat_v",   "geolat_v",   "ocn_daily%4yr%2mo%2dy",  "all",  .false.,  "none",  2
-"ocean_model", "SST",        "sst",        "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "latent",     "latent",     "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "sensible",   "sensible",   "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "SW",         "SW",         "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "LW",         "LW",         "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "evap",       "evap",       "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "lprec",      "lprec",      "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "taux",       "taux",       "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
-"ocean_model", "tauy",       "tauy",       "ocn_daily%4yr%2mo%2dy",  "all",  .true.,   "none",  2
+"ocean_model", "taux",      "taux",          "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "tauy",      "tauy",          "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "latent",    "latent",        "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "sensible",  "sensible",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "SW",        "SW",            "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "LW",        "LW",            "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "evap",      "evap",          "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "lprec",     "lprec",         "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "lrunoff",   "lrunoff",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+#"ocean_model", "frunoff",   "frunoff",       "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "fprec",     "fprec",         "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "LwLatSens", "LwLatSens",     "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "Heat_PmE",  "Heat_PmE",      "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
 
 ###################
 # Atmosphere fields
diff --git a/parm/ufs/fv3/diag_table_aod b/parm/ufs/fv3/diag_table_aod
index 0de51b66d8..fd8aee1791 100644
--- a/parm/ufs/fv3/diag_table_aod
+++ b/parm/ufs/fv3/diag_table_aod
@@ -3,4 +3,4 @@
 "gfs_phys",  "SU_AOD_550",      "su_aod550",     "fv3_history2d",  "all",  .false.,  "none",  2
 "gfs_phys",  "BC_AOD_550",      "bc_aod550",     "fv3_history2d",  "all",  .false.,  "none",  2
 "gfs_phys",  "OC_AOD_550",      "oc_aod550",     "fv3_history2d",  "all",  .false.,  "none",  2
-"gfs_phys",  "SS_AOD_550",      "ss_aod550",     "fv3_history2d",  "all",  .false.,  "none",  2
\ No newline at end of file
+"gfs_phys",  "SS_AOD_550",      "ss_aod550",     "fv3_history2d",  "all",  .false.,  "none",  2
diff --git a/parm/ufs/fv3/diag_table_da b/parm/ufs/fv3/diag_table_da
index 40824caee9..5e7149663a 100644
--- a/parm/ufs/fv3/diag_table_da
+++ b/parm/ufs/fv3/diag_table_da
@@ -1,16 +1,16 @@
 "fv3_history",    0,  "hours",  1,  "hours",  "time"
 "fv3_history2d",  0,  "hours",  1,  "hours",  "time"
-"ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1,  "hours"
+"@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1,  "hours"
 
-"ocean_model",    "geolon",    "geolon",   "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model",    "geolat",    "geolat",   "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model",    "SSH",       "ave_ssh",  "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model",    "MLD_0125",  "MLD",      "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model_z",  "u",         "u",        "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model_z",  "v",         "v",        "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model_z",  "h",         "h",        "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model_z",  "salt",      "Salt",     "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
-"ocean_model_z",  "temp",      "Temp",     "ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model",    "geolon",    "geolon",   "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model",    "geolat",    "geolat",   "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model",    "SSH",       "ave_ssh",  "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model",    "MLD_0125",  "MLD",      "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model_z",  "u",         "u",        "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model_z",  "v",         "v",        "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model_z",  "h",         "h",        "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model_z",  "salt",      "Salt",     "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
+"ocean_model_z",  "temp",      "Temp",     "@[MOM6_OUTPUT_DIR]/ocn_da%4yr%2mo%2dy%2hr",  "all",  .false.,  "none",  2
 
 "gfs_dyn",     "ucomp",       "ugrd",         "fv3_history",    "all",  .false.,  "none",  2
 "gfs_dyn",     "vcomp",       "vgrd",         "fv3_history",    "all",  .false.,  "none",  2
diff --git a/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma b/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma
index fcb719acd6..b27b504b50 100644
--- a/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma
+++ b/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma
@@ -49,6 +49,11 @@
            "longname",     "ozone mixing ratio"
            "units",        "kg/kg"
        "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic sigmab tracer
+ "TRACER", "atmos_mod", "sigmab"
+           "longname",     "sigma fraction"
+           "units",        "fraction"
+       "profile_type", "fixed", "surface_value=0.0" /
 # water- and ice-friendly aerosols (Thompson)
  "TRACER", "atmos_mod", "liq_aero"
            "longname",     "water-friendly aerosol number concentration"
@@ -63,8 +68,3 @@
            "longname",     "subgrid scale turbulent kinetic energy"
            "units",        "m2/s2"
        "profile_type", "fixed", "surface_value=0.0" /
-# prognotsitc sigmab tracer
- "TRACER", "atmos_mod", "sigmab"
-           "longname",     "sigma fraction"
-           "units",        "fraction"
-       "profile_type", "fixed", "surface_value=0.0" /
diff --git a/parm/ufs/gocart/ExtData.other b/parm/ufs/gocart/ExtData.other
index 789576305e..7a0d63d6ca 100644
--- a/parm/ufs/gocart/ExtData.other
+++ b/parm/ufs/gocart/ExtData.other
@@ -17,12 +17,12 @@ DU_UTHRES         '1'  Y E           -           none none uthres      ExtData/n
 
 #====== Sulfate Sources =================================================
 # Anthropogenic (BF & FF) emissions -- allowed to input as two layers
-SU_ANTHROL1 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2      ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-SU_ANTHROL2 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+SU_ANTHROL1 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2      ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+SU_ANTHROL2 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # Ship emissions
-SU_SHIPSO2 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2_ship  ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-SU_SHIPSO4 NA  N Y %y4-%m2-%d2t12:00:00 none none SO4_ship  ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+SU_SHIPSO2 NA  N Y %y4-%m2-%d2t12:00:00 none none SO2_ship  ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+SU_SHIPSO4 NA  N Y %y4-%m2-%d2t12:00:00 none none SO4_ship  ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # Aircraft fuel consumption
 SU_AIRCRAFT NA  Y Y %y4-%m2-%d2t12:00:00 none none none /dev/null
@@ -54,20 +54,20 @@ pSO2_OCS NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null
 # ---------------
 
 # # VOCs - OFFLINE MEGAN BIOG
-OC_ISOPRENE NA  N Y %y4-%m2-%d2t12:00:00 none none isoprene ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc
-OC_LIMO     NA  N Y %y4-%m2-%d2t12:00:00 none none limo     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc
-OC_MTPA     NA  N Y %y4-%m2-%d2t12:00:00 none none mtpa     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc
-OC_MTPO     NA  N Y %y4-%m2-%d2t12:00:00 none none mtpo     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc
+OC_ISOPRENE NA  Y Y %y4-%m2-%d2t12:00:00 none none isoprene ExtData/nexus/MEGAN_OFFLINE_BVOC/v2021-12/MEGAN_OFFLINE_CLIMO_2000_2022_%m2.nc
+OC_LIMO     NA  Y Y %y4-%m2-%d2t12:00:00 none none limo     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2021-12/MEGAN_OFFLINE_CLIMO_2000_2022_%m2.nc
+OC_MTPA     NA  Y Y %y4-%m2-%d2t12:00:00 none none mtpa     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2021-12/MEGAN_OFFLINE_CLIMO_2000_2022_%m2.nc
+OC_MTPO     NA  Y Y %y4-%m2-%d2t12:00:00 none none mtpo     ExtData/nexus/MEGAN_OFFLINE_BVOC/v2021-12/MEGAN_OFFLINE_CLIMO_2000_2022_%m2.nc
 
 # Biofuel Source -- Included in AeroCom anthropogenic emissions
 OC_BIOFUEL NA  Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null
 
 # Anthropogenic (BF & FF) emissions -- allowed to input as two layers
-OC_ANTEOC1 NA  N Y %y4-%m2-%d2t12:00:00 none none OC       ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-OC_ANTEOC2 NA  N Y %y4-%m2-%d2t12:00:00 none none OC_elev  ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+OC_ANTEOC1 NA  N Y %y4-%m2-%d2t12:00:00 none none OC       ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+OC_ANTEOC2 NA  N Y %y4-%m2-%d2t12:00:00 none none OC_elev  ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # EDGAR based ship emissions
-OC_SHIP    NA  N Y %y4-%m2-%d2t12:00:00 none none OC_ship  ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+OC_SHIP    NA  N Y %y4-%m2-%d2t12:00:00 none none OC_ship  ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # Aircraft fuel consumption
 OC_AIRCRAFT NA  N Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null
@@ -88,11 +88,11 @@ pSOA_ANTHRO_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null
 BC_BIOFUEL NA  Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null
 
 # Anthropogenic (BF & FF) emissions -- allowed to input as two layers
-BC_ANTEBC1 NA  N Y %y4-%m2-%d2t12:00:00 none none BC       ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-BC_ANTEBC2 NA  N Y %y4-%m2-%d2t12:00:00 none none BC_elev  ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+BC_ANTEBC1 NA  N Y %y4-%m2-%d2t12:00:00 none none BC       ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+BC_ANTEBC2 NA  N Y %y4-%m2-%d2t12:00:00 none none BC_elev  ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # EDGAR based ship emissions
-BC_SHIP NA  N Y %y4-%m2-%d2t12:00:00 none none BC_ship     ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+BC_SHIP NA  N Y %y4-%m2-%d2t12:00:00 none none BC_ship     ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 
 # Aircraft fuel consumption
 BC_AIRCRAFT NA  N Y %y4-%m2-%d2t12:00:00 none none bc_aviation /dev/null
@@ -133,11 +133,11 @@ BRC_AVIATION_CRS NA  Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null
 pSOA_BIOB_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null
 
 # # ======= Nitrate Sources ========
-# EMI_NH3_AG   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_ag      ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+# EMI_NH3_AG   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_ag      ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 # EMI_NH3_EN   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_en      /dev/null
-# EMI_NH3_IN   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_in      ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-# EMI_NH3_RE   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_re      ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
-# EMI_NH3_TR   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_tr      ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc
+# EMI_NH3_IN   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_in      ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+# EMI_NH3_RE   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_re      ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
+# EMI_NH3_TR   'kg m-2 s-1'  N Y      %y4-%m2-%d2T12:00:00    none     none     NH3_tr      ExtData/nexus/CEDS/v2019/monthly/%y4/CEDS_2019_monthly.%y4%m2.nc
 # EMI_NH3_OC   'kg m-2 s-1'  Y Y      %y4-%m2-%d2T12:00:00    none     none     emiss_ocn   ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4
 
 # # --------------------------------------------------------------
diff --git a/parm/ufs/gocart/SU2G_instance_SU.rc b/parm/ufs/gocart/SU2G_instance_SU.rc
index e365827760..79484b3068 100644
--- a/parm/ufs/gocart/SU2G_instance_SU.rc
+++ b/parm/ufs/gocart/SU2G_instance_SU.rc
@@ -7,8 +7,8 @@ aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_SU.v1_3.nc
 
 nbins: 4
 
-# Volcanic pointwise sources
-volcano_srcfilen: ExtData/nexus/VOLCANO/v2021-09/%y4/%m2/so2_volcanic_emissions_Carns.%y4%m2%d2.rc
+# Volcanic pointwise sources | degassing only | replace with the path to historical volcanic emissions for scientific runs
+volcano_srcfilen: ExtData/nexus/VOLCANO/v2021-09/so2_volcanic_emissions_CARN_v202005.degassing_only.rc
 
 # Heights [m] of LTO, CDS and CRS aviation emissions layers
 aviation_vertical_layers: 0.0 100.0 9.0e3 10.0e3
diff --git a/parm/ufs/mom6/MOM_input_template_025 b/parm/ufs/mom6/MOM_input_template_025
deleted file mode 100644
index df56a3f486..0000000000
--- a/parm/ufs/mom6/MOM_input_template_025
+++ /dev/null
@@ -1,902 +0,0 @@
-! This input file provides the adjustable run-time parameters for version 6 of the Modular Ocean Model (MOM6).
-! Where appropriate, parameters use usually given in MKS units.
-
-! This particular file is for the example in ice_ocean_SIS2/OM4_025.
-
-! This MOM_input file typically contains only the non-default values that are needed to reproduce this example.
-! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file
-! which is generated by the model at run-time.
-! === module MOM_domains ===
-TRIPOLAR_N = True               !   [Boolean] default = False
-                                ! Use tripolar connectivity at the northern edge of the domain.  With
-                                ! TRIPOLAR_N, NIGLOBAL must be even.
-NIGLOBAL = @[NX_GLB]            !
-                                ! The total number of thickness grid points in the x-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NJGLOBAL = @[NY_GLB]            !
-                                ! The total number of thickness grid points in the y-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NIHALO = 4                      ! default = 4
-                                ! The number of halo points on each side in the x-direction.  With
-                                ! STATIC_MEMORY_ this is set as NIHALO_ in MOM_memory.h at compile time; without
-                                ! STATIC_MEMORY_ the default is NIHALO_ in MOM_memory.h (if defined) or 2.
-NJHALO = 4                      ! default = 4
-                                ! The number of halo points on each side in the y-direction.  With
-                                ! STATIC_MEMORY_ this is set as NJHALO_ in MOM_memory.h at compile time; without
-                                ! STATIC_MEMORY_ the default is NJHALO_ in MOM_memory.h (if defined) or 2.
-! LAYOUT = 32, 18               !
-                                ! The processor layout that was actually used.
-! IO_LAYOUT = 1, 1              ! default = 1
-                                ! The processor layout to be used, or 0,0 to automatically set the io_layout to
-                                ! be the same as the layout.
-
-! === module MOM ===
-USE_REGRIDDING = True           !   [Boolean] default = False
-                                ! If True, use the ALE algorithm (regridding/remapping). If False, use the
-                                ! layered isopycnal algorithm.
-THICKNESSDIFFUSE = True         !   [Boolean] default = False
-                                ! If true, interface heights are diffused with a coefficient of KHTH.
-THICKNESSDIFFUSE_FIRST = True   !   [Boolean] default = False
-                                ! If true, do thickness diffusion before dynamics. This is only used if
-                                ! THICKNESSDIFFUSE is true.
-DT = @[DT_DYNAM_MOM6]           !   [s]
-                                ! The (baroclinic) dynamics time step.  The time-step that is actually used will
-                                ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode
-                                ! or the coupling timestep in coupled mode.)
-DT_THERM = @[DT_THERM_MOM6]     !   [s] default = 1800.0
-                                ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be
-                                ! an integer multiple of DT and less than the forcing or coupling time-step,
-                                ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer
-                                ! multiple of the coupling timestep.  By default DT_THERM is set to DT.
-THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN]    !   [Boolean] default = False
-                                ! If true, the MOM will take thermodynamic and tracer timesteps that can be
-                                ! longer than the coupling timestep. The actual thermodynamic timestep that is
-                                ! used in this case is the largest integer multiple of the coupling timestep
-                                ! that is less than or equal to DT_THERM.
-HFREEZE = 20.0                  !   [m] default = -1.0
-                                ! If HFREEZE > 0, melt potential will be computed. The actual depth
-                                ! over which melt potential is computed will be min(HFREEZE, OBLD)
-                                ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default)
-                                ! melt potential will not be computed.
-USE_PSURF_IN_EOS = False        !   [Boolean] default = False
-                                ! If true, always include the surface pressure contributions in equation of
-                                ! state calculations.
-FRAZIL = True                   !   [Boolean] default = False
-                                ! If true, water freezes if it gets too cold, and the accumulated heat deficit
-                                ! is returned in the surface state.  FRAZIL is only used if
-                                ! ENABLE_THERMODYNAMICS is true.
-DO_GEOTHERMAL = True            !   [Boolean] default = False
-                                ! If true, apply geothermal heating.
-BOUND_SALINITY = True           !   [Boolean] default = False
-                                ! If true, limit salinity to being positive. (The sea-ice model may ask for more
-                                ! salt than is available and drive the salinity negative otherwise.)
-MIN_SALINITY = 0.01             !   [PPT] default = 0.01
-                                ! The minimum value of salinity when BOUND_SALINITY=True. The default is 0.01
-                                ! for backward compatibility but ideally should be 0.
-C_P = 3992.0                    !   [J kg-1 K-1] default = 3991.86795711963
-                                ! The heat capacity of sea water, approximated as a constant. This is only used
-                                ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10
-                                ! definition of conservative temperature.
-CHECK_BAD_SURFACE_VALS = True   !   [Boolean] default = False
-                                ! If true, check the surface state for ridiculous values.
-BAD_VAL_SSH_MAX = 50.0          !   [m] default = 20.0
-                                ! The value of SSH above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SSS_MAX = 75.0          !   [PPT] default = 45.0
-                                ! The value of SSS above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MAX = 55.0          !   [deg C] default = 45.0
-                                ! The value of SST above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MIN = -3.0          !   [deg C] default = -2.1
-                                ! The value of SST below which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-DEFAULT_2018_ANSWERS = True     !   [Boolean] default = True
-                                ! This sets the default value for the various _2018_ANSWERS parameters.
-WRITE_GEOM = 2                  ! default = 1
-                                ! If =0, never write the geometry and vertical grid files. If =1, write the
-                                ! geometry and vertical grid files only for a new simulation. If =2, always
-                                ! write the geometry and vertical grid files. Other values are invalid.
-SAVE_INITIAL_CONDS = False      !   [Boolean] default = False
-                                ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE.
-
-! === module MOM_hor_index ===
-! Sets the horizontal array index types.
-
-! === module MOM_fixed_initialization ===
-INPUTDIR = "INPUT"              ! default = "."
-                                ! The directory in which input files are found.
-
-! === module MOM_grid_init ===
-GRID_CONFIG = "mosaic"          !
-                                ! A character string that determines the method for defining the horizontal
-                                ! grid.  Current options are:
-                                !     mosaic - read the grid from a mosaic (supergrid)
-                                !              file set by GRID_FILE.
-                                !     cartesian - use a (flat) Cartesian grid.
-                                !     spherical - use a simple spherical grid.
-                                !     mercator - use a Mercator spherical grid.
-GRID_FILE = "ocean_hgrid.nc"    !
-                                ! Name of the file from which to read horizontal grid data.
-GRID_ROTATION_ANGLE_BUGS = False  ! [Boolean] default = True
-                                ! If true, use an older algorithm to calculate the sine and
-                                ! cosines needed rotate between grid-oriented directions and
-                                ! true north and east.  Differences arise at the tripolar fold
-USE_TRIPOLAR_GEOLONB_BUG = False !   [Boolean] default = True
-                                ! If true, use older code that incorrectly sets the longitude in some points
-                                ! along the tripolar fold to be off by 360 degrees.
-TOPO_CONFIG = "file"            !
-                                ! This specifies how bathymetry is specified:
-                                !     file - read bathymetric information from the file
-                                !       specified by (TOPO_FILE).
-                                !     flat - flat bottom set to MAXIMUM_DEPTH.
-                                !     bowl - an analytically specified bowl-shaped basin
-                                !       ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH.
-                                !     spoon - a similar shape to 'bowl', but with an vertical
-                                !       wall at the southern face.
-                                !     halfpipe - a zonally uniform channel with a half-sine
-                                !       profile in the meridional direction.
-                                !     benchmark - use the benchmark test case topography.
-                                !     Neverland - use the Neverland test case topography.
-                                !     DOME - use a slope and channel configuration for the
-                                !       DOME sill-overflow test case.
-                                !     ISOMIP - use a slope and channel configuration for the
-                                !       ISOMIP test case.
-                                !     DOME2D - use a shelf and slope configuration for the
-                                !       DOME2D gravity current/overflow test case.
-                                !     Kelvin - flat but with rotated land mask.
-                                !     seamount - Gaussian bump for spontaneous motion test case.
-                                !     dumbbell - Sloshing channel with reservoirs on both ends.
-                                !     shelfwave - exponential slope for shelfwave test case.
-                                !     Phillips - ACC-like idealized topography used in the Phillips config.
-                                !     dense - Denmark Strait-like dense water formation and overflow.
-                                !     USER - call a user modified routine.
-TOPO_FILE = "ocean_topog.nc"    ! default = "topog.nc"
-                                ! The file from which the bathymetry is read.
-TOPO_EDITS_FILE = "All_edits.nc" ! default = ""
-                                ! The file from which to read a list of i,j,z topography overrides.
-ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES]   ! default = "False"
-                                ! If true, allow topography overrides to change ocean points to land
-MAXIMUM_DEPTH = 6500.0          !   [m]
-                                ! The maximum depth of the ocean.
-MINIMUM_DEPTH = 9.5             !   [m] default = 0.0
-                                ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is
-                                ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is
-                                ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than
-                                ! MASKING_DEPTH are rounded to MINIMUM_DEPTH.
-
-! === module MOM_open_boundary ===
-! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply,
-! if any.
-MASKING_DEPTH = 0.0             !   [m] default = -9999.0
-                                ! The depth below which to mask points as land points, for which all fluxes are
-                                ! zeroed out. MASKING_DEPTH is ignored if negative.
-CHANNEL_CONFIG = "list"         ! default = "none"
-                                ! A parameter that determines which set of channels are
-                                ! restricted to specific  widths.  Options are:
-                                !     none - All channels have the grid width.
-                                !     global_1deg - Sets 16 specific channels appropriate
-                                !       for a 1-degree model, as used in CM2G.
-                                !     list - Read the channel locations and widths from a
-                                !       text file, like MOM_channel_list in the MOM_SIS
-                                !       test case.
-                                !     file - Read open face widths everywhere from a
-                                !       NetCDF file on the model grid.
-CHANNEL_LIST_FILE = "MOM_channels_global_025" ! default = "MOM_channel_list"
-                                ! The file from which the list of narrowed channels is read.
-
-! === module MOM_verticalGrid ===
-! Parameters providing information about the vertical grid.
-NK = 75                         !   [nondim]
-                                ! The number of model layers.
-
-! === module MOM_tracer_registry ===
-
-! === module MOM_EOS ===
-DTFREEZE_DP = -7.75E-08         !   [deg C Pa-1] default = 0.0
-                                ! When TFREEZE_FORM=LINEAR, this is the derivative of the freezing potential
-                                ! temperature with pressure.
-
-! === module MOM_restart ===
-PARALLEL_RESTARTFILES = True    !   [Boolean] default = False
-                                ! If true, each processor writes its own restart file, otherwise a single
-                                ! restart file is generated
-
-! === module MOM_tracer_flow_control ===
-USE_IDEAL_AGE_TRACER = False    !   [Boolean] default = False
-                                ! If true, use the ideal_age_example tracer package.
-
-! === module ideal_age_example ===
-
-! === module MOM_coord_initialization ===
-COORD_CONFIG = "file"           !
-                                ! This specifies how layers are to be defined:
-                                !     ALE or none - used to avoid defining layers in ALE mode
-                                !     file - read coordinate information from the file
-                                !       specified by (COORD_FILE).
-                                !     BFB - Custom coords for buoyancy-forced basin case
-                                !       based on SST_S, T_BOT and DRHO_DT.
-                                !     linear - linear based on interfaces not layers
-                                !     layer_ref - linear based on layer densities
-                                !     ts_ref - use reference temperature and salinity
-                                !     ts_range - use range of temperature and salinity
-                                !       (T_REF and S_REF) to determine surface density
-                                !       and GINT calculate internal densities.
-                                !     gprime - use reference density (RHO_0) for surface
-                                !       density and GINT calculate internal densities.
-                                !     ts_profile - use temperature and salinity profiles
-                                !       (read from COORD_FILE) to set layer densities.
-                                !     USER - call a user modified routine.
-COORD_FILE = "layer_coord.nc"   !
-                                ! The file from which the coordinate densities are read.
-REMAP_UV_USING_OLD_ALG = True   !   [Boolean] default = True
-                                ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If
-                                ! false, uses the new method that remaps between grids described by an old and
-                                ! new thickness.
-REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER"
-                                ! Coordinate mode for vertical regridding. Choose among the following
-                                ! possibilities:  LAYER - Isopycnal or stacked shallow water layers
-                                !  ZSTAR, Z* - stretched geopotential z*
-                                !  SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf
-                                !  SIGMA - terrain following coordinates
-                                !  RHO   - continuous isopycnal
-                                !  HYCOM1 - HyCOM-like hybrid coordinate
-                                !  SLIGHT - stretched coordinates above continuous isopycnal
-                                !  ADAPTIVE - optimize for smooth neutral density surfaces
-BOUNDARY_EXTRAPOLATION = True   !   [Boolean] default = False
-                                ! When defined, a proper high-order reconstruction scheme is used within
-                                ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM
-                                ! reconstruction will also be used within boundary cells.
-ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter ALE_RESOLUTION
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 !   [m]
-                                ! The distribution of vertical resolution for the target
-                                ! grid used for Eulerian-like coordinates. For example,
-                                ! in z-coordinate mode, the parameter is a list of level
-                                ! thicknesses (in m). In sigma-coordinate mode, the list
-                                ! is of non-dimensional fractions of the water column.
-!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 !   [m]
-                                ! HYBRID target densities for interfaces
-REGRID_COMPRESSIBILITY_FRACTION = 0.01 !   [nondim] default = 0.0
-                                ! When interpolating potential density profiles we can add some artificial
-                                ! compressibility solely to make homogeneous regions appear stratified.
-MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum interface depths.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum interface depths
-                                !  PARAM       - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0
-                                ! The list of maximum depths for each interface.
-MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum layer thicknesses.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum layer thicknesses
-                                !  PARAM       - use the vector-parameter MAX_LAYER_THICKNESS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 !   [m]
-                                ! The list of maximum thickness for each layer.
-REMAPPING_SCHEME = "PPM_H4"     ! default = "PLM"
-                                ! This sets the reconstruction scheme used for vertical remapping for all
-                                ! variables. It can be one of the following schemes: PCM         (1st-order
-                                ! accurate)
-                                ! PLM         (2nd-order accurate)
-                                ! PPM_H4      (3rd-order accurate)
-                                ! PPM_IH4     (3rd-order accurate)
-                                ! PQM_IH4IH3  (4th-order accurate)
-                                ! PQM_IH6IH5  (5th-order accurate)
-
-! === module MOM_grid ===
-! Parameters providing information about the lateral grid.
-
-! === module MOM_state_initialization ===
-INIT_LAYERS_FROM_Z_FILE = True  !   [Boolean] default = False
-                                ! If true, initialize the layer thicknesses, temperatures, and salinities from a
-                                ! Z-space file on a latitude-longitude grid.
-
-! === module MOM_initialize_layers_from_Z ===
-TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc"  ! default = "temp_salt_z.nc"
-                                ! The name of the z-space input file used to initialize
-                                ! temperatures (T) and salinities (S). If T and S are not
-                                ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE
-                                ! must be set.
-Z_INIT_FILE_PTEMP_VAR = "temp"  ! default = "ptemp"
-                                ! The name of the potential temperature variable in
-                                ! TEMP_Z_INIT_FILE.
-Z_INIT_FILE_SALT_VAR = "salt"   ! default = "salt"
-                                ! The name of the salinity variable in
-                                ! SALT_Z_INIT_FILE.
-
-Z_INIT_ALE_REMAPPING = True     !   [Boolean] default = False
-                                ! If True, then remap straight to model coordinate from file.
-Z_INIT_REMAP_OLD_ALG = True     !   [Boolean] default = True
-                                ! If false, uses the preferred remapping algorithm for initialization. If true,
-                                ! use an older, less robust algorithm for remapping.
-
-! === module MOM_diag_mediator ===
-!Jiande NUM_DIAG_COORDS = 2             ! default = 1
-NUM_DIAG_COORDS = 1
-                                ! The number of diagnostic vertical coordinates to use.
-                                ! For each coordinate, an entry in DIAG_COORDS must be provided.
-!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" !
-DIAG_COORDS = "z Z ZSTAR"
-                                ! A list of string tuples associating diag_table modules to
-                                ! a coordinate definition used for diagnostics. Each string
-                                ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME".
-DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw"
-DIAG_MISVAL = @[MOM6_DIAG_MISVAL]
-!DIAG_COORD_DEF_RHO2 = "FILE:diag_rho2.nc,interfaces=rho2" ! default = "WOA09"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter DIAG_COORD_RES_RHO2
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-
-! === module MOM_MEKE ===
-USE_MEKE = True                 !   [Boolean] default = False
-                                ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy
-                                ! kinetic energy budget.
-MEKE_GMCOEFF = 1.0              !   [nondim] default = -1.0
-                                ! The efficiency of the conversion of potential energy into MEKE by the
-                                ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this
-                                ! conversion is not used or calculated.
-MEKE_BGSRC = 1.0E-13            !   [W kg-1] default = 0.0
-                                ! A background energy source for MEKE.
-MEKE_KHMEKE_FAC = 1.0           !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to Kh for MEKE itself.
-MEKE_ALPHA_RHINES = 0.15        !   [nondim] default = 0.05
-                                ! If positive, is a coefficient weighting the Rhines scale in the expression for
-                                ! mixing length used in MEKE-derived diffusivity.
-MEKE_ALPHA_EADY = 0.15          !   [nondim] default = 0.05
-                                ! If positive, is a coefficient weighting the Eady length scale in the
-                                ! expression for mixing length used in MEKE-derived diffusivity.
-
-! === module MOM_lateral_mixing_coeffs ===
-USE_VARIABLE_MIXING = True      !   [Boolean] default = False
-                                ! If true, the variable mixing code will be called.  This allows diagnostics to
-                                ! be created even if the scheme is not used.  If KHTR_SLOPE_CFF>0 or
-                                ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter
-                                ! file.
-RESOLN_SCALED_KH = True         !   [Boolean] default = False
-                                ! If true, the Laplacian lateral viscosity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-RESOLN_SCALED_KHTH = True       !   [Boolean] default = False
-                                ! If true, the interface depth diffusivity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-KHTR_SLOPE_CFF = 0.25           !   [nondim] default = 0.0
-                                ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer
-                                ! diffusivity
-USE_STORED_SLOPES = True        !   [Boolean] default = False
-                                ! If true, the isopycnal slopes are calculated once and stored for re-use. This
-                                ! uses more memory but avoids calling the equation of state more times than
-                                ! should be necessary.
-INTERPOLATE_RES_FN = False      !   [Boolean] default = True
-                                ! If true, interpolate the resolution function to the velocity points from the
-                                ! thickness points; otherwise interpolate the wave speed and calculate the
-                                ! resolution function independently at each point.
-GILL_EQUATORIAL_LD = True       !   [Boolean] default = False
-                                ! If true, uses Gill's definition of the baroclinic equatorial deformation
-                                ! radius, otherwise, if false, use Pedlosky's definition. These definitions
-                                ! differ by a factor of 2 in front of the beta term in the denominator. Gill's
-                                ! is the more appropriate definition.
-INTERNAL_WAVE_SPEED_BETTER_EST = False !   [Boolean] default = True
-                                ! If true, use a more robust estimate of the first mode wave speed as the
-                                ! starting point for iterations.
-
-! === module MOM_set_visc ===
-CHANNEL_DRAG = True             !   [Boolean] default = False
-                                ! If true, the bottom drag is exerted directly on each layer proportional to the
-                                ! fraction of the bottom it overlies.
-PRANDTL_TURB = 1.25             !   [nondim] default = 1.0
-                                ! The turbulent Prandtl number applied to shear instability.
-HBBL = 10.0                     !   [m]
-                                ! The thickness of a bottom boundary layer with a viscosity of KVBBL if
-                                ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom
-                                ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but
-                                ! LINEAR_DRAG is not.
-DRAG_BG_VEL = 0.1               !   [m s-1] default = 0.0
-                                ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an
-                                ! unresolved  velocity that is combined with the resolved velocity to estimate
-                                ! the velocity magnitude.  DRAG_BG_VEL is only used when BOTTOMDRAGLAW is
-                                ! defined.
-BBL_USE_EOS = True              !   [Boolean] default = False
-                                ! If true, use the equation of state in determining the properties of the bottom
-                                ! boundary layer.  Otherwise use the layer target potential densities.
-BBL_THICK_MIN = 0.1             !   [m] default = 0.0
-                                ! The minimum bottom boundary layer thickness that can be used with
-                                ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum
-                                ! near-bottom viscosity.
-KV = 1.0E-04                    !   [m2 s-1]
-                                ! The background kinematic viscosity in the interior. The molecular value, ~1e-6
-                                ! m2 s-1, may be used.
-KV_BBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the bottom boundary layer.
-KV_TBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the top boundary layer.
-
-! === module MOM_thickness_diffuse ===
-KHTH_MAX_CFL = 0.1              !   [nondimensional] default = 0.8
-                                ! The maximum value of the local diffusive CFL ratio that is permitted for the
-                                ! thickness diffusivity. 1.0 is the marginally unstable value in a pure layered
-                                ! model, but much smaller numbers (e.g. 0.1) seem to work better for ALE-based
-                                ! models.
-USE_GM_WORK_BUG = True          !   [Boolean] default = True
-                                ! If true, compute the top-layer work tendency on the u-grid with the incorrect
-                                ! sign, for legacy reproducibility.
-
-! === module MOM_continuity ===
-
-! === module MOM_continuity_PPM ===
-ETA_TOLERANCE = 1.0E-06         !   [m] default = 3.75E-09
-                                ! The tolerance for the differences between the barotropic and baroclinic
-                                ! estimates of the sea surface height due to the fluxes through each face.  The
-                                ! total tolerance for SSH is 4 times this value.  The default is
-                                ! 0.5*NK*ANGSTROM, and this should not be set less than about
-                                ! 10^-15*MAXIMUM_DEPTH.
-ETA_TOLERANCE_AUX = 0.001       !   [m] default = 1.0E-06
-                                ! The tolerance for free-surface height discrepancies between the barotropic
-                                ! solution and the sum of the layer thicknesses when calculating the auxiliary
-                                ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can
-                                ! be made larger for efficiency.
-
-! === module MOM_CoriolisAdv ===
-CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY"
-                                ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid
-                                ! values are:
-                                !    SADOURNY75_ENERGY - Sadourny, 1975; energy cons.
-                                !    ARAKAWA_HSU90     - Arakawa & Hsu, 1990
-                                !    SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons.
-                                !    ARAKAWA_LAMB81    - Arakawa & Lamb, 1981; En. + Enst.
-                                !    ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with
-                                !                         Arakawa & Hsu and Sadourny energy
-BOUND_CORIOLIS = True           !   [Boolean] default = False
-                                ! If true, the Coriolis terms at u-points are bounded by the four estimates of
-                                ! (f+rv)v from the four neighboring v-points, and similarly at v-points.  This
-                                ! option would have no effect on the SADOURNY Coriolis scheme if it were
-                                ! possible to use centered difference thickness fluxes.
-
-! === module MOM_PressureForce ===
-
-! === module MOM_PressureForce_AFV ===
-MASS_WEIGHT_IN_PRESSURE_GRADIENT = True !   [Boolean] default = False
-                                ! If true, use mass weighting when interpolating T/S for integrals near the
-                                ! bathymetry in AFV pressure gradient calculations.
-
-! === module MOM_hor_visc ===
-LAPLACIAN = True                !   [Boolean] default = False
-                                ! If true, use a Laplacian horizontal viscosity.
-AH_VEL_SCALE = 0.01             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the cube of the grid spacing to
-                                ! calculate the biharmonic viscosity. The final viscosity is the largest of this
-                                ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH.
-SMAGORINSKY_AH = True           !   [Boolean] default = False
-                                ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity.
-SMAG_BI_CONST = 0.06            !   [nondim] default = 0.0
-                                ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06.
-USE_LAND_MASK_FOR_HVISC = False !   [Boolean] default = False
-                                ! If true, use Use the land mask for the computation of thicknesses at velocity
-                                ! locations. This eliminates the dependence on arbitrary values over land or
-                                ! outside of the domain. Default is False in order to maintain answers with
-                                ! legacy experiments but should be changed to True for new experiments.
-
-! === module MOM_vert_friction ===
-HMIX_FIXED = 0.5                !   [m]
-                                ! The prescribed depth over which the near-surface viscosity and diffusivity are
-                                ! elevated when the bulk mixed layer is not used.
-KVML = 1.0E-04                  !   [m2 s-1] default = 1.0E-04
-                                ! The kinematic viscosity in the mixed layer.  A typical value is ~1e-2 m2 s-1.
-                                ! KVML is not used if BULKMIXEDLAYER is true.  The default is set by KV.
-MAXVEL = 6.0                    !   [m s-1] default = 3.0E+08
-                                ! The maximum velocity allowed before the velocity components are truncated.
-
-! === module MOM_PointAccel ===
-U_TRUNC_FILE = "U_velocity_truncations" ! default = ""
-                                ! The absolute path to a file into which the accelerations leading to zonal
-                                ! velocity truncations are written. Undefine this for efficiency if this
-                                ! diagnostic is not needed.
-V_TRUNC_FILE = "V_velocity_truncations" ! default = ""
-                                ! The absolute path to a file into which the accelerations leading to meridional
-                                ! velocity truncations are written. Undefine this for efficiency if this
-                                ! diagnostic is not needed.
-
-! === module MOM_barotropic ===
-BOUND_BT_CORRECTION = True      !   [Boolean] default = False
-                                ! If true, the corrective pseudo mass-fluxes into the barotropic solver are
-                                ! limited to values that require less than maxCFL_BT_cont to be accommodated.
-BT_PROJECT_VELOCITY = True      !   [Boolean] default = False
-                                ! If true, step the barotropic velocity first and project out the velocity
-                                ! tendency by 1+BEBT when calculating the transport.  The default (false) is to
-                                ! use a predictor continuity step to find the pressure field, and then to do a
-                                ! corrector continuity step using a weighted average of the old and new
-                                ! velocities, with weights of (1-BEBT) and BEBT.
-DYNAMIC_SURFACE_PRESSURE = True !   [Boolean] default = False
-                                ! If true, add a dynamic pressure due to a viscous ice shelf, for instance.
-BEBT = 0.2                      !   [nondim] default = 0.1
-                                ! BEBT determines whether the barotropic time stepping uses the forward-backward
-                                ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range
-                                ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1
-                                ! (for a backward Euler treatment). In practice, BEBT must be greater than about
-                                ! 0.05.
-DTBT = -0.9                     !   [s or nondim] default = -0.98
-                                ! The barotropic time step, in s. DTBT is only used with the split explicit time
-                                ! stepping. To set the time step automatically based the maximum stable value
-                                ! use 0, or a negative value gives the fraction of the stable value. Setting
-                                ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will
-                                ! actually be used is an integer fraction of DT, rounding down.
-BT_USE_OLD_CORIOLIS_BRACKET_BUG = True !   [Boolean] default = False
-                                ! If True, use an order of operations that is not bitwise rotationally symmetric
-                                ! in the meridional Coriolis term of the barotropic solver.
-
-! === module MOM_mixed_layer_restrat ===
-MIXEDLAYER_RESTRAT = True       !   [Boolean] default = False
-                                ! If true, a density-gradient dependent re-stratifying flow is imposed in the
-                                ! mixed layer. Can be used in ALE mode without restriction but in layer mode can
-                                ! only be used if BULKMIXEDLAYER is true.
-FOX_KEMPER_ML_RESTRAT_COEF = 1.0 !   [nondim] default = 0.0
-                                ! A nondimensional coefficient that is proportional to the ratio of the
-                                ! deformation radius to the dominant lengthscale of the submesoscale mixed layer
-                                ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic
-                                ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of
-                                ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al.
-                                ! (2010)
-MLE_FRONT_LENGTH = 500.0        !   [m] default = 0.0
-                                ! If non-zero, is the frontal-length scale used to calculate the upscaling of
-                                ! buoyancy gradients that is otherwise represented by the parameter
-                                ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended
-                                ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0.
-MLE_USE_PBL_MLD = True          !   [Boolean] default = False
-                                ! If true, the MLE parameterization will use the mixed-layer depth provided by
-                                ! the active PBL parameterization. If false, MLE will estimate a MLD based on a
-                                ! density difference with the surface using the parameter MLE_DENSITY_DIFF.
-MLE_MLD_DECAY_TIME = 2.592E+06  !   [s] default = 0.0
-                                ! The time-scale for a running-mean filter applied to the mixed-layer depth used
-                                ! in the MLE restratification parameterization. When the MLD deepens below the
-                                ! current running-mean the running-mean is instantaneously set to the current
-                                ! MLD.
-
-! === module MOM_diabatic_driver ===
-! The following parameters are used for diabatic processes.
-ENERGETICS_SFC_PBL = True       !   [Boolean] default = False
-                                ! If true, use an implied energetics planetary boundary layer scheme to
-                                ! determine the diffusivity and viscosity in the surface boundary layer.
-EPBL_IS_ADDITIVE = False        !   [Boolean] default = True
-                                ! If true, the diffusivity from ePBL is added to all other diffusivities.
-                                ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used.
-
-! === module MOM_CVMix_KPP ===
-! This is the MOM wrapper to CVMix:KPP
-! See http://cvmix.github.io/
-
-! === module MOM_tidal_mixing ===
-! Vertical Tidal Mixing Parameterization
-INT_TIDE_DISSIPATION = True     !   [Boolean] default = False
-                                ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing,
-                                ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004).
-INT_TIDE_PROFILE = "POLZIN_09"  ! default = "STLAURENT_02"
-                                ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with
-                                ! INT_TIDE_DISSIPATION. Valid values are:
-                                !    STLAURENT_02 - Use the St. Laurent et al exponential
-                                !                   decay profile.
-                                !    POLZIN_09 - Use the Polzin WKB-stretched algebraic
-                                !                   decay profile.
-INT_TIDE_DECAY_SCALE = 300.3003003003003 !   [m] default = 500.0
-                                ! The decay scale away from the bottom for tidal TKE with the new coding when
-                                ! INT_TIDE_DISSIPATION is used.
-KAPPA_ITIDES = 6.28319E-04      !   [m-1] default = 6.283185307179586E-04
-                                ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10
-                                ! km, as in St.Laurent et al. 2002.
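-                                ! (For reference, 2*pi / 1.0E4 m = 6.283185...E-04 m-1, so the value set here is
-                                ! the 2pi/10 km default rounded to six significant figures.)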
-KAPPA_H2_FACTOR = 0.84          !   [nondim] default = 1.0
-                                ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION.
-TKE_ITIDE_MAX = 0.1             !   [W m-2] default = 1000.0
-                                ! The maximum internal tide energy source available to mix above the bottom
-                                ! boundary layer with INT_TIDE_DISSIPATION.
-READ_TIDEAMP = True             !   [Boolean] default = False
-                                ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude
-                                ! with INT_TIDE_DISSIPATION.
-TIDEAMP_FILE = "tidal_amplitude.v20140616.nc" ! default = "tideamp.nc"
-                                ! The path to the file containing the spatially varying tidal amplitudes with
-                                ! INT_TIDE_DISSIPATION.
-H2_FILE = "ocean_topog.nc"      !
-                                ! The path to the file containing the sub-grid-scale topographic roughness
-                                ! amplitude with INT_TIDE_DISSIPATION.
-
-! === module MOM_CVMix_conv ===
-! Parameterization of enhanced mixing due to convection via CVMix
-
-! === module MOM_geothermal ===
-GEOTHERMAL_SCALE = 1.0          !   [W m-2 or various] default = 0.0
-                                ! The constant geothermal heat flux, a rescaling factor for the heat flux read
-                                ! from GEOTHERMAL_FILE, or 0 to disable the geothermal heating.
-GEOTHERMAL_FILE = "geothermal_davies2013_v1.nc" ! default = ""
-                                ! The file from which the geothermal heating is to be read, or blank to use a
-                                ! constant heating rate.
-GEOTHERMAL_VARNAME = "geothermal_hf" ! default = "geo_heat"
-                                ! The name of the geothermal heating variable in GEOTHERMAL_FILE.
-
-! === module MOM_set_diffusivity ===
-BBL_MIXING_AS_MAX = False       !   [Boolean] default = True
-                                ! If true, take the maximum of the diffusivity from the BBL mixing and the other
-                                ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added.
-USE_LOTW_BBL_DIFFUSIVITY = True !   [Boolean] default = False
-                                ! If true, uses a simple, imprecise, but non-coordinate-dependent model of BBL
-                                ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL
-                                ! scheme.
-SIMPLE_TKE_TO_KD = True         !   [Boolean] default = False
-                                ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary
-                                ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact
-                                ! energetics for an isopycnal layer-formulation.
-
-! === module MOM_bkgnd_mixing ===
-! Adding static vertical background mixing coefficients
-KD = 1.5E-05                    !   [m2 s-1]
-                                ! The background diapycnal diffusivity of density in the interior. Zero or the
-                                ! molecular value, ~1e-7 m2 s-1, may be used.
-KD_MIN = 2.0E-06                !   [m2 s-1] default = 1.5E-07
-                                ! The minimum diapycnal diffusivity.
-HENYEY_IGW_BACKGROUND = True    !   [Boolean] default = False
-                                ! If true, use a latitude-dependent scaling for the near surface background
-                                ! diffusivity, as described in Harrison & Hallberg, JPO 2008.
-KD_MAX = 0.1                    !   [m2 s-1] default = -1.0
-                                ! The maximum permitted increment for the diapycnal diffusivity from TKE-based
-                                ! parameterizations, or a negative value for no limit.
-
-! === module MOM_kappa_shear ===
-! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008
-USE_JACKSON_PARAM = True        !   [Boolean] default = False
-                                ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing
-                                ! parameterization.
-MAX_RINO_IT = 25                !   [nondim] default = 50
-                                ! The maximum number of iterations that may be used to estimate the Richardson
-                                ! number driven mixing.
-VERTEX_SHEAR = False             !   [Boolean] default = False
-                                ! If true, do the calculations of the shear-driven mixing
-                                ! at the cell vertices (i.e., the vorticity points).
-KAPPA_SHEAR_ITER_BUG = True     !   [Boolean] default = True
-                                ! If true, use an older, dimensionally inconsistent estimate of the derivative
-                                ! of diffusivity with energy in the Newton's method iteration.  The bug causes
-                                ! undercorrections when dz > 1 m.
-KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True !   [Boolean] default = True
-                                ! If true, report back the latest estimate of TKE instead of the time average
-                                ! TKE when there is mass in all layers.  Otherwise always report the time
-                                ! averaged TKE, as is currently done when there are some massless layers.
-
-! === module MOM_CVMix_shear ===
-! Parameterization of shear-driven turbulence via CVMix (various options)
-
-! === module MOM_CVMix_ddiff ===
-! Parameterization of mixing due to double diffusion processes via CVMix
-
-! === module MOM_diabatic_aux ===
-! The following parameters are used for auxiliary diabatic processes.
-PRESSURE_DEPENDENT_FRAZIL = False !   [Boolean] default = False
-                                ! If true, use a pressure dependent freezing temperature when making frazil. The
-                                ! default is false, which will be faster but is inappropriate with ice-shelf
-                                ! cavities.
-VAR_PEN_SW = True               !   [Boolean] default = False
-                                ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine
-                                ! the e-folding depth of incoming short wave radiation.
-CHL_FILE = @[CHLCLIM]           !
-                                ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It
-                                ! is used when VAR_PEN_SW and CHL_FROM_FILE are true.
-CHL_VARNAME = "chlor_a"         ! default = "CHL_A"
-                                ! Name of CHL_A variable in CHL_FILE.
-
-! === module MOM_energetic_PBL ===
-ML_OMEGA_FRAC = 0.001           !   [nondim] default = 0.0
-                                ! When setting the decay scale for turbulence, use this fraction of the absolute
-                                ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 +
-                                ! of*4*omega^2).
-TKE_DECAY = 0.01                !   [nondim] default = 2.5
-                                ! TKE_DECAY relates the vertical rate of decay of the TKE available for
-                                ! mechanical entrainment to the natural Ekman depth.
-EPBL_MSTAR_SCHEME = "OM4"       ! default = "CONSTANT"
-                                ! EPBL_MSTAR_SCHEME selects the method for setting mstar.  Valid values are:
-                                !    CONSTANT   - Use a fixed mstar given by MSTAR
-                                !    OM4        - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4
-                                !    REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018.
-MSTAR_CAP = 10.0                !   [nondim] default = -1.0
-                                ! If this value is positive, it sets the maximum value of mstar allowed in ePBL.
-                                ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT).
-MSTAR2_COEF1 = 0.29             !   [nondim] default = 0.3
-                                ! Coefficient in computing mstar when rotation and stabilizing effects are both
-                                ! important (used if EPBL_MSTAR_SCHEME = OM4).
-MSTAR2_COEF2 = 0.152            !   [nondim] default = 0.085
-                                ! Coefficient in computing mstar when only rotation limits the total mixing
-                                ! (used if EPBL_MSTAR_SCHEME = OM4)
-NSTAR = 0.06                    !   [nondim] default = 0.2
-                                ! The portion of the buoyant potential energy imparted by surface fluxes that is
-                                ! available to drive entrainment at the base of mixed layer when that energy is
-                                ! positive.
-EPBL_MLD_BISECTION = True       !   [Boolean] default = False
-                                ! If true, use bisection with the iterative determination of the self-consistent
-                                ! mixed layer depth.  Otherwise use the false position after a maximum and
-                                ! minimum bound have been evaluated and the returned value or bisection before
-                                ! this.
-MSTAR_CONV_ADJ = 0.667          !   [nondim] default = 0.0
-                                ! Coefficient used for reducing mstar during convection due to reduction of
-                                ! stable density gradient.
-USE_MLD_ITERATION = True        !   [Boolean] default = False
-                                ! A logical that specifies whether or not to use the distance to the bottom of
-                                ! the actively turbulent boundary layer to help set the EPBL length scale.
-EPBL_TRANSITION_SCALE = 0.01    !   [nondim] default = 0.1
-                                ! A scale for the mixing length in the transition layer at the edge of the
-                                ! boundary layer as a fraction of the boundary layer thickness.
-MIX_LEN_EXPONENT = 1.0          !   [nondim] default = 2.0
-                                ! The exponent applied to the ratio of the distance to the MLD and the MLD depth
-                                ! which determines the shape of the mixing length. This is only used if
-                                ! USE_MLD_ITERATION is True.
-USE_LA_LI2016 = @[MOM6_USE_LI2016] !   [nondim] default = False
-                                ! A logical to use the Li et al. 2016 (submitted) formula to determine the
-                                ! Langmuir number.
-USE_WAVES = @[MOM6_USE_WAVES]   !   [Boolean] default = False
-                                ! If true, enables surface wave modules.
-WAVE_METHOD = "SURFACE_BANDS"   ! default = "EMPTY"
-                                ! Choice of wave method, valid options include:
-                                !  TEST_PROFILE  - Prescribed from surface Stokes drift
-                                !                  and a decay wavelength.
-                                !  SURFACE_BANDS - Computed from multiple surface values
-                                !                  and decay wavelengths.
-                                !  DHH85         - Uses Donelan et al. 1985 empirical
-                                !                  wave spectrum with prescribed values.
-                                !  LF17          - Infers Stokes drift profile from wind
-                                !                  speed following Li and Fox-Kemper 2017.
-SURFBAND_SOURCE = "COUPLER"     ! default = "EMPTY"
-                                ! Choice of SURFACE_BANDS data mode, valid options include:
-                                !  DATAOVERRIDE  - Read from NetCDF using FMS DataOverride.
-                                !  COUPLER       - Look for variables from coupler pass
-                                !  INPUT         - Testing with fixed values.
-STK_BAND_COUPLER = 3            ! default = 1
-                                ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has
-                                ! to be consistent with the number of Stokes drift bands in WW3, or the model
-                                ! will fail.
-SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 !   [rad/m] default = 0.12566
-                                ! Central wavenumbers for surface Stokes drift bands.
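-                                ! (Three central wavenumbers are listed here, matching the STK_BAND_COUPLER = 3
-                                ! Stokes drift bands set above.)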
-EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE"
-                                ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence.
-                                ! Valid values are:
-                                !    NONE     - Do not do any extra mixing due to Langmuir turbulence
-                                !    RESCALE  - Use a multiplicative rescaling of mstar to account for Langmuir
-                                !      turbulence
-                                !    ADDITIVE - Add a Langmuir turbulence contribution to mstar on top of other
-                                !      contributions
-LT_ENHANCE_COEF = 0.044         !   [nondim] default = 0.447
-                                ! Coefficient for Langmuir enhancement of mstar
-LT_ENHANCE_EXP = -1.5           !   [nondim] default = -1.33
-                                ! Exponent for Langmuir enhancement of mstar
-LT_MOD_LAC1 = 0.0               !   [nondim] default = -0.87
-                                ! Coefficient for modification of Langmuir number due to MLD approaching Ekman
-                                ! depth.
-LT_MOD_LAC4 = 0.0               !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! stable Obukhov depth.
-LT_MOD_LAC5 = 0.22              !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! unstable Obukhov depth.
-
-! === module MOM_regularize_layers ===
-
-! === module MOM_opacity ===
-PEN_SW_NBANDS = 3               ! default = 1
-                                ! The number of bands of penetrating shortwave radiation.
-
-! === module MOM_tracer_advect ===
-TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM"
-                                ! The horizontal transport scheme for tracers:
-                                !   PLM    - Piecewise Linear Method
-                                !   PPM:H3 - Piecewise Parabolic Method (Huyhn 3rd order)
-                                !   PPM    - Piecewise Parabolic Method (Colella-Woodward)
-
-! === module MOM_tracer_hor_diff ===
-CHECK_DIFFUSIVE_CFL = True      !   [Boolean] default = False
-                                ! If true, use enough iterations of the diffusion to ensure that the diffusive
-                                ! equivalent of the CFL limit is not violated.  If false, always use the greater
-                                ! of 1 or MAX_TR_DIFFUSION_CFL iterations.
-
-! === module MOM_neutral_diffusion ===
-! This module implements neutral diffusion of tracers
-
-! === module MOM_lateral_boundary_diffusion ===
-! This module implements lateral diffusion of tracers near boundaries
-
-! === module MOM_sum_output ===
-MAXTRUNC = 100000               !   [truncations save_interval-1] default = 0
-                                ! The run will be stopped, and the day set to a very large value if the velocity
-                                ! is truncated more than MAXTRUNC times between energy saves.  Set MAXTRUNC to 0
-                                ! to stop if there is any truncation of velocities.
-ENERGYSAVEDAYS = 1.00           !   [days] default = 1.0
-                                ! The interval in units of TIMEUNIT between saves of the energies of the run and
-                                ! other globally summed diagnostics.
-
-! === module ocean_model_init ===
-
-! === module MOM_oda_incupd ===
-ODA_INCUPD = @[ODA_INCUPD]   ! [Boolean] default = False
-                             ! If true, oda incremental updates will be applied
-                             ! everywhere in the domain.
-ODA_INCUPD_FILE = "mom6_increment.nc"   ! The name of the file with the T,S,h increments.
-
-ODA_TEMPINC_VAR = "Temp"        ! default = "ptemp_inc"
-                                ! The name of the potential temperature inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_SALTINC_VAR = "Salt"        ! default = "sal_inc"
-                                ! The name of the salinity inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_THK_VAR = "h"               ! default = "h"
-                                ! The name of the int. depth inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_INCUPD_UV = true            !
-ODA_UINC_VAR = "u"              ! default = "u_inc"
-                                ! The name of the zonal vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_VINC_VAR = "v"              ! default = "v_inc"
-                                ! The name of the meridional vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS]            ! default=3.0
-
-! === module MOM_surface_forcing ===
-OCEAN_SURFACE_STAGGER = "A"     ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the surface velocity field that is
-                                ! returned to the coupler.  Valid values include
-                                ! 'A', 'B', or 'C'.
-
-MAX_P_SURF = 0.0                !   [Pa] default = -1.0
-                                ! The maximum surface pressure that can be exerted by the atmosphere and
-                                ! floating sea-ice or ice shelves. This is needed because the FMS coupling
-                                ! structure does not limit the water that can be frozen out of the ocean and the
-                                ! ice-ocean heat fluxes are treated explicitly.  No limit is applied if a
-                                ! negative value is used.
-WIND_STAGGER = "A"              ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the input wind stress field.  Valid
-                                ! values are 'A', 'B', or 'C'.
-CD_TIDES = 0.0018               !   [nondim] default = 1.0E-04
-                                ! The drag coefficient that applies to the tides.
-GUST_CONST = 0.0                !   [Pa] default = 0.02
-                                ! The background gustiness in the winds.
-FIX_USTAR_GUSTLESS_BUG = False  !   [Boolean] default = False
-                                ! If true, correct a bug in the time-averaging of the gustless wind friction
-                                ! velocity.
-USE_RIGID_SEA_ICE = True        !   [Boolean] default = False
-                                ! If true, sea-ice is rigid enough to exert a nonhydrostatic pressure that
-                                ! resists vertical motion.
-SEA_ICE_RIGID_MASS = 100.0      !   [kg m-2] default = 1000.0
-                                ! The mass of sea-ice per unit area at which the sea-ice starts to exhibit
-                                ! rigidity.
-LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF]  !   [Boolean] default = False
-                                ! If true, allows liquid river runoff to be specified via
-                                ! the data_table using the component name 'OCN'.
-! === module ocean_stochastics ===
-DO_SPPT   = @[DO_OCN_SPPT]      ! [Boolean] default = False
-                                ! If true, perturb the diabatic tendencies in MOM_diabatic_driver
-PERT_EPBL = @[PERT_EPBL]        ! [Boolean] default = False
-                                ! If true, perturb the KE dissipation and destruction in MOM_energetic_PBL
-! === module MOM_restart ===
-RESTART_CHECKSUMS_REQUIRED = False
-! === module MOM_file_parser ===
diff --git a/parm/ufs/mom6/MOM_input_template_050 b/parm/ufs/mom6/MOM_input_template_050
deleted file mode 100644
index 4c39198c02..0000000000
--- a/parm/ufs/mom6/MOM_input_template_050
+++ /dev/null
@@ -1,947 +0,0 @@
-! This input file provides the adjustable run-time parameters for version 6 of the Modular Ocean Model (MOM6).
-! Where appropriate, parameters are usually given in MKS units.
-
-! This particular file is for the example in ice_ocean_SIS2/OM4_05.
-
-! This MOM_input file typically contains only the non-default values that are needed to reproduce this example.
-! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file
-! which is generated by the model at run-time.
-! === module MOM_domains ===
-TRIPOLAR_N = True               !   [Boolean] default = False
-                                ! Use tripolar connectivity at the northern edge of the domain.  With
-                                ! TRIPOLAR_N, NIGLOBAL must be even.
-NIGLOBAL = @[NX_GLB]            !
-                                ! The total number of thickness grid points in the x-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NJGLOBAL = @[NY_GLB]            !
-                                ! The total number of thickness grid points in the y-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NIHALO = 4                      ! default = 4
-                                ! The number of halo points on each side in the x-direction.  With
-                                ! STATIC_MEMORY_ this is set as NIHALO_ in MOM_memory.h at compile time; without
-                                ! STATIC_MEMORY_ the default is NIHALO_ in MOM_memory.h (if defined) or 2.
-NJHALO = 4                      ! default = 4
-                                ! The number of halo points on each side in the y-direction.  With
-                                ! STATIC_MEMORY_ this is set as NJHALO_ in MOM_memory.h at compile time; without
-                                ! STATIC_MEMORY_ the default is NJHALO_ in MOM_memory.h (if defined) or 2.
-! LAYOUT = 21, 20               !
-                                ! The processor layout that was actually used.
-! IO_LAYOUT = 1, 1              ! default = 1
-                                ! The processor layout to be used, or 0,0 to automatically set the io_layout to
-                                ! be the same as the layout.
-
-! === module MOM ===
-USE_REGRIDDING = True           !   [Boolean] default = False
-                                ! If True, use the ALE algorithm (regridding/remapping). If False, use the
-                                ! layered isopycnal algorithm.
-THICKNESSDIFFUSE = True         !   [Boolean] default = False
-                                ! If true, interface heights are diffused with a coefficient of KHTH.
-THICKNESSDIFFUSE_FIRST = True   !   [Boolean] default = False
-                                ! If true, do thickness diffusion before dynamics. This is only used if
-                                ! THICKNESSDIFFUSE is true.
-DT = @[DT_DYNAM_MOM6]           !   [s]
-                                ! The (baroclinic) dynamics time step.  The time-step that is actually used will
-                                ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode
-                                ! or the coupling timestep in coupled mode.)
-DT_THERM = @[DT_THERM_MOM6]     !   [s] default = 1800.0
-                                ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be
-                                ! an integer multiple of DT and less than the forcing or coupling time-step,
-                                ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer
-                                ! multiple of the coupling timestep.  By default DT_THERM is set to DT.
-THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN]    !   [Boolean] default = False
-                                ! If true, the MOM will take thermodynamic and tracer timesteps that can be
-                                ! longer than the coupling timestep. The actual thermodynamic timestep that is
-                                ! used in this case is the largest integer multiple of the coupling timestep
-                                ! that is less than or equal to DT_THERM.
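-                                ! Worked example (assumed numbers, for illustration only): with a coupling
-                                ! timestep of 3600 s and DT_THERM = 5400 s, the largest integer multiple of
-                                ! 3600 s not exceeding 5400 s is 3600 s, so a 3600 s thermodynamic step would
-                                ! be used when THERMO_SPANS_COUPLING is true.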
-HFREEZE = 20.0                  !   [m] default = -1.0
-                                ! If HFREEZE > 0, melt potential will be computed. The actual depth
-                                ! over which melt potential is computed will be min(HFREEZE, OBLD)
-                                ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default)
-                                ! melt potential will not be computed.
-USE_PSURF_IN_EOS = False        !   [Boolean] default = False
-                                ! If true, always include the surface pressure contributions in equation of
-                                ! state calculations.
-FRAZIL = True                   !   [Boolean] default = False
-                                ! If true, water freezes if it gets too cold, and the accumulated heat deficit
-                                ! is returned in the surface state.  FRAZIL is only used if
-                                ! ENABLE_THERMODYNAMICS is true.
-DO_GEOTHERMAL = True            !   [Boolean] default = False
-                                ! If true, apply geothermal heating.
-BOUND_SALINITY = True           !   [Boolean] default = False
-                                ! If true, limit salinity to being positive. (The sea-ice model may ask for more
-                                ! salt than is available and drive the salinity negative otherwise.)
-MIN_SALINITY = 0.01             !   [PPT] default = 0.01
-                                ! The minimum value of salinity when BOUND_SALINITY=True. The default is 0.01
-                                ! for backward compatibility but ideally should be 0.
-C_P = 3992.0                    !   [J kg-1 K-1] default = 3991.86795711963
-                                ! The heat capacity of sea water, approximated as a constant. This is only used
-                                ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10
-                                ! definition of conservative temperature.
-CHECK_BAD_SURFACE_VALS = True   !   [Boolean] default = False
-                                ! If true, check the surface state for ridiculous values.
-BAD_VAL_SSH_MAX = 50.0          !   [m] default = 20.0
-                                ! The value of SSH above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SSS_MAX = 75.0          !   [PPT] default = 45.0
-                                ! The value of SSS above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MAX = 55.0          !   [deg C] default = 45.0
-                                ! The value of SST above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MIN = -3.0          !   [deg C] default = -2.1
-                                ! The value of SST below which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-DEFAULT_2018_ANSWERS = True     !   [Boolean] default = True
-                                ! This sets the default value for the various _2018_ANSWERS parameters.
-WRITE_GEOM = 2                  ! default = 1
-                                ! If =0, never write the geometry and vertical grid files. If =1, write the
-                                ! geometry and vertical grid files only for a new simulation. If =2, always
-                                ! write the geometry and vertical grid files. Other values are invalid.
-SAVE_INITIAL_CONDS = False      !   [Boolean] default = False
-                                ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE.
-
-! === module MOM_hor_index ===
-! Sets the horizontal array index types.
-
-! === module MOM_fixed_initialization ===
-INPUTDIR = "INPUT"              ! default = "."
-                                ! The directory in which input files are found.
-
-! === module MOM_grid_init ===
-GRID_CONFIG = "mosaic"          !
-                                ! A character string that determines the method for defining the horizontal
-                                ! grid.  Current options are:
-                                !     mosaic - read the grid from a mosaic (supergrid)
-                                !              file set by GRID_FILE.
-                                !     cartesian - use a (flat) Cartesian grid.
-                                !     spherical - use a simple spherical grid.
-                                !     mercator - use a Mercator spherical grid.
-GRID_FILE = "ocean_hgrid.nc"    !
-                                ! Name of the file from which to read horizontal grid data.
-GRID_ROTATION_ANGLE_BUGS = False  ! [Boolean] default = True
-                                ! If true, use an older algorithm to calculate the sines and
-                                ! cosines needed to rotate between grid-oriented directions and
-                                ! true north and east.  Differences arise at the tripolar fold.
-USE_TRIPOLAR_GEOLONB_BUG = False !   [Boolean] default = True
-                                ! If true, use older code that incorrectly sets the longitude in some points
-                                ! along the tripolar fold to be off by 360 degrees.
-TOPO_CONFIG = "file"            !
-                                ! This specifies how bathymetry is specified:
-                                !     file - read bathymetric information from the file
-                                !       specified by (TOPO_FILE).
-                                !     flat - flat bottom set to MAXIMUM_DEPTH.
-                                !     bowl - an analytically specified bowl-shaped basin
-                                !       ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH.
-                                !     spoon - a similar shape to 'bowl', but with a vertical
-                                !       wall at the southern face.
-                                !     halfpipe - a zonally uniform channel with a half-sine
-                                !       profile in the meridional direction.
-                                !     benchmark - use the benchmark test case topography.
-                                !     Neverland - use the Neverland test case topography.
-                                !     DOME - use a slope and channel configuration for the
-                                !       DOME sill-overflow test case.
-                                !     ISOMIP - use a slope and channel configuration for the
-                                !       ISOMIP test case.
-                                !     DOME2D - use a shelf and slope configuration for the
-                                !       DOME2D gravity current/overflow test case.
-                                !     Kelvin - flat but with rotated land mask.
-                                !     seamount - Gaussian bump for spontaneous motion test case.
-                                !     dumbbell - Sloshing channel with reservoirs on both ends.
-                                !     shelfwave - exponential slope for shelfwave test case.
-                                !     Phillips - ACC-like idealized topography used in the Phillips config.
-                                !     dense - Denmark Strait-like dense water formation and overflow.
-                                !     USER - call a user modified routine.
-TOPO_FILE = "ocean_topog.nc"    ! default = "topog.nc"
-                                ! The file from which the bathymetry is read.
-ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES]   ! default = "False"
-                                ! If true, allow topography overrides to change ocean points to land
-MAXIMUM_DEPTH = 6500.0          !   [m]
-                                ! The maximum depth of the ocean.
-MINIMUM_DEPTH = 9.5             !   [m] default = 0.0
-                                ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is
-                                ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is
-                                ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than
-                                ! MASKING_DEPTH are rounded to MINIMUM_DEPTH.
-
-! === module MOM_open_boundary ===
-! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply,
-! if any.
-MASKING_DEPTH = 0.0             !   [m] default = -9999.0
-                                ! The depth below which to mask points as land points, for which all fluxes are
-                                ! zeroed out. MASKING_DEPTH is ignored if negative.
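-                                ! (With the values in this file, MINIMUM_DEPTH = 9.5 m and MASKING_DEPTH = 0.0 m:
-                                ! points shallower than 0 m are masked out as land, and depths between 0 m and
-                                ! 9.5 m are rounded to 9.5 m.)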
-CHANNEL_CONFIG = "list"         ! default = "none"
-                                ! A parameter that determines which set of channels are
-                                ! restricted to specific  widths.  Options are:
-                                !     none - All channels have the grid width.
-                                !     global_1deg - Sets 16 specific channels appropriate
-                                !       for a 1-degree model, as used in CM2G.
-                                !     list - Read the channel locations and widths from a
-                                !       text file, like MOM_channel_list in the MOM_SIS
-                                !       test case.
-                                !     file - Read open face widths everywhere from a
-                                !       NetCDF file on the model grid.
-CHANNEL_LIST_FILE = "MOM_channels_global_025" ! default = "MOM_channel_list"
-                                ! The file from which the list of narrowed channels is read.
-
-! === module MOM_verticalGrid ===
-! Parameters providing information about the vertical grid.
-NK = 75                         !   [nondim]
-                                ! The number of model layers.
-
-! === module MOM_tracer_registry ===
-
-! === module MOM_EOS ===
-DTFREEZE_DP = -7.75E-08         !   [deg C Pa-1] default = 0.0
-                                ! When TFREEZE_FORM=LINEAR, this is the derivative of the freezing potential
-                                ! temperature with pressure.
-
-! === module MOM_restart ===
-PARALLEL_RESTARTFILES = True    !   [Boolean] default = False
-                                ! If true, each processor writes its own restart file, otherwise a single
-                                ! restart file is generated
-
-! === module MOM_tracer_flow_control ===
-USE_IDEAL_AGE_TRACER = False    !   [Boolean] default = False
-                                ! If true, use the ideal_age_example tracer package.
-
-! === module ideal_age_example ===
-
-! === module MOM_coord_initialization ===
-COORD_CONFIG = "file"           !
-                                ! This specifies how layers are to be defined:
-                                !     ALE or none - used to avoid defining layers in ALE mode
-                                !     file - read coordinate information from the file
-                                !       specified by (COORD_FILE).
-                                !     BFB - Custom coords for buoyancy-forced basin case
-                                !       based on SST_S, T_BOT and DRHO_DT.
-                                !     linear - linear based on interfaces not layers
-                                !     layer_ref - linear based on layer densities
-                                !     ts_ref - use reference temperature and salinity
-                                !     ts_range - use range of temperature and salinity
-                                !       (T_REF and S_REF) to determine surface density
-                                !       and GINT to calculate internal densities.
-                                !     gprime - use reference density (RHO_0) for surface
-                                !       density and GINT to calculate internal densities.
-                                !     ts_profile - use temperature and salinity profiles
-                                !       (read from COORD_FILE) to set layer densities.
-                                !     USER - call a user modified routine.
-COORD_FILE = "layer_coord.nc"   !
-                                ! The file from which the coordinate densities are read.
-REMAP_UV_USING_OLD_ALG = True   !   [Boolean] default = True
-                                ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If
-                                ! false, uses the new method that remaps between grids described by an old and
-                                ! new thickness.
-REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER"
-                                ! Coordinate mode for vertical regridding. Choose among the following
-                                ! possibilities:  LAYER - Isopycnal or stacked shallow water layers
-                                !  ZSTAR, Z* - stretched geopotential z*
-                                !  SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf
-                                !  SIGMA - terrain following coordinates
-                                !  RHO   - continuous isopycnal
-                                !  HYCOM1 - HyCOM-like hybrid coordinate
-                                !  SLIGHT - stretched coordinates above continuous isopycnal
-                                !  ADAPTIVE - optimize for smooth neutral density surfaces
-BOUNDARY_EXTRAPOLATION = True   !   [Boolean] default = False
-                                ! When defined, a proper high-order reconstruction scheme is used within
-                                ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM
-                                ! reconstruction will also be used within boundary cells.
-ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter ALE_RESOLUTION
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 !   [m]
-                                ! The distribution of vertical resolution for the target
-                                ! grid used for Eulerian-like coordinates. For example,
-                                ! in z-coordinate mode, the parameter is a list of level
-                                ! thicknesses (in m). In sigma-coordinate mode, the list
-                                ! is of non-dimensional fractions of the water column.
-!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 !   [m]
-                                ! HYBRID target densities for interfaces
-REGRID_COMPRESSIBILITY_FRACTION = 0.01 !   [nondim] default = 0.0
-                                ! When interpolating potential density profiles we can add some artificial
-                                ! compressibility solely to make homogeneous regions appear stratified.
-MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum interface depths.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum interface depths
-                                !  PARAM       - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0
-                                ! The list of maximum depths for each interface.
-MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum layer thicknesses.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum layer thicknesses
-                                !  PARAM       - use the vector-parameter MAX_LAYER_THICKNESS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 !   [m]
-                                ! The list of maximum thickness for each layer.
-REMAPPING_SCHEME = "PPM_H4"     ! default = "PLM"
-                                ! This sets the reconstruction scheme used for vertical remapping for all
-                                ! variables. It can be one of the following schemes:
-                                ! PCM         (1st-order accurate)
-                                ! PLM         (2nd-order accurate)
-                                ! PPM_H4      (3rd-order accurate)
-                                ! PPM_IH4     (3rd-order accurate)
-                                ! PQM_IH4IH3  (4th-order accurate)
-                                ! PQM_IH6IH5  (5th-order accurate)
-
-! === module MOM_grid ===
-! Parameters providing information about the lateral grid.
-
-! === module MOM_state_initialization ===
-INIT_LAYERS_FROM_Z_FILE = True  !   [Boolean] default = False
-                                ! If true, initialize the layer thicknesses, temperatures, and salinities from a
-                                ! Z-space file on a latitude-longitude grid.
-
-! === module MOM_initialize_layers_from_Z ===
-TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc"  ! default = "temp_salt_z.nc"
-                                ! The name of the z-space input file used to initialize
-                                ! temperatures (T) and salinities (S). If T and S are not
-                                ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE
-                                ! must be set.
-Z_INIT_FILE_PTEMP_VAR = "temp" ! default = "ptemp"
-                                ! The name of the potential temperature variable in
-                                ! TEMP_Z_INIT_FILE.
-Z_INIT_FILE_SALT_VAR = "salt"   ! default = "salt"
-                                ! The name of the salinity variable in
-                                ! SALT_Z_INIT_FILE.
-
-Z_INIT_ALE_REMAPPING = True     !   [Boolean] default = False
-                                ! If True, then remap straight to model coordinate from file.
-Z_INIT_REMAP_OLD_ALG = True     !   [Boolean] default = True
-                                ! If false, uses the preferred remapping algorithm for initialization. If true,
-                                ! use an older, less robust algorithm for remapping.
-
-! === module MOM_diag_mediator ===
-!Jiande NUM_DIAG_COORDS = 2             ! default = 1
-NUM_DIAG_COORDS = 1             ! default = 1
-                                ! The number of diagnostic vertical coordinates to use.
-                                ! For each coordinate, an entry in DIAG_COORDS must be provided.
-!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" !
-DIAG_COORDS = "z Z ZSTAR"
-                                ! A list of string tuples associating diag_table modules to
-                                ! a coordinate definition used for diagnostics. Each string
-                                ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME".
-DIAG_COORD_DEF_Z="FILE:interpolate_zgrid_40L.nc,interfaces=zw"
-DIAG_MISVAL = -1e34
-!DIAG_COORD_DEF_RHO2 = "RFNC1:35,999.5,1028,1028.5,8.,1038.,0.0078125" ! default = "WOA09"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter DIAG_COORD_RES_RHO2
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-
-! === module MOM_MEKE ===
-USE_MEKE = True                 !   [Boolean] default = False
-                                ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy
-                                ! kinetic energy budget.
-MEKE_GMCOEFF = 1.0              !   [nondim] default = -1.0
-                                ! The efficiency of the conversion of potential energy into MEKE by the
-                                ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this
-                                ! conversion is not used or calculated.
-MEKE_BGSRC = 1.0E-13            !   [W kg-1] default = 0.0
-                                ! A background energy source for MEKE.
-MEKE_KHTH_FAC = 0.5             !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to KhTh.
-MEKE_KHTR_FAC = 0.5             !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to KhTr.
-MEKE_KHMEKE_FAC = 1.0           !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to Kh for MEKE itself.
-MEKE_VISCOSITY_COEFF_KU = 1.0   !   [nondim] default = 0.0
-                                ! If non-zero, is the scaling coefficient in the expression for viscosity used
-                                ! to parameterize harmonic lateral momentum mixing by unresolved eddies
-                                ! represented by MEKE. Can be negative to represent backscatter from the
-                                ! unresolved eddies.
-MEKE_ALPHA_RHINES = 0.15        !   [nondim] default = 0.05
-                                ! If positive, is a coefficient weighting the Rhines scale in the expression for
-                                ! mixing length used in MEKE-derived diffusivity.
-MEKE_ALPHA_EADY = 0.15          !   [nondim] default = 0.05
-                                ! If positive, is a coefficient weighting the Eady length scale in the
-                                ! expression for mixing length used in MEKE-derived diffusivity.
-
-! === module MOM_lateral_mixing_coeffs ===
-USE_VARIABLE_MIXING = True      !   [Boolean] default = False
-                                ! If true, the variable mixing code will be called.  This allows diagnostics to
-                                ! be created even if the scheme is not used.  If KHTR_SLOPE_CFF>0 or
-                                ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter
-                                ! file.
-RESOLN_SCALED_KH = True         !   [Boolean] default = False
-                                ! If true, the Laplacian lateral viscosity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-RESOLN_SCALED_KHTH = True       !   [Boolean] default = False
-                                ! If true, the interface depth diffusivity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-KHTH_USE_EBT_STRUCT = True      !   [Boolean] default = False
-                                ! If true, uses the equivalent barotropic structure as the vertical structure of
-                                ! thickness diffusivity.
-KHTR_SLOPE_CFF = 0.25           !   [nondim] default = 0.0
-                                ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer
-                                ! diffusivity
-USE_STORED_SLOPES = True        !   [Boolean] default = False
-                                ! If true, the isopycnal slopes are calculated once and stored for re-use. This
-                                ! uses more memory but avoids calling the equation of state more times than
-                                ! should be necessary.
-KH_RES_FN_POWER = 100           !   [nondim] default = 2
-                                ! The power of dx/Ld in the Kh resolution function.  Any positive integer may be
-                                ! used, although even integers are more efficient to calculate.  Setting this
-                                ! greater than 100 results in a step-function being used.
-INTERPOLATE_RES_FN = False      !   [Boolean] default = True
-                                ! If true, interpolate the resolution function to the velocity points from the
-                                ! thickness points; otherwise interpolate the wave speed and calculate the
-                                ! resolution function independently at each point.
-GILL_EQUATORIAL_LD = True       !   [Boolean] default = False
-                                ! If true, uses Gill's definition of the baroclinic equatorial deformation
-                                ! radius, otherwise, if false, use Pedlosky's definition. These definitions
-                                ! differ by a factor of 2 in front of the beta term in the denominator. Gill's
-                                ! is the more appropriate definition.
-INTERNAL_WAVE_SPEED_BETTER_EST = False !   [Boolean] default = True
-                                ! If true, use a more robust estimate of the first mode wave speed as the
-                                ! starting point for iterations.
-
-! === module MOM_set_visc ===
-CHANNEL_DRAG = True             !   [Boolean] default = False
-                                ! If true, the bottom drag is exerted directly on each layer proportional to the
-                                ! fraction of the bottom it overlies.
-PRANDTL_TURB = 1.25             !   [nondim] default = 1.0
-                                ! The turbulent Prandtl number applied to shear instability.
-HBBL = 10.0                     !   [m]
-                                ! The thickness of a bottom boundary layer with a viscosity of KVBBL if
-                                ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom
-                                ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but
-                                ! LINEAR_DRAG is not.
-DRAG_BG_VEL = 0.1               !   [m s-1] default = 0.0
-                                ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an
-                                ! unresolved  velocity that is combined with the resolved velocity to estimate
-                                ! the velocity magnitude.  DRAG_BG_VEL is only used when BOTTOMDRAGLAW is
-                                ! defined.
-BBL_USE_EOS = True              !   [Boolean] default = False
-                                ! If true, use the equation of state in determining the properties of the bottom
-                                ! boundary layer.  Otherwise use the layer target potential densities.
-BBL_THICK_MIN = 0.1             !   [m] default = 0.0
-                                ! The minimum bottom boundary layer thickness that can be used with
-                                ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum
-                                ! near-bottom viscosity.
-KV = 1.0E-04                    !   [m2 s-1]
-                                ! The background kinematic viscosity in the interior. The molecular value, ~1e-6
-                                ! m2 s-1, may be used.
-KV_BBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the bottom boundary layer.
-KV_TBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the top boundary layer.
-
-! === module MOM_thickness_diffuse ===
-KHTH_MAX_CFL = 0.1              !   [nondimensional] default = 0.8
-                                ! The maximum value of the local diffusive CFL ratio that is permitted for the
-                                ! thickness diffusivity. 1.0 is the marginally unstable value in a pure layered
-                                ! model, but much smaller numbers (e.g. 0.1) seem to work better for ALE-based
-                                ! models.
-KHTH_USE_FGNV_STREAMFUNCTION = True !   [Boolean] default = False
-                                ! If true, use the streamfunction formulation of Ferrari et al., 2010, which
-                                ! effectively emphasizes graver vertical modes by smoothing in the vertical.
-FGNV_FILTER_SCALE = 0.1         !   [nondim] default = 1.0
-                                ! A coefficient scaling the vertical smoothing term in the Ferrari et al., 2010,
-                                ! streamfunction formulation.
-USE_GM_WORK_BUG = True          !   [Boolean] default = True
-                                ! If true, compute the top-layer work tendency on the u-grid with the incorrect
-                                ! sign, for legacy reproducibility.
-
-! === module MOM_continuity ===
-
-! === module MOM_continuity_PPM ===
-ETA_TOLERANCE = 1.0E-06         !   [m] default = 3.75E-09
-                                ! The tolerance for the differences between the barotropic and baroclinic
-                                ! estimates of the sea surface height due to the fluxes through each face.  The
-                                ! total tolerance for SSH is 4 times this value.  The default is
-                                ! 0.5*NK*ANGSTROM, and this should not be set less than about
-                                ! 10^-15*MAXIMUM_DEPTH.
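Editorial check, not file content: the quoted ETA_TOLERANCE default of 3.75E-09 m is consistent with 0.5*NK*ANGSTROM if one assumes NK = 75 layers and ANGSTROM = 1e-10 m; neither value appears in this excerpt, so both are assumptions.

    # Assumed NK and ANGSTROM; their product reproduces the quoted default.
    NK = 75
    ANGSTROM = 1.0e-10           # [m]
    print(0.5 * NK * ANGSTROM)   # 3.75e-09
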
-ETA_TOLERANCE_AUX = 0.001       !   [m] default = 1.0E-06
-                                ! The tolerance for free-surface height discrepancies between the barotropic
-                                ! solution and the sum of the layer thicknesses when calculating the auxiliary
-                                ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can
-                                ! be made larger for efficiency.
-
-! === module MOM_CoriolisAdv ===
-CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY"
-                                ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid
-                                ! values are:
-                                !    SADOURNY75_ENERGY - Sadourny, 1975; energy cons.
-                                !    ARAKAWA_HSU90     - Arakawa & Hsu, 1990
-                                !    SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons.
-                                !    ARAKAWA_LAMB81    - Arakawa & Lamb, 1981; En. + Enst.
-                                !    ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with
-                                !                         Arakawa & Hsu and Sadourny energy
-BOUND_CORIOLIS = True           !   [Boolean] default = False
-                                ! If true, the Coriolis terms at u-points are bounded by the four estimates of
-                                ! (f+rv)v from the four neighboring v-points, and similarly at v-points.  This
-                                ! option would have no effect on the SADOURNY Coriolis scheme if it were
-                                ! possible to use centered difference thickness fluxes.
-
-! === module MOM_PressureForce ===
-
-! === module MOM_PressureForce_AFV ===
-MASS_WEIGHT_IN_PRESSURE_GRADIENT = True !   [Boolean] default = False
-                                ! If true, use mass weighting when interpolating T/S for integrals near the
-                                ! bathymetry in AFV pressure gradient calculations.
-
-! === module MOM_hor_visc ===
-LAPLACIAN = True                !   [Boolean] default = False
-                                ! If true, use a Laplacian horizontal viscosity.
-KH_VEL_SCALE = 0.01             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the grid spacing to calculate the
-                                ! Laplacian viscosity. The final viscosity is the largest of this scaled
-                                ! viscosity, the Smagorinsky and Leith viscosities, and KH.
-KH_SIN_LAT = 2000.0             !   [m2 s-1] default = 0.0
-                                ! The amplitude of a latitudinally-dependent background viscosity of the form
-                                ! KH_SIN_LAT*(SIN(LAT)**KH_PWR_OF_SINE).
-SMAGORINSKY_KH = True           !   [Boolean] default = False
-                                ! If true, use a Smagorinsky nonlinear eddy viscosity.
-SMAG_LAP_CONST = 0.15           !   [nondim] default = 0.0
-                                ! The nondimensional Laplacian Smagorinsky constant, often 0.15.
-AH_VEL_SCALE = 0.01             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the cube of the grid spacing to
-                                ! calculate the biharmonic viscosity. The final viscosity is the largest of this
-                                ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH.
-SMAGORINSKY_AH = True           !   [Boolean] default = False
-                                ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity.
-SMAG_BI_CONST = 0.06            !   [nondim] default = 0.0
-                                ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06.
-USE_LAND_MASK_FOR_HVISC = False !   [Boolean] default = False
-                                ! If true, use the land mask for the computation of thicknesses at velocity
-                                ! locations. This eliminates the dependence on arbitrary values over land or
-                                ! outside of the domain. Default is False in order to maintain answers with
-                                ! legacy experiments but should be changed to True for new experiments.
-
-! === module MOM_vert_friction ===
-HMIX_FIXED = 0.5                !   [m]
-                                ! The prescribed depth over which the near-surface viscosity and diffusivity are
-                                ! elevated when the bulk mixed layer is not used.
-KVML = 1.0E-04                  !   [m2 s-1] default = 1.0E-04
-                                ! The kinematic viscosity in the mixed layer.  A typical value is ~1e-2 m2 s-1.
-                                ! KVML is not used if BULKMIXEDLAYER is true.  The default is set by KV.
-MAXVEL = 6.0                    !   [m s-1] default = 3.0E+08
-                                ! The maximum velocity allowed before the velocity components are truncated.
-
-! === module MOM_PointAccel ===
-U_TRUNC_FILE = "U_velocity_truncations" ! default = ""
-                                ! The absolute path to a file into which the accelerations leading to zonal
-                                ! velocity truncations are written. Undefine this for efficiency if this
-                                ! diagnostic is not needed.
-V_TRUNC_FILE = "V_velocity_truncations" ! default = ""
-                                ! The absolute path to a file into which the accelerations leading to meridional
-                                ! velocity truncations are written. Undefine this for efficiency if this
-                                ! diagnostic is not needed.
-
-! === module MOM_barotropic ===
-BOUND_BT_CORRECTION = True      !   [Boolean] default = False
-                                ! If true, the corrective pseudo mass-fluxes into the barotropic solver are
-                                ! limited to values that require less than maxCFL_BT_cont to be accommodated.
-BT_PROJECT_VELOCITY = True      !   [Boolean] default = False
-                                ! If true, step the barotropic velocity first and project out the velocity
-                                ! tendency by 1+BEBT when calculating the transport.  The default (false) is to
-                                ! use a predictor continuity step to find the pressure field, and then to do a
-                                ! corrector continuity step using a weighted average of the old and new
-                                ! velocities, with weights of (1-BEBT) and BEBT.
-DYNAMIC_SURFACE_PRESSURE = True !   [Boolean] default = False
-                                ! If true, add a dynamic pressure due to a viscous ice shelf, for instance.
-BEBT = 0.2                      !   [nondim] default = 0.1
-                                ! BEBT determines whether the barotropic time stepping uses the forward-backward
-                                ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range
-                                ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1
-                                ! (for a backward Euler treatment). In practice, BEBT must be greater than about
-                                ! 0.05.
-DTBT = -0.9                     !   [s or nondim] default = -0.98
-                                ! The barotropic time step, in s. DTBT is only used with the split explicit time
-                                ! stepping. To set the time step automatically based on the maximum stable value,
-                                ! use 0; a negative value gives the fraction of the stable value. Setting
-                                ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will
-                                ! actually be used is an integer fraction of DT, rounding down.
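Editorial aside, not part of the deleted template: a minimal sketch of how a negative DTBT maps onto the step that is actually used. The stable-step estimate below is made up, and the rounding rule is inferred from the comment above (take DT divided by the smallest integer that keeps the step at or below the requested fraction of the stable value).

    # Minimal sketch; dtbt_stable is an assumed estimate of the maximum
    # stable barotropic time step, not something read from this file.
    import math

    DT = 900.0            # assumed baroclinic time step [s]
    DTBT = -0.9           # negative => fraction of the stable value
    dtbt_stable = 42.0    # assumed stable barotropic step [s]

    target = abs(DTBT) * dtbt_stable    # 37.8 s
    nsplit = math.ceil(DT / target)     # smallest n with DT/n <= target
    dtbt_used = DT / nsplit             # an integer fraction of DT
    print(nsplit, dtbt_used)            # 24 37.5
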
-BT_USE_OLD_CORIOLIS_BRACKET_BUG = True !   [Boolean] default = False
-                                ! If True, use an order of operations that is not bitwise rotationally symmetric
-                                ! in the meridional Coriolis term of the barotropic solver.
-
-! === module MOM_mixed_layer_restrat ===
-MIXEDLAYER_RESTRAT = True       !   [Boolean] default = False
-                                ! If true, a density-gradient dependent re-stratifying flow is imposed in the
-                                ! mixed layer. Can be used in ALE mode without restriction but in layer mode can
-                                ! only be used if BULKMIXEDLAYER is true.
-FOX_KEMPER_ML_RESTRAT_COEF = 1.0 !   [nondim] default = 0.0
-                                ! A nondimensional coefficient that is proportional to the ratio of the
-                                ! deformation radius to the dominant lengthscale of the submesoscale mixed layer
-                                ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic
-                                ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of
-                                ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al.
-                                ! (2010)
-MLE_FRONT_LENGTH = 200.0        !   [m] default = 0.0
-                                ! If non-zero, is the frontal-length scale used to calculate the upscaling of
-                                ! buoyancy gradients that is otherwise represented by the parameter
-                                ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended
-                                ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0.
-MLE_USE_PBL_MLD = True          !   [Boolean] default = False
-                                ! If true, the MLE parameterization will use the mixed-layer depth provided by
-                                ! the active PBL parameterization. If false, MLE will estimate a MLD based on a
-                                ! density difference with the surface using the parameter MLE_DENSITY_DIFF.
-MLE_MLD_DECAY_TIME = 2.592E+06  !   [s] default = 0.0
-                                ! The time-scale for a running-mean filter applied to the mixed-layer depth used
-                                ! in the MLE restratification parameterization. When the MLD deepens below the
-                                ! current running-mean the running-mean is instantaneously set to the current
-                                ! MLD.
-
-! === module MOM_diabatic_driver ===
-! The following parameters are used for diabatic processes.
-ENERGETICS_SFC_PBL = True       !   [Boolean] default = False
-                                ! If true, use an implied energetics planetary boundary layer scheme to
-                                ! determine the diffusivity and viscosity in the surface boundary layer.
-EPBL_IS_ADDITIVE = False        !   [Boolean] default = True
-                                ! If true, the diffusivity from ePBL is added to all other diffusivities.
-                                ! Otherwise, the larger of the kappa-shear and ePBL diffusivities is used.
-
-! === module MOM_CVMix_KPP ===
-! This is the MOM wrapper to CVMix:KPP
-! See http://cvmix.github.io/
-
-! === module MOM_tidal_mixing ===
-! Vertical Tidal Mixing Parameterization
-INT_TIDE_DISSIPATION = True     !   [Boolean] default = False
-                                ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing,
-                                ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004).
-INT_TIDE_PROFILE = "POLZIN_09"  ! default = "STLAURENT_02"
-                                ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with
-                                ! INT_TIDE_DISSIPATION. Valid values are:
-                                !    STLAURENT_02 - Use the St. Laurent et al exponential
-                                !                   decay profile.
-                                !    POLZIN_09 - Use the Polzin WKB-stretched algebraic
-                                !                   decay profile.
-INT_TIDE_DECAY_SCALE = 300.3003003003003 !   [m] default = 500.0
-                                ! The decay scale away from the bottom for tidal TKE with the new coding when
-                                ! INT_TIDE_DISSIPATION is used.
-KAPPA_ITIDES = 6.28319E-04      !   [m-1] default = 6.283185307179586E-04
-                                ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10
-                                ! km, as in St. Laurent et al. 2002.
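Editorial check, not file content: the default quoted above is just 2*pi over a 10 km wavelength.

    # 2*pi divided by a 10 km wavelength reproduces the documented default.
    import math
    print(2 * math.pi / 10.0e3)   # ~6.283185307179586e-04 [m-1]
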
-KAPPA_H2_FACTOR = 0.84          !   [nondim] default = 1.0
-                                ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION.
-TKE_ITIDE_MAX = 0.1             !   [W m-2] default = 1000.0
-                                ! The maximum internal tide energy source available to mix above the bottom
-                                ! boundary layer with INT_TIDE_DISSIPATION.
-READ_TIDEAMP = True             !   [Boolean] default = False
-                                ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude
-                                ! with INT_TIDE_DISSIPATION.
-TIDEAMP_FILE = "tidal_amplitude.nc" ! default = "tideamp.nc"
-                                ! The path to the file containing the spatially varying tidal amplitudes with
-                                ! INT_TIDE_DISSIPATION.
-H2_FILE = "ocean_topog.nc"      !
-                                ! The path to the file containing the sub-grid-scale topographic roughness
-                                ! amplitude with INT_TIDE_DISSIPATION.
-
-! === module MOM_CVMix_conv ===
-! Parameterization of enhanced mixing due to convection via CVMix
-
-! === module MOM_geothermal ===
-GEOTHERMAL_SCALE = 1.0          !   [W m-2 or various] default = 0.0
-                                ! The constant geothermal heat flux, a rescaling factor for the heat flux read
-                                ! from GEOTHERMAL_FILE, or 0 to disable the geothermal heating.
-GEOTHERMAL_FILE = "geothermal_davies2013_v1.nc" ! default = ""
-                                ! The file from which the geothermal heating is to be read, or blank to use a
-                                ! constant heating rate.
-GEOTHERMAL_VARNAME = "geothermal_hf" ! default = "geo_heat"
-                                ! The name of the geothermal heating variable in GEOTHERMAL_FILE.
-
-! === module MOM_set_diffusivity ===
-BBL_MIXING_AS_MAX = False       !   [Boolean] default = True
-                                ! If true, take the maximum of the diffusivity from the BBL mixing and the other
-                                ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added.
-USE_LOTW_BBL_DIFFUSIVITY = True !   [Boolean] default = False
-                                ! If true, uses a simple, imprecise but non-coordinate dependent, model of BBL
-                                ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL
-                                ! scheme.
-SIMPLE_TKE_TO_KD = True         !   [Boolean] default = False
-                                ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary
-                                ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact
-                                ! energetics for an isopycnal layer-formulation.
-
-! === module MOM_bkgnd_mixing ===
-! Adding static vertical background mixing coefficients
-KD = 1.5E-05                    !   [m2 s-1]
-                                ! The background diapycnal diffusivity of density in the interior. Zero or the
-                                ! molecular value, ~1e-7 m2 s-1, may be used.
-KD_MIN = 2.0E-06                !   [m2 s-1] default = 1.5E-07
-                                ! The minimum diapycnal diffusivity.
-HENYEY_IGW_BACKGROUND = True    !   [Boolean] default = False
-                                ! If true, use a latitude-dependent scaling for the near surface background
-                                ! diffusivity, as described in Harrison & Hallberg, JPO 2008.
-KD_MAX = 0.1                    !   [m2 s-1] default = -1.0
-                                ! The maximum permitted increment for the diapycnal diffusivity from TKE-based
-                                ! parameterizations, or a negative value for no limit.
-
-! === module MOM_kappa_shear ===
-! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008
-USE_JACKSON_PARAM = True        !   [Boolean] default = False
-                                ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing
-                                ! parameterization.
-MAX_RINO_IT = 25                !   [nondim] default = 50
-                                ! The maximum number of iterations that may be used to estimate the Richardson
-                                ! number driven mixing.
-VERTEX_SHEAR = False            !   [Boolean] default = False
-                                ! If true, do the calculations of the shear-driven mixing
-                                ! at the cell vertices (i.e., the vorticity points).
-KAPPA_SHEAR_ITER_BUG = True     !   [Boolean] default = True
-                                ! If true, use an older, dimensionally inconsistent estimate of the derivative
-                                ! of diffusivity with energy in the Newton's method iteration.  The bug causes
-                                ! undercorrections when dz > 1 m.
-KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True !   [Boolean] default = True
-                                ! If true, report back the latest estimate of TKE instead of the time average
-                                ! TKE when there is mass in all layers.  Otherwise always report the time
-                                ! averaged TKE, as is currently done when there are some massless layers.
-
-! === module MOM_CVMix_shear ===
-! Parameterization of shear-driven turbulence via CVMix (various options)
-
-! === module MOM_CVMix_ddiff ===
-! Parameterization of mixing due to double diffusion processes via CVMix
-
-! === module MOM_diabatic_aux ===
-! The following parameters are used for auxiliary diabatic processes.
-PRESSURE_DEPENDENT_FRAZIL = False !   [Boolean] default = False
-                                ! If true, use a pressure dependent freezing temperature when making frazil. The
-                                ! default is false, which will be faster but is inappropriate with ice-shelf
-                                ! cavities.
-VAR_PEN_SW = True               !   [Boolean] default = False
-                                ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine
-                                ! the e-folding depth of incoming short wave radiation.
-CHL_FILE = @[CHLCLIM]           !
-                                ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It
-                                ! is used when VAR_PEN_SW and CHL_FROM_FILE are true.
-CHL_VARNAME = "chlor_a"         ! default = "CHL_A"
-                                ! Name of CHL_A variable in CHL_FILE.
-
-! === module MOM_energetic_PBL ===
-ML_OMEGA_FRAC = 0.001           !   [nondim] default = 0.0
-                                ! When setting the decay scale for turbulence, use this fraction of the absolute
-                                ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 +
-                                ! of*4*omega^2).
-TKE_DECAY = 0.01                !   [nondim] default = 2.5
-                                ! TKE_DECAY relates the vertical rate of decay of the TKE available for
-                                ! mechanical entrainment to the natural Ekman depth.
-EPBL_MSTAR_SCHEME = "OM4"       ! default = "CONSTANT"
-                                ! EPBL_MSTAR_SCHEME selects the method for setting mstar.  Valid values are:
-                                !    CONSTANT   - Use a fixed mstar given by MSTAR
-                                !    OM4        - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4
-                                !    REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018.
-MSTAR_CAP = 10.0                !   [nondim] default = -1.0
-                                ! If this value is positive, it sets the maximum value of mstar allowed in ePBL.
-                                ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT).
-MSTAR2_COEF1 = 0.29             !   [nondim] default = 0.3
-                                ! Coefficient in computing mstar when rotation and stabilizing effects are both
-                                ! important (used if EPBL_MSTAR_SCHEME = OM4).
-MSTAR2_COEF2 = 0.152            !   [nondim] default = 0.085
-                                ! Coefficient in computing mstar when only rotation limits the total mixing
-                                ! (used if EPBL_MSTAR_SCHEME = OM4)
-EPBL_MLD_BISECTION = True       !   [Boolean] default = False
-                                ! If true, use bisection with the iterative determination of the self-consistent
-                                ! mixed layer depth.  Otherwise use the false position after a maximum and
-                                ! minimum bound have been evaluated and the returned value or bisection before
-                                ! this.
-NSTAR = 0.06                    !   [nondim] default = 0.2
-                                ! The portion of the buoyant potential energy imparted by surface fluxes that is
-                                ! available to drive entrainment at the base of mixed layer when that energy is
-                                ! positive.
-MSTAR_CONV_ADJ = 0.667          !   [nondim] default = 0.0
-                                ! Coefficient used for reducing mstar during convection due to reduction of
-                                ! stable density gradient.
-USE_MLD_ITERATION = True        !   [Boolean] default = False
-                                ! A logical that specifies whether or not to use the distance to the bottom of
-                                ! the actively turbulent boundary layer to help set the EPBL length scale.
-EPBL_TRANSITION_SCALE = 0.01    !   [nondim] default = 0.1
-                                ! A scale for the mixing length in the transition layer at the edge of the
-                                ! boundary layer as a fraction of the boundary layer thickness.
-MIX_LEN_EXPONENT = 1.0          !   [nondim] default = 2.0
-                                ! The exponent applied to the ratio of the distance to the MLD and the MLD depth
-                                ! which determines the shape of the mixing length. This is only used if
-                                ! USE_MLD_ITERATION is True.
-USE_LA_LI2016 = @[MOM6_USE_LI2016] !   [nondim] default = False
-                                ! A logical to use the Li et al. 2016 (submitted) formula to determine the
-                                ! Langmuir number.
-USE_WAVES = @[MOM6_USE_WAVES]   !   [Boolean] default = False
-                                ! If true, enables surface wave modules.
-WAVE_METHOD = "SURFACE_BANDS"   ! default = "EMPTY"
-                                ! Choice of wave method, valid options include:
-                                !  TEST_PROFILE  - Prescribed from surface Stokes drift
-                                !                  and a decay wavelength.
-                                !  SURFACE_BANDS - Computed from multiple surface values
-                                !                  and decay wavelengths.
-                                !  DHH85         - Uses Donelan et al. 1985 empirical
-                                !                  wave spectrum with prescribed values.
-                                !  LF17          - Infers Stokes drift profile from wind
-                                !                  speed following Li and Fox-Kemper 2017.
-SURFBAND_SOURCE = "COUPLER"     ! default = "EMPTY"
-                                ! Choice of SURFACE_BANDS data mode, valid options include:
-                                !  DATAOVERRIDE  - Read from NetCDF using FMS DataOverride.
-                                !  COUPLER       - Look for variables from coupler pass
-                                !  INPUT         - Testing with fixed values.
-STK_BAND_COUPLER = 3            ! default = 1
-                                ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has
-                                ! to be consistent with the number of Stokes drift bands in WW3, or the model
-                                ! will fail.
-SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 !   [rad/m] default = 0.12566
-                                ! Central wavenumbers for surface Stokes drift bands.
-EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE"
-                                ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence.
-                                ! Valid values are:
-                                !    NONE     - Do not do any extra mixing due to Langmuir turbulence
-                                !    RESCALE  - Use a multiplicative rescaling of mstar to account for Langmuir
-                                !      turbulence
-                                !    ADDITIVE - Add a Langmuir turbulence contribution to mstar in addition to other
-                                !      contributions
-LT_ENHANCE_COEF = 0.044         !   [nondim] default = 0.447
-                                ! Coefficient for Langmuir enhancement of mstar
-LT_ENHANCE_EXP = -1.5           !   [nondim] default = -1.33
-                                ! Exponent for Langmuir enhancement of mstar
-LT_MOD_LAC1 = 0.0               !   [nondim] default = -0.87
-                                ! Coefficient for modification of Langmuir number due to MLD approaching Ekman
-                                ! depth.
-LT_MOD_LAC4 = 0.0               !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! stable Obukhov depth.
-LT_MOD_LAC5 = 0.22              !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! unstable Obukhov depth.
-
-! === module MOM_regularize_layers ===
-
-! === module MOM_opacity ===
-PEN_SW_NBANDS = 3               ! default = 1
-                                ! The number of bands of penetrating shortwave radiation.
-
-! === module MOM_tracer_advect ===
-TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM"
-                                ! The horizontal transport scheme for tracers:
-                                !   PLM    - Piecewise Linear Method
-                                !   PPM:H3 - Piecewise Parabolic Method (Huyhn 3rd order)
-                                !   PPM    - Piecewise Parabolic Method (Colella-Woodward)
-
-! === module MOM_tracer_hor_diff ===
-KHTR = 50.0                     !   [m2 s-1] default = 0.0
-                                ! The background along-isopycnal tracer diffusivity.
-CHECK_DIFFUSIVE_CFL = True      !   [Boolean] default = False
-                                ! If true, use enough iterations of the diffusion to ensure that the diffusive
-                                ! equivalent of the CFL limit is not violated.  If false, always use the greater
-                                ! of 1 or MAX_TR_DIFFUSION_CFL iterations.
-MAX_TR_DIFFUSION_CFL = 2.0      !   [nondim] default = -1.0
-                                ! If positive, locally limit the along-isopycnal tracer diffusivity to keep the
-                                ! diffusive CFL locally at or below this value.  The number of diffusive
-                                ! iterations is often this value or the next greater integer.
-
-! === module MOM_neutral_diffusion ===
-! This module implements neutral diffusion of tracers
-USE_NEUTRAL_DIFFUSION = True    !   [Boolean] default = False
-                                ! If true, enables the neutral diffusion module.
-
-! === module ocean_model_init ===
-RESTART_CHECKSUMS_REQUIRED = False
-
-! === module MOM_oda_incupd ===
-ODA_INCUPD = @[ODA_INCUPD]   ! [Boolean] default = False
-                             ! If true, oda incremental updates will be applied
-                             ! everywhere in the domain.
-ODA_INCUPD_FILE = "mom6_increment.nc"   ! The name of the file with the T,S,h increments.
-
-ODA_TEMPINC_VAR = "Temp"        ! default = "ptemp_inc"
-                                ! The name of the potential temperature inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_SALTINC_VAR = "Salt"        ! default = "sal_inc"
-                                ! The name of the salinity inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_THK_VAR = "h"               ! default = "h"
-                                ! The name of the int. depth inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_INCUPD_UV = false           !
-!ODA_UINC_VAR = "u"             ! default = "u_inc"
-                                ! The name of the zonal vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-!ODA_VINC_VAR = "v"             ! default = "v_inc"
-                                ! The name of the meridional vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS]            ! default=3.0
-
-! === module MOM_lateral_boundary_diffusion ===
-! This module implements lateral diffusion of tracers near boundaries
-
-! === module MOM_sum_output ===
-MAXTRUNC = 100000               !   [truncations save_interval-1] default = 0
-                                ! The run will be stopped, and the day set to a very large value if the velocity
-                                ! is truncated more than MAXTRUNC times between energy saves.  Set MAXTRUNC to 0
-                                ! to stop if there is any truncation of velocities.
-ENERGYSAVEDAYS = 1.0            !   [days] default = 1.0
-                                ! The interval in units of TIMEUNIT between saves of the energies of the run and
-                                ! other globally summed diagnostics.
-ENERGYSAVEDAYS_GEOMETRIC = 0.25 !   [days] default = 0.0
-                                ! The starting interval in units of TIMEUNIT for the first call to save the
-                                ! energies of the run and other globally summed diagnostics. The interval
-                                ! increases by a factor of 2 after each call to write_energy.
-
-! === module ocean_model_init ===
-
-! === module MOM_surface_forcing ===
-OCEAN_SURFACE_STAGGER = "A"     ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the surface velocity field that is
-                                ! returned to the coupler.  Valid values include
-                                ! 'A', 'B', or 'C'.
-
-MAX_P_SURF = 0.0                !   [Pa] default = -1.0
-                                ! The maximum surface pressure that can be exerted by the atmosphere and
-                                ! floating sea-ice or ice shelves. This is needed because the FMS coupling
-                                ! structure does not limit the water that can be frozen out of the ocean and the
-                                ! ice-ocean heat fluxes are treated explicitly.  No limit is applied if a
-                                ! negative value is used.
-WIND_STAGGER = "A"              ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the input wind stress field.  Valid
-                                ! values are 'A', 'B', or 'C'.
-CD_TIDES = 0.0018               !   [nondim] default = 1.0E-04
-                                ! The drag coefficient that applies to the tides.
-GUST_CONST = 0.0                !   [Pa] default = 0.02
-                                ! The background gustiness in the winds.
-FIX_USTAR_GUSTLESS_BUG = False  !   [Boolean] default = False
-                                ! If true, correct a bug in the time-averaging of the gustless wind friction
-                                ! velocity.
-USE_RIGID_SEA_ICE = True        !   [Boolean] default = False
-                                ! If true, sea-ice is rigid enough to exert a nonhydrostatic pressure that
-                                ! resists vertical motion.
-SEA_ICE_RIGID_MASS = 100.0      !   [kg m-2] default = 1000.0
-                                ! The mass of sea-ice per unit area at which the sea-ice starts to exhibit
-                                ! rigidity.
-LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF]  !   [Boolean] default = False
-                                ! If true, allows liquid river runoff to be specified via
-                                ! the data_table using the component name 'OCN'.
-! === module ocean_stochastics ===
-DO_SPPT   = @[DO_OCN_SPPT]      ! [Boolean] default = False
-                                ! If true, perturb the diabatic tendencies in MOM_diabatic_driver.
-PERT_EPBL = @[PERT_EPBL]        ! [Boolean] default = False
-                                ! If true, perturb the KE dissipation and destruction in MOM_energetic_PBL.
-! === module MOM_restart ===
-
-! === module MOM_file_parser ===
diff --git a/parm/ufs/mom6/MOM_input_template_100 b/parm/ufs/mom6/MOM_input_template_100
deleted file mode 100644
index f26d6e4bfb..0000000000
--- a/parm/ufs/mom6/MOM_input_template_100
+++ /dev/null
@@ -1,866 +0,0 @@
-! This file was written by the model and records all non-layout or debugging parameters used at run-time.
-! === module MOM ===
-
-! === module MOM_unit_scaling ===
-! Parameters for doing unit scaling of variables.
-USE_REGRIDDING = True           !   [Boolean] default = False
-                                ! If True, use the ALE algorithm (regridding/remapping). If False, use the
-                                ! layered isopycnal algorithm.
-THICKNESSDIFFUSE = True         !   [Boolean] default = False
-                                ! If true, interface heights are diffused with a coefficient of KHTH.
-THICKNESSDIFFUSE_FIRST = True   !   [Boolean] default = False
-                                ! If true, do thickness diffusion before dynamics. This is only used if
-                                ! THICKNESSDIFFUSE is true.
-DT = @[DT_DYNAM_MOM6]           !   [s]
-                                ! The (baroclinic) dynamics time step.  The time-step that is actually used will
-                                ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode
-                                ! or the coupling timestep in coupled mode).
-DT_THERM = @[DT_THERM_MOM6]     !   [s] default = 1800.0
-                                ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be
-                                ! an integer multiple of DT and less than the forcing or coupling time-step,
-                                ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer
-                                ! multiple of the coupling timestep.  By default DT_THERM is set to DT.
-THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN]    !   [Boolean] default = False
-                                ! If true, the MOM will take thermodynamic and tracer timesteps that can be
-                                ! longer than the coupling timestep. The actual thermodynamic timestep that is
-                                ! used in this case is the largest integer multiple of the coupling timestep
-                                ! that is less than or equal to DT_THERM.
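Editorial aside, not part of the deleted template: a minimal sketch of the DT / DT_THERM / THERMO_SPANS_COUPLING relationships described above. The time-step values are made up; only the relationships are taken from the comments.

    # Minimal sketch with assumed values; dt_coupling stands in for the
    # forcing/coupling time step, which is set outside this file.
    dt_coupling = 3600.0   # assumed coupling time step [s]
    DT = 900.0             # assumed value of @[DT_DYNAM_MOM6] [s]
    DT_THERM = 7200.0      # assumed value of @[DT_THERM_MOM6] [s]
    THERMO_SPANS_COUPLING = True

    assert dt_coupling % DT == 0, "DT should divide the coupling time step"
    assert DT_THERM % DT == 0, "DT_THERM should be an integer multiple of DT"

    if THERMO_SPANS_COUPLING:
        # largest integer multiple of the coupling step that is <= DT_THERM
        dt_therm_used = dt_coupling * (DT_THERM // dt_coupling)
    else:
        assert DT_THERM <= dt_coupling
        dt_therm_used = DT_THERM
    print(dt_therm_used)   # 7200.0 with the values above
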
-HFREEZE = 20.0                  !   [m] default = -1.0
-                                ! If HFREEZE > 0, melt potential will be computed. The actual depth
-                                ! over which melt potential is computed will be min(HFREEZE, OBLD)
-                                ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default)
-                                ! melt potential will not be computed.
-DTBT_RESET_PERIOD = -1.0        !   [s] default = 7200.0
-                                ! The period between recalculations of DTBT (if DTBT <= 0). If DTBT_RESET_PERIOD
-                                ! is negative, DTBT is set based only on information available at
-                                ! initialization.  If 0, DTBT will be set every dynamics time step. The default
-                                ! is set by DT_THERM.  This is only used if SPLIT is true.
-FRAZIL = True                   !   [Boolean] default = False
-                                ! If true, water freezes if it gets too cold, and the accumulated heat deficit
-                                ! is returned in the surface state.  FRAZIL is only used if
-                                ! ENABLE_THERMODYNAMICS is true.
-BOUND_SALINITY = True           !   [Boolean] default = False
-                                ! If true, limit salinity to being positive. (The sea-ice model may ask for more
-                                ! salt than is available and drive the salinity negative otherwise.)
-MIN_SALINITY = 0.01             !   [PPT] default = 0.0
-                                ! The minimum value of salinity when BOUND_SALINITY=True.
-C_P = 3925.0                    !   [J kg-1 K-1] default = 3991.86795711963
-                                ! The heat capacity of sea water, approximated as a constant. This is only used
-                                ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10
-                                ! definition of conservative temperature.
-USE_PSURF_IN_EOS = False        !   [Boolean] default = True
-                                ! If true, always include the surface pressure contributions in equation of
-                                ! state calculations.
-CHECK_BAD_SURFACE_VALS = True   !   [Boolean] default = False
-                                ! If true, check the surface state for ridiculous values.
-BAD_VAL_SSH_MAX = 50.0          !   [m] default = 20.0
-                                ! The value of SSH above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SSS_MAX = 75.0          !   [PPT] default = 45.0
-                                ! The value of SSS above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MAX = 55.0          !   [deg C] default = 45.0
-                                ! The value of SST above which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-BAD_VAL_SST_MIN = -3.0          !   [deg C] default = -2.1
-                                ! The value of SST below which a bad value message is triggered, if
-                                ! CHECK_BAD_SURFACE_VALS is true.
-DEFAULT_2018_ANSWERS = True     !   [Boolean] default = False
-                                ! This sets the default value for the various _2018_ANSWERS parameters.
-WRITE_GEOM = 2                  ! default = 1
-                                ! If =0, never write the geometry and vertical grid files. If =1, write the
-                                ! geometry and vertical grid files only for a new simulation. If =2, always
-                                ! write the geometry and vertical grid files. Other values are invalid.
-SAVE_INITIAL_CONDS = False      !   [Boolean] default = False
-                                ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE.
-
-! === module MOM_domains ===
-TRIPOLAR_N = True               !   [Boolean] default = False
-                                ! Use tripolar connectivity at the northern edge of the domain.  With
-                                ! TRIPOLAR_N, NIGLOBAL must be even.
-NIGLOBAL = @[NX_GLB]            !
-                                ! The total number of thickness grid points in the x-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NJGLOBAL = @[NY_GLB]            !
-                                ! The total number of thickness grid points in the y-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-
-! === module MOM_hor_index ===
-! Sets the horizontal array index types.
-
-! === module MOM_fixed_initialization ===
-INPUTDIR = "INPUT"              ! default = "."
-                                ! The directory in which input files are found.
-
-! === module MOM_grid_init ===
-GRID_CONFIG = "mosaic"          !
-                                ! A character string that determines the method for defining the horizontal
-                                ! grid.  Current options are:
-                                !     mosaic - read the grid from a mosaic (supergrid)
-                                !              file set by GRID_FILE.
-                                !     cartesian - use a (flat) Cartesian grid.
-                                !     spherical - use a simple spherical grid.
-                                !     mercator - use a Mercator spherical grid.
-GRID_FILE = "ocean_hgrid.nc"    !
-                                ! Name of the file from which to read horizontal grid data.
-GRID_ROTATION_ANGLE_BUGS = False  ! [Boolean] default = True
-                                ! If true, use an older algorithm to calculate the sines and
-                                ! cosines needed to rotate between grid-oriented directions and
-                                ! true north and east.  Differences arise at the tripolar fold.
-USE_TRIPOLAR_GEOLONB_BUG = False !   [Boolean] default = True
-                                ! If true, use older code that incorrectly sets the longitude in some points
-                                ! along the tripolar fold to be off by 360 degrees.
-TOPO_CONFIG = "file"            !
-                                ! This specifies how bathymetry is specified:
-                                !     file - read bathymetric information from the file
-                                !       specified by (TOPO_FILE).
-                                !     flat - flat bottom set to MAXIMUM_DEPTH.
-                                !     bowl - an analytically specified bowl-shaped basin
-                                !       ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH.
-                                !     spoon - a similar shape to 'bowl', but with a vertical
-                                !       wall at the southern face.
-                                !     halfpipe - a zonally uniform channel with a half-sine
-                                !       profile in the meridional direction.
-                                !     bbuilder - build topography from list of functions.
-                                !     benchmark - use the benchmark test case topography.
-                                !     Neverworld - use the Neverworld test case topography.
-                                !     DOME - use a slope and channel configuration for the
-                                !       DOME sill-overflow test case.
-                                !     ISOMIP - use a slope and channel configuration for the
-                                !       ISOMIP test case.
-                                !     DOME2D - use a shelf and slope configuration for the
-                                !       DOME2D gravity current/overflow test case.
-                                !     Kelvin - flat but with rotated land mask.
-                                !     seamount - Gaussian bump for spontaneous motion test case.
-                                !     dumbbell - Sloshing channel with reservoirs on both ends.
-                                !     shelfwave - exponential slope for shelfwave test case.
-                                !     Phillips - ACC-like idealized topography used in the Phillips config.
-                                !     dense - Denmark Strait-like dense water formation and overflow.
-                                !     USER - call a user modified routine.
-TOPO_EDITS_FILE = "@[TOPOEDITS]" ! default = ""
-                                ! The file from which to read a list of i,j,z topography overrides.
-ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES]   ! default = "False"
-                                ! If true, allow topography overrides to change ocean points to land
-MAXIMUM_DEPTH = 6500.0          !   [m]
-                                ! The maximum depth of the ocean.
-MINIMUM_DEPTH = 9.5             !   [m] default = 0.0
-                                ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is
-                                ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is
-                                ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than
-                                ! MASKING_DEPTH are rounded to MINIMUM_DEPTH.
-
-! === module MOM_open_boundary ===
-! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply,
-! if any.
-MASKING_DEPTH = 0.0             !   [m] default = -9999.0
-                                ! The depth below which to mask points as land points, for which all fluxes are
-                                ! zeroed out. MASKING_DEPTH is ignored if negative.
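Editorial aside, not part of the deleted template: a minimal sketch of the MINIMUM_DEPTH / MASKING_DEPTH behaviour described above. The sample depths are made up, and the treatment of points exactly at MASKING_DEPTH is a guess rather than something taken from the MOM6 code.

    # Sketch of the documented masking/rounding rules; boundary handling
    # at exact equality is an assumption.
    MINIMUM_DEPTH = 9.5   # [m]
    MASKING_DEPTH = 0.0   # [m]; ignored if negative

    def effective_depth(depth):
        """Return None for a masked (land) point, else the depth the model uses."""
        if MASKING_DEPTH < 0.0:
            # MASKING_DEPTH unspecified: shallow points are simply treated as land.
            return None if depth < MINIMUM_DEPTH else depth
        if depth <= MASKING_DEPTH:
            return None                  # masked as land, all fluxes zeroed
        if depth < MINIMUM_DEPTH:
            return MINIMUM_DEPTH         # rounded up to MINIMUM_DEPTH
        return depth

    print([effective_depth(d) for d in (-2.0, 3.0, 50.0)])   # [None, 9.5, 50.0]
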
-CHANNEL_CONFIG = "list"         ! default = "none"
-                                ! A parameter that determines which set of channels is
-                                ! restricted to specific widths.  Options are:
-                                !     none - All channels have the grid width.
-                                !     global_1deg - Sets 16 specific channels appropriate
-                                !       for a 1-degree model, as used in CM2G.
-                                !     list - Read the channel locations and widths from a
-                                !       text file, like MOM_channel_list in the MOM_SIS
-                                !       test case.
-                                !     file - Read open face widths everywhere from a
-                                !       NetCDF file on the model grid.
-CHANNEL_LIST_FILE = "MOM_channels_SPEAR" ! default = "MOM_channel_list"
-                                ! The file from which the list of narrowed channels is read.
-
-! === module MOM_verticalGrid ===
-! Parameters providing information about the vertical grid.
-NK = 75                         !   [nondim]
-                                ! The number of model layers.
-
-! === module MOM_tracer_registry ===
-
-! === module MOM_EOS ===
-TFREEZE_FORM = "MILLERO_78"     ! default = "LINEAR"
-                                ! TFREEZE_FORM determines which expression should be used for the freezing
-                                ! point.  Currently, the valid choices are "LINEAR", "MILLERO_78", "TEOS10"
-
-! === module MOM_restart ===
-PARALLEL_RESTARTFILES = True    !   [Boolean] default = False
-                                ! If true, each processor writes its own restart file, otherwise a single
-                                ! restart file is generated
-
-! === module MOM_tracer_flow_control ===
-USE_IDEAL_AGE_TRACER = False    !   [Boolean] default = False
-                                ! If true, use the ideal_age_example tracer package.
-
-! === module ideal_age_example ===
-
-! === module MOM_coord_initialization ===
-COORD_CONFIG = "file"           ! default = "none"
-                                ! This specifies how layers are to be defined:
-                                !     ALE or none - used to avoid defining layers in ALE mode
-                                !     file - read coordinate information from the file
-                                !       specified by (COORD_FILE).
-                                !     BFB - Custom coords for buoyancy-forced basin case
-                                !       based on SST_S, T_BOT and DRHO_DT.
-                                !     linear - linear based on interfaces not layers
-                                !     layer_ref - linear based on layer densities
-                                !     ts_ref - use reference temperature and salinity
-                                !     ts_range - use range of temperature and salinity
-                                !       (T_REF and S_REF) to determine surface density
-                                !       and GINT to calculate internal densities.
-                                !     gprime - use reference density (RHO_0) for surface
-                                !       density and GINT to calculate internal densities.
-                                !     ts_profile - use temperature and salinity profiles
-                                !       (read from COORD_FILE) to set layer densities.
-                                !     USER - call a user modified routine.
-COORD_FILE = "layer_coord.nc"   !
-                                ! The file from which the coordinate densities are read.
-REMAP_UV_USING_OLD_ALG = True   !   [Boolean] default = False
-                                ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If
-                                ! false, uses the new method that remaps between grids described by an old and
-                                ! new thickness.
-REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER"
-                                ! Coordinate mode for vertical regridding. Choose among the following
-                                ! possibilities:  LAYER - Isopycnal or stacked shallow water layers
-                                !  ZSTAR, Z* - stretched geopotential z*
-                                !  SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf
-                                !  SIGMA - terrain following coordinates
-                                !  RHO   - continuous isopycnal
-                                !  HYCOM1 - HyCOM-like hybrid coordinate
-                                !  SLIGHT - stretched coordinates above continuous isopycnal
-                                !  ADAPTIVE - optimize for smooth neutral density surfaces
-BOUNDARY_EXTRAPOLATION = True   !   [Boolean] default = False
-                                ! When defined, a proper high-order reconstruction scheme is used within
-                                ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM
-                                ! reconstruction will also be used within boundary cells.
-ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter ALE_RESOLUTION
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 !   [m]
-                                ! The distribution of vertical resolution for the target
-                                ! grid used for Eulerian-like coordinates. For example,
-                                ! in z-coordinate mode, the parameter is a list of level
-                                ! thicknesses (in m). In sigma-coordinate mode, the list
-                                ! is of non-dimensional fractions of the water column.
-!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 !   [m]
-                                ! HYBRID target densities for interfaces
-MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum interface depths.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum interface depths
-                                !  PARAM       - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0
-                                ! The list of maximum depths for each interface.
-MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum layer thicknesses.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum layer thicknesses
-                                !  PARAM       - use the vector-parameter MAX_LAYER_THICKNESS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 !   [m]
-                                ! The list of maximum thickness for each layer.
-REMAPPING_SCHEME = "PPM_H4"     ! default = "PLM"
-                                ! This sets the reconstruction scheme used for vertical remapping for all
-                                ! variables. It can be one of the following schemes:
-                                ! PCM         (1st-order accurate)
-                                ! PLM         (2nd-order accurate)
-                                ! PPM_H4      (3rd-order accurate)
-                                ! PPM_IH4     (3rd-order accurate)
-                                ! PQM_IH4IH3  (4th-order accurate)
-                                ! PQM_IH6IH5  (5th-order accurate)
-
-! === module MOM_grid ===
-! Parameters providing information about the lateral grid.
-
-! === module MOM_state_initialization ===
-INIT_LAYERS_FROM_Z_FILE = True  !   [Boolean] default = False
-                                ! If true, initialize the layer thicknesses, temperatures, and salinities from a
-                                ! Z-space file on a latitude-longitude grid.
-
-! === module MOM_initialize_layers_from_Z ===
-TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc"  ! default = "temp_salt_z.nc"
-                                ! The name of the z-space input file used to initialize
-                                ! temperatures (T) and salinities (S). If T and S are not
-                                ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE
-                                ! must be set.
-Z_INIT_FILE_PTEMP_VAR = "temp"  ! default = "ptemp"
-                                ! The name of the potential temperature variable in
-                                ! TEMP_Z_INIT_FILE.
-Z_INIT_FILE_SALT_VAR = "salt"   ! default = "salt"
-                                ! The name of the salinity variable in
-                                ! SALT_Z_INIT_FILE.
-Z_INIT_ALE_REMAPPING = True     !   [Boolean] default = False
-                                ! If True, then remap straight to model coordinate from file.
-Z_INIT_REMAP_OLD_ALG = True     !   [Boolean] default = False
-                                ! If false, uses the preferred remapping algorithm for initialization. If true,
-                                ! use an older, less robust algorithm for remapping.
-
-! === module MOM_diag_mediator ===
-!Jiande NUM_DIAG_COORDS = 2             ! default = 1
-NUM_DIAG_COORDS = 1
-                                ! The number of diagnostic vertical coordinates to use.
-                                ! For each coordinate, an entry in DIAG_COORDS must be provided.
-!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" !
-DIAG_COORDS = "z Z ZSTAR"
-                                ! A list of string tuples associating diag_table modules to
-                                ! a coordinate definition used for diagnostics. Each string
-                                ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME".
-DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw"
-DIAG_MISVAL =  @[MOM6_DIAG_MISVAL]
-!AVAILABLE_DIAGS_FILE = "available_diags.002160" ! default = "available_diags.000000"
-                                ! A file into which to write a list of all available ocean diagnostics that can
-                                ! be included in a diag_table.
-!DIAG_COORD_DEF_Z = "FILE:vgrid_75_2m.nc,dz" ! default = "WOA09"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter DIAG_COORD_RES_Z
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-!DIAG_COORD_DEF_RHO2 = "RFNC1:35,999.5,1028,1028.5,8.,1038.,0.0078125" ! default = "WOA09"
-                                ! Determines how to specify the coordinate resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter DIAG_COORD_RES_RHO2
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-
-! === module MOM_MEKE ===
-USE_MEKE = True                 !   [Boolean] default = False
-                                ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy
-                                ! kinetic energy budget.
-MEKE_GMCOEFF = 1.0              !   [nondim] default = -1.0
-                                ! The efficiency of the conversion of potential energy into MEKE by the
-                                ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this
-                                ! conversion is not used or calculated.
-MEKE_BGSRC = 1.0E-13            !   [W kg-1] default = 0.0
-                                ! A background energy source for MEKE.
-MEKE_KHTH_FAC = 0.8             !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to KhTh.
-MEKE_KHTR_FAC = 0.8             !   [nondim] default = 0.0
-                                ! A factor that maps MEKE%Kh to KhTr.
-MEKE_ALPHA_RHINES = 0.05        !   [nondim] default = 0.0
-                                ! If positive, is a coefficient weighting the Rhines scale in the expression for
-                                ! mixing length used in MEKE-derived diffusivity.
-MEKE_ALPHA_EADY = 0.05          !   [nondim] default = 0.0
-                                ! If positive, is a coefficient weighting the Eady length scale in the
-                                ! expression for mixing length used in MEKE-derived diffusivity.
-
-! === module MOM_lateral_mixing_coeffs ===
-USE_VARIABLE_MIXING = True      !   [Boolean] default = False
-                                ! If true, the variable mixing code will be called.  This allows diagnostics to
-                                ! be created even if the scheme is not used.  If KHTR_SLOPE_CFF>0 or
-                                ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter
-                                ! file.
-RESOLN_SCALED_KH = True         !   [Boolean] default = False
-                                ! If true, the Laplacian lateral viscosity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-RESOLN_SCALED_KHTH = True       !   [Boolean] default = False
-                                ! If true, the interface depth diffusivity is scaled away when the first
-                                ! baroclinic deformation radius is well resolved.
-KHTR_SLOPE_CFF = 0.25           !   [nondim] default = 0.0
-                                ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer
-                                ! diffusivity
-USE_STORED_SLOPES = True        !   [Boolean] default = False
-                                ! If true, the isopycnal slopes are calculated once and stored for re-use. This
-                                ! uses more memory but avoids calling the equation of state more times than
-                                ! should be necessary.
-KH_RES_FN_POWER = 100           !   [nondim] default = 2
-                                ! The power of dx/Ld in the Kh resolution function.  Any positive integer may be
-                                ! used, although even integers are more efficient to calculate.  Setting this
-                                ! greater than 100 results in a step-function being used.
-VISC_RES_FN_POWER = 2           !   [nondim] default = 100
-                                ! The power of dx/Ld in the Kh resolution function.  Any positive integer may be
-                                ! used, although even integers are more efficient to calculate.  Setting this
-                                ! greater than 100 results in a step-function being used. This function affects
-                                ! lateral viscosity, Kh, and not KhTh.
-INTERNAL_WAVE_SPEED_BETTER_EST = False !   [Boolean] default = True
-                                ! If true, use a more robust estimate of the first mode wave speed as the
-                                ! starting point for iterations.
-
-! === module MOM_set_visc ===
-CHANNEL_DRAG = True             !   [Boolean] default = False
-                                ! If true, the bottom drag is exerted directly on each layer proportional to the
-                                ! fraction of the bottom it overlies.
-HBBL = 10.0                     !   [m]
-                                ! The thickness of a bottom boundary layer with a viscosity of KVBBL if
-                                ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom
-                                ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but
-                                ! LINEAR_DRAG is not.
-DRAG_BG_VEL = 0.1               !   [m s-1] default = 0.0
-                                ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an
-                                ! unresolved  velocity that is combined with the resolved velocity to estimate
-                                ! the velocity magnitude.  DRAG_BG_VEL is only used when BOTTOMDRAGLAW is
-                                ! defined.
-BBL_USE_EOS = True              !   [Boolean] default = False
-                                ! If true, use the equation of state in determining the properties of the bottom
-                                ! boundary layer.  Otherwise use the layer target potential densities.
-BBL_THICK_MIN = 0.1             !   [m] default = 0.0
-                                ! The minimum bottom boundary layer thickness that can be used with
-                                ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum
-                                ! near-bottom viscosity.
-KV = 1.0E-04                    !   [m2 s-1]
-                                ! The background kinematic viscosity in the interior. The molecular value, ~1e-6
-                                ! m2 s-1, may be used.
-KV_BBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the bottom boundary layer.
-KV_TBL_MIN = 0.0                !   [m2 s-1] default = 1.0E-04
-                                ! The minimum viscosities in the top boundary layer.
-
-! === module MOM_thickness_diffuse ===
-USE_GM_WORK_BUG = True          !   [Boolean] default = False
-                                ! If true, compute the top-layer work tendency on the u-grid with the incorrect
-                                ! sign, for legacy reproducibility.
-
-! === module MOM_dynamics_split_RK2 ===
-
-! === module MOM_continuity ===
-
-! === module MOM_continuity_PPM ===
-ETA_TOLERANCE = 1.0E-06         !   [m] default = 3.75E-09
-                                ! The tolerance for the differences between the barotropic and baroclinic
-                                ! estimates of the sea surface height due to the fluxes through each face.  The
-                                ! total tolerance for SSH is 4 times this value.  The default is
-                                ! 0.5*NK*ANGSTROM, and this should not be set less than about
-                                ! 10^-15*MAXIMUM_DEPTH.
-ETA_TOLERANCE_AUX = 0.001       !   [m] default = 1.0E-06
-                                ! The tolerance for free-surface height discrepancies between the barotropic
-                                ! solution and the sum of the layer thicknesses when calculating the auxiliary
-                                ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can
-                                ! be made larger for efficiency.
-
-! === module MOM_CoriolisAdv ===
-CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY"
-                                ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid
-                                ! values are:
-                                !    SADOURNY75_ENERGY - Sadourny, 1975; energy cons.
-                                !    ARAKAWA_HSU90     - Arakawa & Hsu, 1990
-                                !    SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons.
-                                !    ARAKAWA_LAMB81    - Arakawa & Lamb, 1981; En. + Enst.
-                                !    ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with
-                                !                         Arakawa & Hsu and Sadourny energy
-BOUND_CORIOLIS = True           !   [Boolean] default = False
-                                ! If true, the Coriolis terms at u-points are bounded by the four estimates of
-                                ! (f+rv)v from the four neighboring v-points, and similarly at v-points.  This
-                                ! option would have no effect on the SADOURNY Coriolis scheme if it were
-                                ! possible to use centered difference thickness fluxes.
-
-! === module MOM_PressureForce ===
-
-! === module MOM_PressureForce_AFV ===
-MASS_WEIGHT_IN_PRESSURE_GRADIENT = True !   [Boolean] default = False
-                                ! If true, use mass weighting when interpolating T/S for integrals near the
-                                ! bathymetry in AFV pressure gradient calculations.
-
-! === module MOM_hor_visc ===
-LAPLACIAN = True                !   [Boolean] default = False
-                                ! If true, use a Laplacian horizontal viscosity.
-SMAGORINSKY_KH = True           !   [Boolean] default = False
-                                ! If true, use a Smagorinsky nonlinear eddy viscosity.
-SMAG_LAP_CONST = 0.15           !   [nondim] default = 0.0
-                                ! The nondimensional Laplacian Smagorinsky constant, often 0.15.
-AH_VEL_SCALE = 0.05             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the cube of the grid spacing to
-                                ! calculate the biharmonic viscosity. The final viscosity is the largest of this
-                                ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH.
-SMAGORINSKY_AH = True           !   [Boolean] default = False
-                                ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity.
-SMAG_BI_CONST = 0.06            !   [nondim] default = 0.0
-                                ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06.
-USE_KH_BG_2D = True             !   [Boolean] default = False
-                                ! If true, read a file containing 2-d background harmonic viscosities. The final
-                                ! viscosity is the maximum of the other terms and this background value.
-
-! === module MOM_vert_friction ===
-HMIX_FIXED = 0.5                !   [m]
-                                ! The prescribed depth over which the near-surface viscosity and diffusivity are
-                                ! elevated when the bulk mixed layer is not used.
-KVML = 1.0E-04                  !   [m2 s-1] default = 1.0E-04
-                                ! The kinematic viscosity in the mixed layer.  A typical value is ~1e-2 m2 s-1.
-                                ! KVML is not used if BULKMIXEDLAYER is true.  The default is set by KV.
-MAXVEL = 6.0                    !   [m s-1] default = 3.0E+08
-                                ! The maximum velocity allowed before the velocity components are truncated.
-
-! === module MOM_barotropic ===
-BOUND_BT_CORRECTION = True      !   [Boolean] default = False
-                                ! If true, the corrective pseudo mass-fluxes into the barotropic solver are
-                                ! limited to values that require less than maxCFL_BT_cont to be accommodated.
-BT_PROJECT_VELOCITY = True      !   [Boolean] default = False
-                                ! If true, step the barotropic velocity first and project out the velocity
-                                ! tendency by 1+BEBT when calculating the transport.  The default (false) is to
-                                ! use a predictor continuity step to find the pressure field, and then to do a
-                                ! corrector continuity step using a weighted average of the old and new
-                                ! velocities, with weights of (1-BEBT) and BEBT.
-BT_STRONG_DRAG = True           !   [Boolean] default = False
-                                ! If true, use a stronger estimate of the retarding effects of strong bottom
-                                ! drag, by making it implicit with the barotropic time-step instead of implicit
-                                ! with the baroclinic time-step and dividing by the number of barotropic steps.
-BEBT = 0.2                      !   [nondim] default = 0.1
-                                ! BEBT determines whether the barotropic time stepping uses the forward-backward
-                                ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range
-                                ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1
-                                ! (for a backward Euler treatment). In practice, BEBT must be greater than about
-                                ! 0.05.
-DTBT = -0.9                     !   [s or nondim] default = -0.98
-                                ! The barotropic time step, in s. DTBT is only used with the split explicit time
-                                ! stepping. To set the time step automatically based on the maximum stable value
-                                ! use 0, or a negative value gives the fraction of the stable value. Setting
-                                ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will
-                                ! actually be used is an integer fraction of DT, rounding down.
-
-! === module MOM_mixed_layer_restrat ===
-MIXEDLAYER_RESTRAT = True       !   [Boolean] default = False
-                                ! If true, a density-gradient dependent re-stratifying flow is imposed in the
-                                ! mixed layer. Can be used in ALE mode without restriction but in layer mode can
-                                ! only be used if BULKMIXEDLAYER is true.
-FOX_KEMPER_ML_RESTRAT_COEF = 60.0 !   [nondim] default = 0.0
-                                ! A nondimensional coefficient that is proportional to the ratio of the
-                                ! deformation radius to the dominant lengthscale of the submesoscale mixed layer
-                                ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic
-                                ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of
-                                ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al.
-                                ! (2010)
-MLE_USE_PBL_MLD = True          !   [Boolean] default = False
-                                ! If true, the MLE parameterization will use the mixed-layer depth provided by
-                                ! the active PBL parameterization. If false, MLE will estimate a MLD based on a
-                                ! density difference with the surface using the parameter MLE_DENSITY_DIFF.
-MLE_MLD_DECAY_TIME = 2.592E+06  !   [s] default = 0.0
-                                ! The time-scale for a running-mean filter applied to the mixed-layer depth used
-                                ! in the MLE restratification parameterization. When the MLD deepens below the
-                                ! current running-mean the running-mean is instantaneously set to the current
-                                ! MLD.
-
-! === module MOM_diabatic_driver ===
-! The following parameters are used for diabatic processes.
-ENERGETICS_SFC_PBL = True       !   [Boolean] default = False
-                                ! If true, use an implied energetics planetary boundary layer scheme to
-                                ! determine the diffusivity and viscosity in the surface boundary layer.
-EPBL_IS_ADDITIVE = False        !   [Boolean] default = True
-                                ! If true, the diffusivity from ePBL is added to all other diffusivities.
-                                ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used.
-KD_MIN_TR = 2.0E-06             !   [m2 s-1] default = 2.0E-06
-                                ! A minimal diffusivity that should always be applied to tracers, especially in
-                                ! massless layers near the bottom. The default is 0.1*KD.
-
-! === module MOM_CVMix_KPP ===
-! This is the MOM wrapper to CVMix:KPP
-! See http://cvmix.github.io/
-
-! === module MOM_tidal_mixing ===
-! Vertical Tidal Mixing Parameterization
-INT_TIDE_DISSIPATION = True     !   [Boolean] default = False
-                                ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing,
-                                ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004).
-INT_TIDE_PROFILE = "POLZIN_09"  ! default = "STLAURENT_02"
-                                ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with
-                                ! INT_TIDE_DISSIPATION. Valid values are:
-                                !    STLAURENT_02 - Use the St. Laurent et al exponential
-                                !                   decay profile.
-                                !    POLZIN_09 - Use the Polzin WKB-stretched algebraic
-                                !                   decay profile.
-KAPPA_ITIDES = 6.28319E-04      !   [m-1] default = 6.283185307179586E-04
-                                ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10
-                                ! km, as in St.Laurent et al. 2002.
-KAPPA_H2_FACTOR = 0.84          !   [nondim] default = 1.0
-                                ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION.
-TKE_ITIDE_MAX = 0.1             !   [W m-2] default = 1000.0
-                                ! The maximum internal tide energy source available to mix above the bottom
-                                ! boundary layer with INT_TIDE_DISSIPATION.
-READ_TIDEAMP = True             !   [Boolean] default = False
-                                ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude
-                                ! with INT_TIDE_DISSIPATION.
-TIDEAMP_FILE = "tidal_amplitude.nc" ! default = "tideamp.nc"
-                                ! The path to the file containing the spatially varying tidal amplitudes with
-                                ! INT_TIDE_DISSIPATION.
-H2_FILE = "topog.nc"            !
-                                ! The path to the file containing the sub-grid-scale topographic roughness
-                                ! amplitude with INT_TIDE_DISSIPATION.
-
-! === module MOM_CVMix_conv ===
-! Parameterization of enhanced mixing due to convection via CVMix
-
-! === module MOM_set_diffusivity ===
-BBL_MIXING_AS_MAX = False       !   [Boolean] default = True
-                                ! If true, take the maximum of the diffusivity from the BBL mixing and the other
-                                ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added.
-USE_LOTW_BBL_DIFFUSIVITY = True !   [Boolean] default = False
-                                ! If true, uses a simple, imprecise but non-coordinate dependent, model of BBL
-                                ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL
-                                ! scheme.
-SIMPLE_TKE_TO_KD = True         !   [Boolean] default = False
-                                ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary
-                                ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact
-                                ! energetics for an isopycnal layer-formulation.
-
-! === module MOM_bkgnd_mixing ===
-! Adding static vertical background mixing coefficients
-KD = 2.0E-05                    !   [m2 s-1] default = 0.0
-                                ! The background diapycnal diffusivity of density in the interior. Zero or the
-                                ! molecular value, ~1e-7 m2 s-1, may be used.
-KD_MIN = 2.0E-06                !   [m2 s-1] default = 2.0E-07
-                                ! The minimum diapycnal diffusivity.
-HENYEY_IGW_BACKGROUND = True    !   [Boolean] default = False
-                                ! If true, use a latitude-dependent scaling for the near surface background
-                                ! diffusivity, as described in Harrison & Hallberg, JPO 2008.
-KD_MAX = 0.1                    !   [m2 s-1] default = -1.0
-                                ! The maximum permitted increment for the diapycnal diffusivity from TKE-based
-                                ! parameterizations, or a negative value for no limit.
-
-! === module MOM_kappa_shear ===
-! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008
-USE_JACKSON_PARAM = True        !   [Boolean] default = False
-                                ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing
-                                ! parameterization.
-MAX_RINO_IT = 25                !   [nondim] default = 50
-                                ! The maximum number of iterations that may be used to estimate the Richardson
-                                ! number driven mixing.
-VERTEX_SHEAR = False             !   [Boolean] default = False
-                                ! If true, do the calculations of the shear-driven mixing
-                                ! at the cell vertices (i.e., the vorticity points).
-KD_TRUNC_KAPPA_SHEAR = 2.0E-07  !   [m2 s-1] default = 2.0E-07
-                                ! The value of shear-driven diffusivity that is considered negligible and is
-                                ! rounded down to 0. The default is 1% of KD_KAPPA_SHEAR_0.
-KAPPA_SHEAR_ITER_BUG = True     !   [Boolean] default = False
-                                ! If true, use an older, dimensionally inconsistent estimate of the derivative
-                                ! of diffusivity with energy in the Newton's method iteration.  The bug causes
-                                ! undercorrections when dz > 1 m.
-KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True !   [Boolean] default = False
-                                ! If true, report back the latest estimate of TKE instead of the time average
-                                ! TKE when there is mass in all layers.  Otherwise always report the time
-                                ! averaged TKE, as is currently done when there are some massless layers.
-
-! === module MOM_CVMix_shear ===
-! Parameterization of shear-driven turbulence via CVMix (various options)
-
-! === module MOM_CVMix_ddiff ===
-! Parameterization of mixing due to double diffusion processes via CVMix
-
-! === module MOM_diabatic_aux ===
-! The following parameters are used for auxiliary diabatic processes.
-PRESSURE_DEPENDENT_FRAZIL = False !   [Boolean] default = False
-                                ! If true, use a pressure dependent freezing temperature when making frazil. The
-                                ! default is false, which will be faster but is inappropriate with ice-shelf
-                                ! cavities.
-VAR_PEN_SW = True               !   [Boolean] default = False
-                                ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine
-                                ! the e-folding depth of incoming short wave radiation.
-CHL_FILE = @[CHLCLIM]           !
-                                ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It
-                                ! is used when VAR_PEN_SW and CHL_FROM_FILE are true.
-
-! === module MOM_energetic_PBL ===
-ML_OMEGA_FRAC = 0.001           !   [nondim] default = 0.0
-                                ! When setting the decay scale for turbulence, use this fraction of the absolute
-                                ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 +
-                                ! of*4*omega^2).
-TKE_DECAY = 0.01                !   [nondim] default = 2.5
-                                ! TKE_DECAY relates the vertical rate of decay of the TKE available for
-                                ! mechanical entrainment to the natural Ekman depth.
-EPBL_MSTAR_SCHEME = "OM4"       ! default = "CONSTANT"
-                                ! EPBL_MSTAR_SCHEME selects the method for setting mstar.  Valid values are:
-                                !    CONSTANT   - Use a fixed mstar given by MSTAR
-                                !    OM4        - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4
-                                !    REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018.
-MSTAR_CAP = 10.0                !   [nondim] default = -1.0
-                                ! If this value is positive, it sets the maximum value of mstar allowed in ePBL.
-                                ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT).
-MSTAR2_COEF1 = 0.29             !   [nondim] default = 0.3
-                                ! Coefficient in computing mstar when rotation and stabilizing effects are both
-                                ! important (used if EPBL_MSTAR_SCHEME = OM4).
-MSTAR2_COEF2 = 0.152            !   [nondim] default = 0.085
-                                ! Coefficient in computing mstar when only rotation limits the total mixing
-                                ! (used if EPBL_MSTAR_SCHEME = OM4)
-NSTAR = 0.06                    !   [nondim] default = 0.2
-                                ! The portion of the buoyant potential energy imparted by surface fluxes that is
-                                ! available to drive entrainment at the base of mixed layer when that energy is
-                                ! positive.
-MSTAR_CONV_ADJ = 0.667          !   [nondim] default = 0.0
-                                ! Coefficient used for reducing mstar during convection due to reduction of
-                                ! stable density gradient.
-USE_MLD_ITERATION = False       !   [Boolean] default = True
-                                ! A logical that specifies whether or not to use the distance to the bottom of
-                                ! the actively turbulent boundary layer to help set the EPBL length scale.
-EPBL_TRANSITION_SCALE = 0.01    !   [nondim] default = 0.1
-                                ! A scale for the mixing length in the transition layer at the edge of the
-                                ! boundary layer as a fraction of the boundary layer thickness.
-MIX_LEN_EXPONENT = 1.0          !   [nondim] default = 2.0
-                                ! The exponent applied to the ratio of the distance to the MLD and the MLD depth
-                                ! which determines the shape of the mixing length. This is only used if
-                                ! USE_MLD_ITERATION is True.
-USE_LA_LI2016 = @[MOM6_USE_LI2016] !   [nondim] default = False
-                                ! A logical to use the Li et al. 2016 (submitted) formula to determine the
-                                ! Langmuir number.
-USE_WAVES = @[MOM6_USE_WAVES]   !   [Boolean] default = False
-                                ! If true, enables surface wave modules.
-WAVE_METHOD = "SURFACE_BANDS"   ! default = "EMPTY"
-                                ! Choice of wave method, valid options include:
-                                !  TEST_PROFILE  - Prescribed from surface Stokes drift
-                                !                  and a decay wavelength.
-                                !  SURFACE_BANDS - Computed from multiple surface values
-                                !                  and decay wavelengths.
-                                !  DHH85         - Uses Donelan et al. 1985 empirical
-                                !                  wave spectrum with prescribed values.
-                                !  LF17          - Infers Stokes drift profile from wind
-                                !                  speed following Li and Fox-Kemper 2017.
-SURFBAND_SOURCE = "COUPLER"     ! default = "EMPTY"
-                                ! Choice of SURFACE_BANDS data mode, valid options include:
-                                !  DATAOVERRIDE  - Read from NetCDF using FMS DataOverride.
-                                !  COUPLER       - Look for variables from coupler pass
-                                !  INPUT         - Testing with fixed values.
-STK_BAND_COUPLER = 3            ! default = 1
-                                ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has
-                                ! to be consistent with the number of Stokes drift bands in WW3, or the model
-                                ! will fail.
-SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 !   [rad/m] default = 0.12566
-                                ! Central wavenumbers for surface Stokes drift bands.
-EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE"
-                                ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence.
-                                ! Valid values are:
-                                !    NONE     - Do not do any extra mixing due to Langmuir turbulence
-                                !    RESCALE  - Use a multiplicative rescaling of mstar to account for Langmuir
-                                !      turbulence
-                                !    ADDITIVE - Add a Langmuir turbulence contribution to mstar to other
-                                !      contributions
-LT_ENHANCE_COEF = 0.044         !   [nondim] default = 0.447
-                                ! Coefficient for Langmuir enhancement of mstar
-LT_ENHANCE_EXP = -1.5           !   [nondim] default = -1.33
-                                ! Exponent for Langmuir enhancement of mstar
-LT_MOD_LAC1 = 0.0               !   [nondim] default = -0.87
-                                ! Coefficient for modification of Langmuir number due to MLD approaching Ekman
-                                ! depth.
-LT_MOD_LAC4 = 0.0               !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! stable Obukhov depth.
-LT_MOD_LAC5 = 0.22              !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! unstable Obukhov depth.
-
-! === module MOM_regularize_layers ===
-
-! === module MOM_opacity ===
-PEN_SW_NBANDS = 3               ! default = 1
-                                ! The number of bands of penetrating shortwave radiation.
-
-! === module MOM_tracer_advect ===
-TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM"
-                                ! The horizontal transport scheme for tracers:
-                                !   PLM    - Piecewise Linear Method
-                                !   PPM:H3 - Piecewise Parabolic Method (Huyhn 3rd order)
-                                !   PPM    - Piecewise Parabolic Method (Colella-Woodward)
-
-! === module MOM_tracer_hor_diff ===
-CHECK_DIFFUSIVE_CFL = True      !   [Boolean] default = False
-                                ! If true, use enough iterations of the diffusion to ensure that the diffusive
-                                ! equivalent of the CFL limit is not violated.  If false, always use the greater
-                                ! of 1 or MAX_TR_DIFFUSION_CFL iterations.
-
-! === module MOM_neutral_diffusion ===
-! This module implements neutral diffusion of tracers
-USE_NEUTRAL_DIFFUSION = True    !   [Boolean] default = False
-                                ! If true, enables the neutral diffusion module.
-
-! === module MOM_lateral_boundary_diffusion ===
-! This module implements lateral diffusion of tracers near boundaries
-
-! === module MOM_sum_output ===
-CALCULATE_APE = False           !   [Boolean] default = True
-                                ! If true, calculate the available potential energy of the interfaces.  Setting
-                                ! this to false reduces the memory footprint of high-PE-count models
-                                ! dramatically.
-MAXTRUNC = 100000               !   [truncations save_interval-1] default = 0
-                                ! The run will be stopped, and the day set to a very large value if the velocity
-                                ! is truncated more than MAXTRUNC times between energy saves.  Set MAXTRUNC to 0
-                                ! to stop if there is any truncation of velocities.
-ENERGYSAVEDAYS = 0.25           !   [days] default = 1.0
-                                ! The interval in units of TIMEUNIT between saves of the energies of the run and
-                                ! other globally summed diagnostics.
-
-! === module ocean_model_init ===
-
-! === module MOM_oda_incupd ===
-ODA_INCUPD = @[ODA_INCUPD]   ! [Boolean] default = False
-                             ! If true, oda incremental updates will be applied
-                             ! everywhere in the domain.
-ODA_INCUPD_FILE = "mom6_increment.nc"   ! The name of the file with the T,S,h increments.
-
-ODA_TEMPINC_VAR = "Temp"        ! default = "ptemp_inc"
-                                ! The name of the potential temperature inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_SALTINC_VAR = "Salt"        ! default = "sal_inc"
-                                ! The name of the salinity inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_THK_VAR = "h"               ! default = "h"
-                                ! The name of the int. depth inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_INCUPD_UV = true            !
-ODA_UINC_VAR = "u"              ! default = "u_inc"
-                                ! The name of the zonal vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_VINC_VAR = "v"              ! default = "v_inc"
-                                ! The name of the meridional vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS]            ! default=3.0
-
-! === module MOM_surface_forcing ===
-OCEAN_SURFACE_STAGGER = "A"     ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the surface velocity field that is
-                                ! returned to the coupler.  Valid values include
-                                ! 'A', 'B', or 'C'.
-
-MAX_P_SURF = 0.0                !   [Pa] default = -1.0
-                                ! The maximum surface pressure that can be exerted by the atmosphere and
-                                ! floating sea-ice or ice shelves. This is needed because the FMS coupling
-                                ! structure does not limit the water that can be frozen out of the ocean and the
-                                ! ice-ocean heat fluxes are treated explicitly.  No limit is applied if a
-                                ! negative value is used.
-WIND_STAGGER = "A"              ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the input wind stress field.  Valid
-                                ! values are 'A', 'B', or 'C'.
-CD_TIDES = 0.0018               !   [nondim] default = 1.0E-04
-                                ! The drag coefficient that applies to the tides.
-GUST_CONST = 0.02               !   [Pa] default = 0.0
-                                ! The background gustiness in the winds.
-FIX_USTAR_GUSTLESS_BUG = False  !   [Boolean] default = True
-                                ! If true correct a bug in the time-averaging of the gustless wind friction
-                                ! velocity
-! === module ocean_stochastics ===
-DO_SPPT   = @[DO_OCN_SPPT]      ! [Boolean] default = False
-                                ! If true perturb the diabatic tendencies in MOM_diabatic_driver
-PERT_EPBL = @[PERT_EPBL]        ! [Boolean] default = False
-                                ! If true perturb the KE dissipation and destruction in MOM_energetic_PBL
-
-! === module MOM_restart ===
-
-! === module MOM_file_parser ===
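The MOM_input templates above (and the _500 template that follows) carry @[NAME] placeholders such as @[DT_DYNAM_MOM6], @[MOM6_USE_WAVES], and @[ODA_INCUPD] that the workflow resolves to concrete values when it stages MOM_input for a run. As a rough, non-authoritative illustration of that substitution step, the Python sketch below fills such tokens from a plain mapping; the function name fill_mom_template and the sample values are hypothetical, and the workflow's own parsing script is what performs the real substitution.

#!/usr/bin/env python3
# Illustrative sketch only: resolve @[NAME] placeholders of the kind used in the
# MOM_input templates above from a mapping of run-time settings. This is not the
# workflow's actual substitution tool; names and values here are hypothetical.
import re
from typing import Mapping

_TOKEN = re.compile(r"@\[(\w+)\]")

def fill_mom_template(template_text: str, settings: Mapping[str, object]) -> str:
    """Replace every @[NAME] token with str(settings['NAME']).

    A missing key raises KeyError, which is usually preferable to leaving an
    unresolved token behind in the generated MOM_input.
    """
    return _TOKEN.sub(lambda m: str(settings[m.group(1)]), template_text)

if __name__ == "__main__":
    sample = 'DT = @[DT_DYNAM_MOM6]           !   [s]'
    print(fill_mom_template(sample, {"DT_DYNAM_MOM6": 900}))
    # prints: DT = 900           !   [s]

Keeping the substitution strict (fail on an unknown token) mirrors the behaviour one would want from the real parsing step, since a stray @[...] left in MOM_input would only surface later as a MOM parameter-parsing error.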
diff --git a/parm/ufs/mom6/MOM_input_template_500 b/parm/ufs/mom6/MOM_input_template_500
deleted file mode 100644
index dde805d247..0000000000
--- a/parm/ufs/mom6/MOM_input_template_500
+++ /dev/null
@@ -1,592 +0,0 @@
-! This file was written by the model and records the non-default parameters used at run-time.
-! === module MOM ===
-
-! === module MOM_unit_scaling ===
-! Parameters for doing unit scaling of variables.
-USE_REGRIDDING = True           !   [Boolean] default = False
-                                ! If True, use the ALE algorithm (regridding/remapping). If False, use the
-                                ! layered isopycnal algorithm.
-THICKNESSDIFFUSE = True         !   [Boolean] default = False
-                                ! If true, interface heights are diffused with a coefficient of KHTH.
-THICKNESSDIFFUSE_FIRST = True   !   [Boolean] default = False
-                                ! If true, do thickness diffusion before dynamics. This is only used if
-                                ! THICKNESSDIFFUSE is true.
-DT = @[DT_DYNAM_MOM6]           !   [s]
-                                ! The (baroclinic) dynamics time step.  The time-step that is actually used will
-                                ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode
-                                ! or the coupling timestep in coupled mode.)
-DT_THERM = @[DT_THERM_MOM6]     !   [s] default = 1800.0
-                                ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be
-                                ! an integer multiple of DT and less than the forcing or coupling time-step,
-                                ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer
-                                ! multiple of the coupling timestep.  By default DT_THERM is set to DT.
-THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN]    !   [Boolean] default = False
-                                ! If true, the MOM will take thermodynamic and tracer timesteps that can be
-                                ! longer than the coupling timestep. The actual thermodynamic timestep that is
-                                ! used in this case is the largest integer multiple of the coupling timestep
-                                ! that is less than or equal to DT_THERM.
-HFREEZE = 20.0                  !   [m] default = -1.0
-                                ! If HFREEZE > 0, melt potential will be computed. The actual depth
-                                ! over which melt potential is computed will be min(HFREEZE, OBLD)
-                                ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default)
-                                ! melt potential will not be computed.
-FRAZIL = True                   !   [Boolean] default = False
-                                ! If true, water freezes if it gets too cold, and the accumulated heat deficit
-                                ! is returned in the surface state.  FRAZIL is only used if
-                                ! ENABLE_THERMODYNAMICS is true.
-BOUND_SALINITY = True           !   [Boolean] default = False
-                                ! If true, limit salinity to being positive. (The sea-ice model may ask for more
-                                ! salt than is available and drive the salinity negative otherwise.)
-
-! === module MOM_domains ===
-TRIPOLAR_N = True               !   [Boolean] default = False
-                                ! Use tripolar connectivity at the northern edge of the domain.  With
-                                ! TRIPOLAR_N, NIGLOBAL must be even.
-NIGLOBAL = @[NX_GLB]            !
-                                ! The total number of thickness grid points in the x-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-NJGLOBAL = @[NY_GLB]            !
-                                ! The total number of thickness grid points in the y-direction in the physical
-                                ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time.
-
-! === module MOM_hor_index ===
-! Sets the horizontal array index types.
-
-! === module MOM_fixed_initialization ===
-INPUTDIR = "INPUT"              ! default = "."
-                                ! The directory in which input files are found.
-
-! === module MOM_grid_init ===
-GRID_CONFIG = "mosaic"          !
-                                ! A character string that determines the method for defining the horizontal
-                                ! grid.  Current options are:
-                                !     mosaic - read the grid from a mosaic (supergrid)
-                                !              file set by GRID_FILE.
-                                !     cartesian - use a (flat) Cartesian grid.
-                                !     spherical - use a simple spherical grid.
-                                !     mercator - use a Mercator spherical grid.
-GRID_FILE = "ocean_hgrid.nc"    !
-                                ! Name of the file from which to read horizontal grid data.
-GRID_ROTATION_ANGLE_BUGS = False  ! [Boolean] default = True
-                                ! If true, use an older algorithm to calculate the sine and
-                                ! If true, use an older algorithm to calculate the sines and
-                                ! cosines needed to rotate between grid-oriented directions and
-                                ! true north and east.  Differences arise at the tripolar fold.
-                                ! If true, use older code that incorrectly sets the longitude in some points
-                                ! along the tripolar fold to be off by 360 degrees.
-TOPO_CONFIG = "file"            !
-                                ! This specifies how bathymetry is specified:
-                                !     file - read bathymetric information from the file
-                                !       specified by (TOPO_FILE).
-                                !     flat - flat bottom set to MAXIMUM_DEPTH.
-                                !     bowl - an analytically specified bowl-shaped basin
-                                !       ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH.
-                                !     spoon - a similar shape to 'bowl', but with a vertical
-                                !       wall at the southern face.
-                                !     halfpipe - a zonally uniform channel with a half-sine
-                                !       profile in the meridional direction.
-                                !     bbuilder - build topography from list of functions.
-                                !     benchmark - use the benchmark test case topography.
-                                !     Neverworld - use the Neverworld test case topography.
-                                !     DOME - use a slope and channel configuration for the
-                                !       DOME sill-overflow test case.
-                                !     ISOMIP - use a slope and channel configuration for the
-                                !       ISOMIP test case.
-                                !     DOME2D - use a shelf and slope configuration for the
-                                !       DOME2D gravity current/overflow test case.
-                                !     Kelvin - flat but with rotated land mask.
-                                !     seamount - Gaussian bump for spontaneous motion test case.
-                                !     dumbbell - Sloshing channel with reservoirs on both ends.
-                                !     shelfwave - exponential slope for shelfwave test case.
-                                !     Phillips - ACC-like idealized topography used in the Phillips config.
-                                !     dense - Denmark Strait-like dense water formation and overflow.
-                                !     USER - call a user modified routine.
-TOPO_FILE = "ocean_topog.nc"    ! default = "topog.nc"
-                                ! The file from which the bathymetry is read.
-!MAXIMUM_DEPTH = 5801.341919389728 !   [m]
-                                ! The (diagnosed) maximum depth of the ocean.
-MINIMUM_DEPTH = 10.0            !   [m] default = 0.0
-                                ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is
-                                ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is
-                                ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than
-                                ! MASKING_DEPTH are rounded to MINIMUM_DEPTH.
-
-! === module MOM_open_boundary ===
-! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply,
-! if any.
-MASKING_DEPTH = 0.0             !   [m] default = -9999.0
-                                ! The depth below which to mask points as land points, for which all fluxes are
-                                ! zeroed out. MASKING_DEPTH is ignored if negative.
-
-! === module MOM_verticalGrid ===
-! Parameters providing information about the vertical grid.
-NK = 25                         !   [nondim]
-                                ! The number of model layers.
-
-! === module MOM_tracer_registry ===
-
-! === module MOM_EOS ===
-TFREEZE_FORM = "MILLERO_78"     ! default = "LINEAR"
-                                ! TFREEZE_FORM determines which expression should be used for the freezing
-                                ! point.  Currently, the valid choices are "LINEAR", "MILLERO_78", "TEOS10"
-
-! === module MOM_restart ===
-RESTART_CHECKSUMS_REQUIRED = False
-! === module MOM_tracer_flow_control ===
-
-! === module MOM_coord_initialization ===
-COORD_CONFIG = "file"           ! default = "none"
-                                ! This specifies how layers are to be defined:
-                                !     ALE or none - used to avoid defining layers in ALE mode
-                                !     file - read coordinate information from the file
-                                !       specified by (COORD_FILE).
-                                !     BFB - Custom coords for buoyancy-forced basin case
-                                !       based on SST_S, T_BOT and DRHO_DT.
-                                !     linear - linear based on interfaces not layers
-                                !     layer_ref - linear based on layer densities
-                                !     ts_ref - use reference temperature and salinity
-                                !     ts_range - use range of temperature and salinity
-                                !       (T_REF and S_REF) to determine surface density
-                                !       and GINT to calculate internal densities.
-                                !     gprime - use reference density (RHO_0) for surface
-                                !       density and GINT to calculate internal densities.
-                                !     ts_profile - use temperature and salinity profiles
-                                !       (read from COORD_FILE) to set layer densities.
-                                !     USER - call a user modified routine.
-COORD_FILE = "layer_coord25.nc" !
-                                ! The file from which the coordinate densities are read.
-REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER"
-                                ! Coordinate mode for vertical regridding. Choose among the following
-                                ! possibilities:  LAYER - Isopycnal or stacked shallow water layers
-                                !  ZSTAR, Z* - stretched geopotential z*
-                                !  SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf
-                                !  SIGMA - terrain following coordinates
-                                !  RHO   - continuous isopycnal
-                                !  HYCOM1 - HyCOM-like hybrid coordinate
-                                !  SLIGHT - stretched coordinates above continuous isopycnal
-                                !  ADAPTIVE - optimize for smooth neutral density surfaces
-BOUNDARY_EXTRAPOLATION = True   !   [Boolean] default = False
-                                ! When defined, a proper high-order reconstruction scheme is used within
-                                ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM
-                                ! reconstruction will also be used within boundary cells.
-ALE_COORDINATE_CONFIG = "HYBRID:hycom1_25.nc,sigma2,FNC1:5,4000,4.5,.01" ! default = "UNIFORM"
-                                ! Determines how to specify the coordinate
-                                ! resolution. Valid options are:
-                                !  PARAM       - use the vector-parameter ALE_RESOLUTION
-                                !  UNIFORM[:N] - uniformly distributed
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,dz
-                                !                or FILE:lev.nc,interfaces=zw
-                                !  WOA09[:N]   - the WOA09 vertical grid (approximately)
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-                                !  HYBRID:string - read from a file. The string specifies
-                                !                the filename and two variable names, separated
-                                !                by a comma or space, for sigma-2 and dz. e.g.
-                                !                HYBRID:vgrid.nc,sigma2,dz
-!ALE_RESOLUTION = 2*5.0, 5.01, 5.07, 5.25, 5.68, 6.55, 8.1, 10.66, 14.620000000000001, 20.450000000000003, 28.73, 40.1, 55.32, 75.23, 100.8, 133.09, 173.26, 222.62, 282.56, 354.62, 440.47, 541.87, 660.76, 799.1800000000001 !   [m]
-                                ! The distribution of vertical resolution for the target
-                                ! grid used for Eulerian-like coordinates. For example,
-                                ! in z-coordinate mode, the parameter is a list of level
-                                ! thicknesses (in m). In sigma-coordinate mode, the list
-                                ! is of non-dimensional fractions of the water column.
-!TARGET_DENSITIES = 1010.0, 1020.843017578125, 1027.0274658203125, 1029.279541015625, 1030.862548828125, 1032.1572265625, 1033.27978515625, 1034.251953125, 1034.850830078125, 1035.28857421875, 1035.651123046875, 1035.967529296875, 1036.2410888671875, 1036.473876953125, 1036.6800537109375, 1036.8525390625, 1036.9417724609375, 1037.0052490234375, 1037.057373046875, 1037.1065673828125, 1037.15576171875, 1037.2060546875, 1037.26416015625, 1037.3388671875, 1037.4749755859375, 1038.0 !   [m]
-                                ! HYBRID target densities for interfaces
-REGRID_COMPRESSIBILITY_FRACTION = 0.01 !   [not defined] default = 0.0
-                                ! When interpolating potential density profiles we can add
-                                ! some artificial compressibility solely to make homogeneous
-                                ! regions appear stratified.
-MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.125" ! default = "NONE"
-                                ! Determines how to specify the maximum interface depths.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum interface depths
-                                !  PARAM       - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 36.25, 93.75, 177.5, 287.5, 423.75, 586.25, 775.0, 990.0, 1231.25, 1498.75, 1792.5, 2112.5, 2458.75, 2831.25, 3230.0, 3655.0, 4106.25, 4583.75, 5087.5, 5617.5, 6173.75, 6756.25, 7365.0, 8000.0 !   [m]
-                                ! The list of maximum depths for each interface.
-MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE"
-                                ! Determines how to specify the maximum layer thicknesses.
-                                ! Valid options are:
-                                !  NONE        - there are no maximum layer thicknesses
-                                !  PARAM       - use the vector-parameter MAX_LAYER_THICKNESS
-                                !  FILE:string - read from a file. The string specifies
-                                !                the filename and variable name, separated
-                                !                by a comma or space, e.g. FILE:lev.nc,Z
-                                !  FNC1:string - FNC1:dz_min,H_total,power,precision
-!MAX_LAYER_THICKNESS = 400.0, 1094.2, 1144.02, 1174.81, 1197.42, 1215.4099999999999, 1230.42, 1243.3200000000002, 1254.65, 1264.78, 1273.94, 1282.31, 1290.02, 1297.17, 1303.85, 1310.1, 1316.0, 1321.5700000000002, 1326.85, 1331.87, 1336.67, 1341.25, 1345.6399999999999, 1349.85, 1353.88 !   [m]
-                                ! The list of maximum thickness for each layer.
-REMAPPING_SCHEME = "PPM_H4"     ! default = "PLM"
-                                ! This sets the reconstruction scheme used for vertical remapping for all
-                                ! variables. It can be one of the following schemes:
-                                ! PCM         (1st-order accurate)
-                                ! PLM         (2nd-order accurate)
-                                ! PPM_H4      (3rd-order accurate)
-                                ! PPM_IH4     (3rd-order accurate)
-                                ! PQM_IH4IH3  (4th-order accurate)
-                                ! PQM_IH6IH5  (5th-order accurate)
-
-! === module MOM_grid ===
-! Parameters providing information about the lateral grid.
-
-! === module MOM_state_initialization ===
-INIT_LAYERS_FROM_Z_FILE = True  !   [Boolean] default = False
-                                ! If true, initialize the layer thicknesses, temperatures, and salinities from a
-                                ! Z-space file on a latitude-longitude grid.
-
-! === module MOM_initialize_layers_from_Z ===
-TEMP_SALT_Z_INIT_FILE = ""      ! default = "temp_salt_z.nc"
-                                ! The name of the z-space input file used to initialize
-                                ! temperatures (T) and salinities (S). If T and S are not
-                                ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE
-                                ! must be set.
-TEMP_Z_INIT_FILE = "woa18_decav_t00_01.nc" ! default = ""
-                                ! The name of the z-space input file used to initialize
-                                ! temperatures, only.
-SALT_Z_INIT_FILE = "woa18_decav_s00_01.nc" ! default = ""
-                                ! The name of the z-space input file used to initialize
-                                ! salinities, only.
-Z_INIT_FILE_PTEMP_VAR = "t_an"  ! default = "ptemp"
-                                ! The name of the potential temperature variable in
-                                ! TEMP_Z_INIT_FILE.
-Z_INIT_FILE_SALT_VAR = "s_an"   ! default = "salt"
-                                ! The name of the salinity variable in
-                                ! SALT_Z_INIT_FILE.
-Z_INIT_ALE_REMAPPING = True     !   [Boolean] default = False
-                                ! If True, then remap straight to model coordinate from file.
-
-! === module MOM_diag_mediator ===
-NUM_DIAG_COORDS = 1
-                                ! The number of diagnostic vertical coordinates to use.
-                                ! For each coordinate, an entry in DIAG_COORDS must be provided.
-DIAG_COORDS = "z Z ZSTAR"
-                                ! A list of string tuples associating diag_table modules to
-                                ! a coordinate definition used for diagnostics. Each string
-                                ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME".
-DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw"
-DIAG_MISVAL = @[MOM6_DIAG_MISVAL]
-
-! === module MOM_MEKE ===
-USE_MEKE = True                 !   [Boolean] default = False
-                                ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy
-                                ! kinetic energy budget.
-
-! === module MOM_lateral_mixing_coeffs ===
-USE_VARIABLE_MIXING = True      !   [Boolean] default = False
-                                ! If true, the variable mixing code will be called.  This allows diagnostics to
-                                ! be created even if the scheme is not used.  If KHTR_SLOPE_CFF>0 or
-                                ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter
-                                ! file.
-! === module MOM_set_visc ===
-CHANNEL_DRAG = True             !   [Boolean] default = False
-                                ! If true, the bottom drag is exerted directly on each layer proportional to the
-                                ! fraction of the bottom it overlies.
-HBBL = 10.0                     !   [m]
-                                ! The thickness of a bottom boundary layer with a viscosity of KVBBL if
-                                ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom
-                                ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but
-                                ! LINEAR_DRAG is not.
-KV = 1.0E-04                    !   [m2 s-1]
-                                ! The background kinematic viscosity in the interior. The molecular value, ~1e-6
-                                ! m2 s-1, may be used.
-
-! === module MOM_continuity ===
-
-! === module MOM_continuity_PPM ===
-
-! === module MOM_CoriolisAdv ===
-CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY"
-                                ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid
-                                ! values are:
-                                !    SADOURNY75_ENERGY - Sadourny, 1975; energy cons.
-                                !    ARAKAWA_HSU90     - Arakawa & Hsu, 1990
-                                !    SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons.
-                                !    ARAKAWA_LAMB81    - Arakawa & Lamb, 1981; En. + Enst.
-                                !    ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with
-                                !                         Arakawa & Hsu and Sadourny energy
-BOUND_CORIOLIS = True           !   [Boolean] default = False
-                                ! If true, the Coriolis terms at u-points are bounded by the four estimates of
-                                ! (f+rv)v from the four neighboring v-points, and similarly at v-points.  This
-                                ! option would have no effect on the SADOURNY Coriolis scheme if it were
-                                ! possible to use centered difference thickness fluxes.
-
-! === module MOM_PressureForce ===
-
-! === module MOM_PressureForce_AFV ===
-MASS_WEIGHT_IN_PRESSURE_GRADIENT = True !   [Boolean] default = False
-                                ! If true, use mass weighting when interpolating T/S for integrals near the
-                                ! bathymetry in AFV pressure gradient calculations.
-
-! === module MOM_hor_visc ===
-LAPLACIAN = True                !   [Boolean] default = False
-                                ! If true, use a Laplacian horizontal viscosity.
-KH_VEL_SCALE = 0.01             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the grid spacing to calculate the
-                                ! Laplacian viscosity. The final viscosity is the largest of this scaled
-                                ! viscosity, the Smagorinsky and Leith viscosities, and KH.
-KH_SIN_LAT = 2000.0             !   [m2 s-1] default = 0.0
-                                ! The amplitude of a latitudinally-dependent background viscosity of the form
-                                ! KH_SIN_LAT*(SIN(LAT)**KH_PWR_OF_SINE).
-SMAGORINSKY_KH = True           !   [Boolean] default = False
-                                ! If true, use a Smagorinsky nonlinear eddy viscosity.
-SMAG_LAP_CONST = 0.15           !   [nondim] default = 0.0
-                                ! The nondimensional Laplacian Smagorinsky constant, often 0.15.
-AH_VEL_SCALE = 0.01             !   [m s-1] default = 0.0
-                                ! The velocity scale which is multiplied by the cube of the grid spacing to
-                                ! calculate the biharmonic viscosity. The final viscosity is the largest of this
-                                ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH.
-SMAGORINSKY_AH = True           !   [Boolean] default = False
-                                ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity.
-SMAG_BI_CONST = 0.06            !   [nondim] default = 0.0
-                                ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06.
-USE_LAND_MASK_FOR_HVISC = True  !   [Boolean] default = False
-                                ! If true, use the land mask for the computation of thicknesses at velocity
-                                ! locations. This eliminates the dependence on arbitrary values over land or
-                                ! outside of the domain.
-
-! === module MOM_vert_friction ===
-HMIX_FIXED = 0.5                !   [m]
-                                ! The prescribed depth over which the near-surface viscosity and diffusivity are
-                                ! elevated when the bulk mixed layer is not used.
-KVML = 1.0E-04                  !   [m2 s-1] default = 1.0E-04
-                                ! The kinematic viscosity in the mixed layer.  A typical value is ~1e-2 m2 s-1.
-                                ! KVML is not used if BULKMIXEDLAYER is true.  The default is set by KV.
-MAXVEL = 6.0                    !   [m s-1] default = 3.0E+08
-                                ! The maximum velocity allowed before the velocity components are truncated.
-
-! === module MOM_barotropic ===
-BOUND_BT_CORRECTION = True      !   [Boolean] default = False
-                                ! If true, the corrective pseudo mass-fluxes into the barotropic solver are
-                                ! limited to values that require less than maxCFL_BT_cont to be accommodated.
-BT_PROJECT_VELOCITY = True      !   [Boolean] default = False
-                                ! If true, step the barotropic velocity first and project out the velocity
-                                ! tendency by 1+BEBT when calculating the transport.  The default (false) is to
-                                ! use a predictor continuity step to find the pressure field, and then to do a
-                                ! corrector continuity step using a weighted average of the old and new
-                                ! velocities, with weights of (1-BEBT) and BEBT.
-DYNAMIC_SURFACE_PRESSURE = False !   [Boolean] default = False
-                                ! If true, add a dynamic pressure due to a viscous ice shelf, for instance.
-BEBT = 0.2                      !   [nondim] default = 0.1
-                                ! BEBT determines whether the barotropic time stepping uses the forward-backward
-                                ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range
-                                ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1
-                                ! (for a backward Euler treatment). In practice, BEBT must be greater than about
-                                ! 0.05.
-DTBT = -0.9                     !   [s or nondim] default = -0.98
-                                ! The barotropic time step, in s. DTBT is only used with the split explicit time
-                                ! stepping. To set the time step automatically based on the maximum stable value
-                                ! use 0, or a negative value gives the fraction of the stable value. Setting
-                                ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will
-                                ! actually be used is an integer fraction of DT, rounding down.
-
-! === module MOM_mixed_layer_restrat ===
-MIXEDLAYER_RESTRAT = False      !   [Boolean] default = False
-                                ! If true, a density-gradient dependent re-stratifying flow is imposed in the
-                                ! mixed layer. Can be used in ALE mode without restriction but in layer mode can
-                                ! only be used if BULKMIXEDLAYER is true.
-FOX_KEMPER_ML_RESTRAT_COEF = 60.0 !   [nondim] default = 0.0
-                                ! A nondimensional coefficient that is proportional to the ratio of the
-                                ! deformation radius to the dominant lengthscale of the submesoscale mixed layer
-                                ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic
-                                ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of
-                                ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al.
-                                ! (2010)
-MLE_FRONT_LENGTH = 200.0        !   [m] default = 0.0
-                                ! If non-zero, is the frontal-length scale used to calculate the upscaling of
-                                ! buoyancy gradients that is otherwise represented by the parameter
-                                ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended
-                                ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0.
-MLE_USE_PBL_MLD = True          !   [Boolean] default = False
-                                ! If true, the MLE parameterization will use the mixed-layer depth provided by
-                                ! the active PBL parameterization. If false, MLE will estimate a MLD based on a
-                                ! density difference with the surface using the parameter MLE_DENSITY_DIFF.
-MLE_MLD_DECAY_TIME = 2.592E+06  !   [s] default = 0.0
-                                ! The time-scale for a running-mean filter applied to the mixed-layer depth used
-                                ! in the MLE restratification parameterization. When the MLD deepens below the
-                                ! current running-mean the running-mean is instantaneously set to the current
-                                ! MLD.
-
-! === module MOM_diabatic_driver ===
-! The following parameters are used for diabatic processes.
-ENERGETICS_SFC_PBL = True       !   [Boolean] default = False
-                                ! If true, use an implied energetics planetary boundary layer scheme to
-                                ! determine the diffusivity and viscosity in the surface boundary layer.
-EPBL_IS_ADDITIVE = False        !   [Boolean] default = True
-                                ! If true, the diffusivity from ePBL is added to all other diffusivities.
-                                ! Otherwise, the larger of kappa-shear and ePBL diffusivities is used.
-
-! === module MOM_CVMix_KPP ===
-! This is the MOM wrapper to CVMix:KPP
-! See http://cvmix.github.io/
-
-! === module MOM_tidal_mixing ===
-! Vertical Tidal Mixing Parameterization
-
-! === module MOM_CVMix_conv ===
-! Parameterization of enhanced mixing due to convection via CVMix
-
-! === module MOM_set_diffusivity ===
-
-! === module MOM_bkgnd_mixing ===
-! Adding static vertical background mixing coefficients
-KD = 1.5E-05                    !   [m2 s-1] default = 0.0
-                                ! The background diapycnal diffusivity of density in the interior. Zero or the
-                                ! molecular value, ~1e-7 m2 s-1, may be used.
-KD_MIN = 2.0E-06                !   [m2 s-1] default = 2.0E-07
-                                ! The minimum diapycnal diffusivity.
-HENYEY_IGW_BACKGROUND = True    !   [Boolean] default = False
-                                ! If true, use a latitude-dependent scaling for the near surface background
-                                ! diffusivity, as described in Harrison & Hallberg, JPO 2008.
-
-! === module MOM_kappa_shear ===
-! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008
-USE_JACKSON_PARAM = True        !   [Boolean] default = False
-                                ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing
-                                ! parameterization.
-MAX_RINO_IT = 25                !   [nondim] default = 50
-                                ! The maximum number of iterations that may be used to estimate the Richardson
-                                ! number driven mixing.
-
-! === module MOM_CVMix_shear ===
-! Parameterization of shear-driven turbulence via CVMix (various options)
-
-! === module MOM_CVMix_ddiff ===
-! Parameterization of mixing due to double diffusion processes via CVMix
-
-! === module MOM_diabatic_aux ===
-! The following parameters are used for auxiliary diabatic processes.
-
-! === module MOM_energetic_PBL ===
-EPBL_USTAR_MIN = 1.45842E-18    !   [m s-1]
-                                ! The (tiny) minimum friction velocity used within the ePBL code, derived from
-                                ! OMEGA and ANGSTROM.
-USE_LA_LI2016 = @[MOM6_USE_LI2016] !   [nondim] default = False
-                                ! A logical to use the Li et al. 2016 (submitted) formula to determine the
-                                ! Langmuir number.
-USE_WAVES = @[MOM6_USE_WAVES]   !   [Boolean] default = False
-                                ! If true, enables surface wave modules.
-WAVE_METHOD = "SURFACE_BANDS"   ! default = "EMPTY"
-                                ! Choice of wave method, valid options include:
-                                !  TEST_PROFILE  - Prescribed from surface Stokes drift
-                                !                  and a decay wavelength.
-                                !  SURFACE_BANDS - Computed from multiple surface values
-                                !                  and decay wavelengths.
-                                !  DHH85         - Uses Donelan et al. 1985 empirical
-                                !                  wave spectrum with prescribed values.
-                                !  LF17          - Infers Stokes drift profile from wind
-                                !                  speed following Li and Fox-Kemper 2017.
-SURFBAND_SOURCE = "COUPLER"     ! default = "EMPTY"
-                                ! Choice of SURFACE_BANDS data mode, valid options include:
-                                !  DATAOVERRIDE  - Read from NetCDF using FMS DataOverride.
-                                !  COUPLER       - Look for variables from coupler pass
-                                !  INPUT         - Testing with fixed values.
-STK_BAND_COUPLER = 3            ! default = 1
-                                ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has
-                                ! to be consistent with the number of Stokes drift bands in WW3, or the model
-                                ! will fail.
-SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 !   [rad/m] default = 0.12566
-                                ! Central wavenumbers for surface Stokes drift bands.
-EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE"
-                                ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence.
-                                ! Valid values are:
-                                !    NONE     - Do not do any extra mixing due to Langmuir turbulence
-                                !    RESCALE  - Use a multiplicative rescaling of mstar to account for Langmuir
-                                !      turbulence
-                                !    ADDITIVE - Add a Langmuir turbulence contribution to mstar to other
-                                !      contributions
-LT_ENHANCE_COEF = 0.044         !   [nondim] default = 0.447
-                                ! Coefficient for Langmuir enhancement of mstar
-LT_ENHANCE_EXP = -1.5           !   [nondim] default = -1.33
-                                ! Exponent for Langmuir enhancement of mstar
-LT_MOD_LAC1 = 0.0               !   [nondim] default = -0.87
-                                ! Coefficient for modification of Langmuir number due to MLD approaching Ekman
-                                ! depth.
-LT_MOD_LAC4 = 0.0               !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! stable Obukhov depth.
-LT_MOD_LAC5 = 0.22              !   [nondim] default = 0.95
-                                ! Coefficient for modification of Langmuir number due to ratio of Ekman to
-                                ! unstable Obukhov depth.
-
-! === module MOM_regularize_layers ===
-
-! === module MOM_opacity ===
-
-! === module MOM_tracer_advect ===
-TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM"
-                                ! The horizontal transport scheme for tracers:
-                                !   PLM    - Piecewise Linear Method
-                                !   PPM:H3 - Piecewise Parabolic Method (Huynh 3rd order)
-                                !   PPM    - Piecewise Parabolic Method (Colella-Woodward)
-
-! === module MOM_tracer_hor_diff ===
-KHTR = 50.0                     !   [m2 s-1] default = 0.0
-                                ! The background along-isopycnal tracer diffusivity.
-CHECK_DIFFUSIVE_CFL = True      !   [Boolean] default = False
-                                ! If true, use enough iterations of the diffusion to ensure that the diffusive
-                                ! equivalent of the CFL limit is not violated.  If false, always use the greater
-                                ! of 1 or MAX_TR_DIFFUSION_CFL iterations.
-MAX_TR_DIFFUSION_CFL = 2.0      !   [nondim] default = -1.0
-                                ! If positive, locally limit the along-isopycnal tracer diffusivity to keep the
-                                ! diffusive CFL locally at or below this value.  The number of diffusive
-                                ! iterations is often this value or the next greater integer.
-
-! === module MOM_neutral_diffusion ===
-! This module implements neutral diffusion of tracers
-USE_NEUTRAL_DIFFUSION = True    !   [Boolean] default = False
-                                ! If true, enables the neutral diffusion module.
-
-! === module MOM_sum_output ===
-MAXTRUNC = 1000                 !   [truncations save_interval-1] default = 0
-                                ! The run will be stopped, and the day set to a very large value if the velocity
-                                ! is truncated more than MAXTRUNC times between energy saves.  Set MAXTRUNC to 0
-                                ! to stop if there is any truncation of velocities.
-
-! === module ocean_model_init ===
-
-! === module MOM_oda_incupd ===
-ODA_INCUPD = @[ODA_INCUPD]   ! [Boolean] default = False
-                             ! If true, oda incremental updates will be applied
-                             ! everywhere in the domain.
-ODA_INCUPD_FILE = "mom6_increment.nc"   ! The name of the file with the T,S,h increments.
-
-ODA_TEMPINC_VAR = "Temp"        ! default = "ptemp_inc"
-                                ! The name of the potential temperature inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_SALTINC_VAR = "Salt"        ! default = "sal_inc"
-                                ! The name of the salinity inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_THK_VAR = "h"               ! default = "h"
-                                ! The name of the int. depth inc. variable in
-                                ! ODA_INCUPD_FILE.
-ODA_INCUPD_UV = true            !
-ODA_UINC_VAR = "u"              ! default = "u_inc"
-                                ! The name of the zonal vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_VINC_VAR = "v"              ! default = "v_inc"
-                                ! The name of the meridional vel. inc. variable in
-                                ! ODA_INCUPD_UV_FILE.
-ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS]            ! default=3.0
-
-! === module MOM_surface_forcing ===
-OCEAN_SURFACE_STAGGER = "A"     ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the surface velocity field that is
-                                ! returned to the coupler.  Valid values include
-                                ! 'A', 'B', or 'C'.
-
-MAX_P_SURF = 0.0                !   [Pa] default = -1.0
-                                ! The maximum surface pressure that can be exerted by the atmosphere and
-                                ! floating sea-ice or ice shelves. This is needed because the FMS coupling
-                                ! structure does not limit the water that can be frozen out of the ocean and the
-                                ! ice-ocean heat fluxes are treated explicitly.  No limit is applied if a
-                                ! negative value is used.
-WIND_STAGGER = "A"              ! default = "C"
-                                ! A case-insensitive character string to indicate the
-                                ! staggering of the input wind stress field.  Valid
-                                ! values are 'A', 'B', or 'C'.
-! === module MOM_restart ===
-
-! === module MOM_file_parser ===
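
DTBT, removed above, is the parameter whose comment carries the most arithmetic. A worked reading of that comment with purely hypothetical numbers (a diagnosed stable barotropic step of 90 s and a baroclinic step DT of 900 s): DTBT = -0.9 targets 0.9 * 90 = 81 s, and the step actually used is the largest integer fraction of DT that does not exceed that target. This is only a reading of the comment, not MOM6's own code:

import math

def effective_dtbt(dt: float, dt_stable: float, dtbt: float) -> float:
    # Negative DTBT means "this fraction of the diagnosed stable step";
    # the result is then snapped down to DT divided by a whole number.
    target = -dtbt * dt_stable if dtbt < 0 else dtbt
    return dt / math.ceil(dt / target)

print(effective_dtbt(dt=900.0, dt_stable=90.0, dtbt=-0.9))  # 75.0
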
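
The @[...] tokens that appear throughout the removed MOM_input template (for example @[NX_GLB], @[NY_GLB], @[MOM6_DIAG_MISVAL], @[ODA_INCUPD]) and in the ufs.configure.*.IN templates removed below are placeholders the workflow resolves at run time from shell variables before handing the file to the model. A minimal sketch of that substitution, assuming a simple name-to-value mapping (illustrative only, not the workflow's own template filler):

import re

def fill_template(text: str, values: dict) -> str:
    """Replace each @[NAME] token with values[NAME]; leave unknown tokens untouched."""
    return re.sub(r"@\[(\w+)\]",
                  lambda m: str(values.get(m.group(1), m.group(0))),
                  text)

print(fill_template("NIGLOBAL = @[NX_GLB]\nNJGLOBAL = @[NY_GLB]",
                    {"NX_GLB": 1440, "NY_GLB": 1080}))
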
diff --git a/parm/ufs/ufs.configure.atm.IN b/parm/ufs/ufs.configure.atm.IN
deleted file mode 100644
index 3457d8cf53..0000000000
--- a/parm/ufs/ufs.configure.atm.IN
+++ /dev/null
@@ -1,22 +0,0 @@
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-EARTH_component_list: ATM
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  Diagnostic = 0
-::
-
-# Run Sequence #
-runSeq::
-  ATM
-::
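
The *_petlist_bounds entries in the template removed above (and in the coupled templates that follow) are inclusive ESMF PET ranges, i.e. the span of MPI tasks each component runs on. A rough sketch of how such bounds arise when components are laid out back to back, with hypothetical task counts (illustrative only, not the workflow's PE-layout logic):

def petlist_bounds(task_counts: dict) -> dict:
    """Return {component: (first_pet, last_pet)} for a contiguous layout."""
    bounds, start = {}, 0
    for name, count in task_counts.items():
        bounds[name] = (start, start + count - 1)
        start += count
    return bounds

print(petlist_bounds({"ATM": 384, "OCN": 120, "ICE": 48}))
# {'ATM': (0, 383), 'OCN': (384, 503), 'ICE': (504, 551)}
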
diff --git a/parm/ufs/ufs.configure.atm_aero.IN b/parm/ufs/ufs.configure.atm_aero.IN
deleted file mode 100644
index 629cc156ce..0000000000
--- a/parm/ufs/ufs.configure.atm_aero.IN
+++ /dev/null
@@ -1,40 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-# EARTH #
-EARTH_component_list: ATM CHM
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-::
-
-# CHM #
-CHM_model:                      @[chm_model]
-CHM_petlist_bounds:             @[chm_petlist_bounds]
-CHM_omp_num_threads:            @[chm_omp_num_threads]
-CHM_attributes::
-  Verbosity = 0
-::
-
-# Run Sequence #
-runSeq::
-  @@[coupling_interval_fast_sec]
-    ATM phase1
-    ATM -> CHM
-    CHM
-    CHM -> ATM
-    ATM phase2
-  @
-::
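
In these runSeq blocks, a line starting with '@' plus an interval (here the templated @[coupling_interval_fast_sec]) opens a time loop and a bare '@' closes it; everything listed inside runs once per iteration, so the aerosol template removed above splits the atmosphere into two phases around the chemistry component on every fast step. A schematic sketch of that loop with stand-in step names and hypothetical interval values (the real sequencing is done by the NUOPC driver):

def fast_loop_steps(total_sec: int, fast_sec: int) -> list:
    steps, t = [], 0
    while t < total_sec:                       # "@<fast interval> ... @" time loop
        steps += ["ATM phase1",                # atmosphere up to the chemistry hand-off
                  "ATM -> CHM", "CHM",         # export to and run the aerosol component
                  "CHM -> ATM", "ATM phase2"]  # import results, finish the ATM step
        t += fast_sec
    return steps

print(fast_loop_steps(total_sec=1440, fast_sec=720))  # two passes through the sequence
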
diff --git a/parm/ufs/ufs.configure.blocked_atm_wav.IN b/parm/ufs/ufs.configure.blocked_atm_wav.IN
deleted file mode 100644
index b68aa2e735..0000000000
--- a/parm/ufs/ufs.configure.blocked_atm_wav.IN
+++ /dev/null
@@ -1,41 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:           @[esmf_logkind]
-globalResourceControl: true
-
-# EARTH #
-EARTH_component_list: ATM WAV
-EARTH_attributes::
-  Verbosity = max
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = max
-  DumpFields = true
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = max
-::
-
-
-
-# Run Sequence #
-runSeq::
-  @@[coupling_interval_sec]
-    ATM -> WAV
-    ATM
-    WAV
-  @
-::
diff --git a/parm/ufs/ufs.configure.cpld.IN b/parm/ufs/ufs.configure.cpld.IN
deleted file mode 100644
index e473fb2a03..0000000000
--- a/parm/ufs/ufs.configure.cpld.IN
+++ /dev/null
@@ -1,122 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-# EARTH #
-EARTH_component_list: MED ATM OCN ICE
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_ocn_avg
-   MED -> OCN :remapMethod=redist
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     ATM
-     ICE
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-   @
-   OCN -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
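
The fully coupled template removed above nests a fast ATM/ICE loop inside a slow OCN loop: the mediator accumulates and averages forcing over the fast steps, and the ocean advances once per slow interval on that averaged forcing. A schematic sketch of the nesting with stub step names and hypothetical intervals (the fast interval is assumed to divide the slow one):

def coupled_sequence(fhmax_sec: int = 3600, slow_sec: int = 1800, fast_sec: int = 600) -> list:
    log, t = [], 0
    while t < fhmax_sec:                              # @<coupling_interval_slow_sec>
        log += ["MED prep_ocn_avg", "MED -> OCN", "OCN"]
        t_fast = 0
        while t_fast < slow_sec:                      # @<coupling_interval_fast_sec>
            log += ["MED -> ATM", "MED -> ICE",
                    "ATM", "ICE",
                    "ATM -> MED", "ICE -> MED",
                    "MED prep_ocn_accum"]             # build up forcing for the next OCN step
            t_fast += fast_sec
        log += ["OCN -> MED", "MED post_ocn", "MED restart_write"]
        t += slow_sec
    return log

print("\n".join(coupled_sequence()))
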
diff --git a/parm/ufs/ufs.configure.cpld_aero.IN b/parm/ufs/ufs.configure.cpld_aero.IN
deleted file mode 100644
index d90d377006..0000000000
--- a/parm/ufs/ufs.configure.cpld_aero.IN
+++ /dev/null
@@ -1,134 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:           @[esmf_logkind]
-globalResourceControl: true
-
-# EARTH #
-EARTH_component_list: MED ATM CHM OCN ICE
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# CHM #
-CHM_model:                      @[chm_model]
-CHM_petlist_bounds:             @[chm_petlist_bounds]
-CHM_omp_num_threads:            @[chm_omp_num_threads]
-CHM_attributes::
-  Verbosity = 0
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_ocn_avg
-   MED -> OCN :remapMethod=redist
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     ATM phase1
-     ATM -> CHM
-     CHM
-     CHM -> ATM
-     ATM phase2
-     ICE
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-   @
-   OCN -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
diff --git a/parm/ufs/ufs.configure.cpld_aero_outerwave.IN b/parm/ufs/ufs.configure.cpld_aero_outerwave.IN
deleted file mode 100644
index 23e7751112..0000000000
--- a/parm/ufs/ufs.configure.cpld_aero_outerwave.IN
+++ /dev/null
@@ -1,151 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:           @[esmf_logkind]
-globalResourceControl: true
-
-# EARTH #
-EARTH_component_list: MED ATM CHM OCN ICE WAV
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# CHM #
-CHM_model:                      @[chm_model]
-CHM_petlist_bounds:             @[chm_petlist_bounds]
-CHM_omp_num_threads:            @[chm_omp_num_threads]
-CHM_attributes::
-  Verbosity = 0
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = 0
-  OverwriteSlice = false
-  mesh_wav = @[MESH_WAV]
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_wav_avg
-   MED med_phases_prep_ocn_avg
-   MED -> WAV :remapMethod=redist
-   MED -> OCN :remapMethod=redist
-   WAV
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     ATM phase1
-     ATM -> CHM
-     CHM
-     CHM -> ATM
-     ATM phase2
-     ICE
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-     MED med_phases_prep_wav_accum
-   @
-   OCN -> MED :remapMethod=redist
-   WAV -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_post_wav
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      WAV_model = @[wav_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
diff --git a/parm/ufs/ufs.configure.cpld_aero_wave.IN b/parm/ufs/ufs.configure.cpld_aero_wave.IN
deleted file mode 100644
index ab0f6a9f8d..0000000000
--- a/parm/ufs/ufs.configure.cpld_aero_wave.IN
+++ /dev/null
@@ -1,151 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:           @[esmf_logkind]
-globalResourceControl: true
-
-# EARTH #
-EARTH_component_list: MED ATM CHM OCN ICE WAV
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# CHM #
-CHM_model:                      @[chm_model]
-CHM_petlist_bounds:             @[chm_petlist_bounds]
-CHM_omp_num_threads:            @[chm_omp_num_threads]
-CHM_attributes::
-  Verbosity = 0
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = 0
-  OverwriteSlice = false
-  mesh_wav = @[MESH_WAV]
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_ocn_avg
-   MED -> OCN :remapMethod=redist
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED med_phases_prep_wav_accum
-     MED med_phases_prep_wav_avg
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     MED -> WAV :remapMethod=redist
-     ATM phase1
-     ATM -> CHM
-     CHM
-     CHM -> ATM
-     ATM phase2
-     ICE
-     WAV
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     WAV -> MED :remapMethod=redist
-     MED med_phases_post_wav
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-   @
-   OCN -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      WAV_model = @[wav_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
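
Comparing the two aerosol+wave templates removed above: the *_outerwave variant runs WAV in the slow (ocean) loop, while the *_wave variant runs it in the fast (atmosphere) loop, so wave fields are exchanged correspondingly more often in the latter. Illustrative arithmetic only, with hypothetical coupling intervals:

def wav_exchanges_per_hour(slow_sec: int, fast_sec: int, outerwave: bool) -> int:
    # One WAV exchange per iteration of whichever loop the wave model sits in.
    return 3600 // (slow_sec if outerwave else fast_sec)

print(wav_exchanges_per_hour(slow_sec=1800, fast_sec=600, outerwave=True))   # 2
print(wav_exchanges_per_hour(slow_sec=1800, fast_sec=600, outerwave=False))  # 6
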
diff --git a/parm/ufs/ufs.configure.cpld_outerwave.IN b/parm/ufs/ufs.configure.cpld_outerwave.IN
deleted file mode 100644
index 9a45d5ff9a..0000000000
--- a/parm/ufs/ufs.configure.cpld_outerwave.IN
+++ /dev/null
@@ -1,139 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-# EARTH #
-EARTH_component_list: MED ATM OCN ICE WAV
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = 0
-  OverwriteSlice = false
-  mesh_wav = @[MESH_WAV]
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_wav_avg
-   MED med_phases_prep_ocn_avg
-   MED -> WAV :remapMethod=redist
-   MED -> OCN :remapMethod=redist
-   WAV
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     ATM
-     ICE
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-     MED med_phases_prep_wav_accum
-   @
-   OCN -> MED :remapMethod=redist
-   WAV -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_post_wav
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      WAV_model = @[wav_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
diff --git a/parm/ufs/ufs.configure.cpld_wave.IN b/parm/ufs/ufs.configure.cpld_wave.IN
deleted file mode 100644
index 37a462a5d4..0000000000
--- a/parm/ufs/ufs.configure.cpld_wave.IN
+++ /dev/null
@@ -1,139 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-# EARTH #
-EARTH_component_list: MED ATM OCN ICE WAV
-EARTH_attributes::
-  Verbosity = 0
-::
-
-# MED #
-MED_model:                      @[med_model]
-MED_petlist_bounds:             @[med_petlist_bounds]
-MED_omp_num_threads:            @[med_omp_num_threads]
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-::
-
-# OCN #
-OCN_model:                      @[ocn_model]
-OCN_petlist_bounds:             @[ocn_petlist_bounds]
-OCN_omp_num_threads:            @[ocn_omp_num_threads]
-OCN_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ocn = @[MESH_OCN_ICE]
-  use_coldstart = @[use_coldstart]
-  use_mommesh = @[use_mommesh]
-::
-
-# ICE #
-ICE_model:                      @[ice_model]
-ICE_petlist_bounds:             @[ice_petlist_bounds]
-ICE_omp_num_threads:            @[ice_omp_num_threads]
-ICE_attributes::
-  Verbosity = 0
-  DumpFields = @[DumpFields]
-  ProfileMemory = false
-  OverwriteSlice = true
-  mesh_ice = @[MESH_OCN_ICE]
-  eps_imesh = @[eps_imesh]
-  stop_n = @[RESTART_N]
-  stop_option = nhours
-  stop_ymd = -999
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = 0
-  OverwriteSlice = false
-  mesh_wav = @[MESH_WAV]
-::
-
-# CMEPS warm run sequence
-runSeq::
-@@[coupling_interval_slow_sec]
-   MED med_phases_prep_ocn_avg
-   MED -> OCN :remapMethod=redist
-   OCN
-   @@[coupling_interval_fast_sec]
-     MED med_phases_prep_atm
-     MED med_phases_prep_ice
-     MED med_phases_prep_wav_accum
-     MED med_phases_prep_wav_avg
-     MED -> ATM :remapMethod=redist
-     MED -> ICE :remapMethod=redist
-     MED -> WAV :remapMethod=redist
-     ATM
-     ICE
-     WAV
-     ATM -> MED :remapMethod=redist
-     MED med_phases_post_atm
-     ICE -> MED :remapMethod=redist
-     MED med_phases_post_ice
-     WAV -> MED :remapMethod=redist
-     MED med_phases_post_wav
-     MED med_phases_ocnalb_run
-     MED med_phases_prep_ocn_accum
-   @
-   OCN -> MED :remapMethod=redist
-   MED med_phases_post_ocn
-   MED med_phases_restart_write
-@
-::
-
-# CMEPS variables
-
-DRIVER_attributes::
-::
-MED_attributes::
-      ATM_model = @[atm_model]
-      ICE_model = @[ice_model]
-      OCN_model = @[ocn_model]
-      WAV_model = @[wav_model]
-      coupling_mode = @[CPLMODE]
-      history_tile_atm = @[ATMTILESIZE]
-      pio_rearranger = box
-      ocean_albedo_limit = @[ocean_albedo_limit]
-::
-ALLCOMP_attributes::
-      ScalarFieldCount = 2
-      ScalarFieldIdxGridNX = 1
-      ScalarFieldIdxGridNY = 2
-      ScalarFieldName = cpl_scalars
-      start_type = @[RUNTYPE]
-      restart_dir = RESTART/
-      case_name = ufs.cpld
-      restart_n = @[RESTART_N]
-      restart_option = nhours
-      restart_ymd = -999
-      dbug_flag = @[cap_dbug_flag]
-      stop_n = @[FHMAX]
-      stop_option = nhours
-      stop_ymd = -999
-      orb_eccen = 1.e36
-      orb_iyear = 2000
-      orb_iyear_align = 2000
-      orb_mode = fixed_year
-      orb_mvelp = 1.e36
-      orb_obliq = 1.e36
-::
diff --git a/parm/ufs/ufs.configure.leapfrog_atm_wav.IN b/parm/ufs/ufs.configure.leapfrog_atm_wav.IN
deleted file mode 100644
index ec22c9478c..0000000000
--- a/parm/ufs/ufs.configure.leapfrog_atm_wav.IN
+++ /dev/null
@@ -1,41 +0,0 @@
-#############################################
-####  UFS Run-Time Configuration File   #####
-#############################################
-
-# ESMF #
-logKindFlag:            @[esmf_logkind]
-globalResourceControl:  true
-
-# EARTH #
-EARTH_component_list: ATM WAV
-EARTH_attributes::
-  Verbosity = max
-::
-
-# ATM #
-ATM_model:                      @[atm_model]
-ATM_petlist_bounds:             @[atm_petlist_bounds]
-ATM_omp_num_threads:            @[atm_omp_num_threads]
-ATM_attributes::
-  Verbosity = max
-  DumpFields = true
-::
-
-# WAV #
-WAV_model:                      @[wav_model]
-WAV_petlist_bounds:             @[wav_petlist_bounds]
-WAV_omp_num_threads:            @[wav_omp_num_threads]
-WAV_attributes::
-  Verbosity = max
-::
-
-
-
-# Run Sequence #
-runSeq::
-  @@[coupling_interval_slow_sec]
-    ATM
-    ATM -> WAV
-    WAV
-  @
-::
diff --git a/parm/wave/at_10m_interp.inp.tmpl b/parm/wave/at_10m_interp.inp.tmpl
index b2a80081e1..6f4c1f7099 100755
--- a/parm/wave/at_10m_interp.inp.tmpl
+++ b/parm/wave/at_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids 
  2
 $ Grid extensions 
-  'gnh_10m'
+  'uglo_m1g16'
   'at_10m'
 $
  0
diff --git a/parm/wave/ep_10m_interp.inp.tmpl b/parm/wave/ep_10m_interp.inp.tmpl
index 0848854ccf..23cfd50c2e 100755
--- a/parm/wave/ep_10m_interp.inp.tmpl
+++ b/parm/wave/ep_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids 
  2
 $ Grid extensions 
-  'gnh_10m'
+  'uglo_m1g16'
   'ep_10m'
 $
  0
diff --git a/parm/wave/glo_15mxt_interp.inp.tmpl b/parm/wave/glo_15mxt_interp.inp.tmpl
index 74bc9eebf4..19e9dae684 100755
--- a/parm/wave/glo_15mxt_interp.inp.tmpl
+++ b/parm/wave/glo_15mxt_interp.inp.tmpl
@@ -3,11 +3,9 @@ $------------------------------------------------
 $ Start Time DT NSteps
  TIME DT NSTEPS
 $ Total number of grids 
- 4
+ 2
 $ Grid extensions 
-  'gnh_10m'
-  'aoc_9km'
-  'gsh_15m' 
+  'uglo_m1g16'
   'glo_15mxt'
 $
  0
diff --git a/parm/wave/glo_200_interp.inp.tmpl b/parm/wave/glo_200_interp.inp.tmpl
new file mode 100755
index 0000000000..c238a6fe0b
--- /dev/null
+++ b/parm/wave/glo_200_interp.inp.tmpl
@@ -0,0 +1,12 @@
+$ Input file for interpolation of GLO_200 Grid
+$------------------------------------------------
+$ Start Time DT NSteps
+ TIME DT NSTEPS
+$ Total number of grids 
+ 2
+$ Grid extensions 
+  'uglo_100km'
+  'glo_200'
+$
+ 0
+$
diff --git a/parm/wave/glo_30m_interp.inp.tmpl b/parm/wave/glo_30m_interp.inp.tmpl
index ea1baf7fc4..c62881202c 100755
--- a/parm/wave/glo_30m_interp.inp.tmpl
+++ b/parm/wave/glo_30m_interp.inp.tmpl
@@ -3,11 +3,9 @@ $------------------------------------------------
 $ Start Time DT NSteps
  TIME DT NSTEPS
 $ Total number of grids 
- 4
+ 2
 $ Grid extensions 
-  'gnh_10m'
-  'aoc_9km'
-  'gsh_15m' 
+  'uglo_m1g16'
   'glo_30m'
 $
  0
diff --git a/parm/wave/wc_10m_interp.inp.tmpl b/parm/wave/wc_10m_interp.inp.tmpl
index abb51b4dfc..8338c91d0c 100755
--- a/parm/wave/wc_10m_interp.inp.tmpl
+++ b/parm/wave/wc_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids 
  2
 $ Grid extensions 
-  'gnh_10m'
+  'uglo_m1g16'
   'wc_10m'
 $
  0
diff --git a/parm/wave/ww3_grib2.glo_100.inp.tmpl b/parm/wave/ww3_grib2.glo_100.inp.tmpl
new file mode 100755
index 0000000000..ddfabdb13d
--- /dev/null
+++ b/parm/wave/ww3_grib2.glo_100.inp.tmpl
@@ -0,0 +1,9 @@
+$ WAVEWATCH-III gridded output input file 
+$ ----------------------------------------
+TIME DT NT
+N
+FLAGS
+$
+TIME  7 MODNR GRIDNR 0 0
+$
+$ end of input file
diff --git a/parm/wmo/grib2_awpgfs_20km_akf003 b/parm/wmo/grib2_awpgfs_20km_akf003
index 7d2a37aa76..56ce2c0ecc 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf003
+++ b/parm/wmo/grib2_awpgfs_20km_akf003
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 3 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 3 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf006 b/parm/wmo/grib2_awpgfs_20km_akf006
index 1993f04065..c6ab3406fc 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf006
+++ b/parm/wmo/grib2_awpgfs_20km_akf006
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 6 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf009 b/parm/wmo/grib2_awpgfs_20km_akf009
index d0946fbb85..84dcce263e 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf009
+++ b/parm/wmo/grib2_awpgfs_20km_akf009
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 9 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 9 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf012 b/parm/wmo/grib2_awpgfs_20km_akf012
index 6902e22587..76d6e8ee83 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf012
+++ b/parm/wmo/grib2_awpgfs_20km_akf012
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBC89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 12 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBC98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBC98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBC98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf015 b/parm/wmo/grib2_awpgfs_20km_akf015
index 19b7a5e18b..09027a856b 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf015
+++ b/parm/wmo/grib2_awpgfs_20km_akf015
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 15 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 15 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf018 b/parm/wmo/grib2_awpgfs_20km_akf018
index f928b8aa9e..7c46b2592a 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf018
+++ b/parm/wmo/grib2_awpgfs_20km_akf018
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBD89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 18 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBD98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBD98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBD98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf021 b/parm/wmo/grib2_awpgfs_20km_akf021
index ab6e8ea698..7b576285a3 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf021
+++ b/parm/wmo/grib2_awpgfs_20km_akf021
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 21 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 21 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf024 b/parm/wmo/grib2_awpgfs_20km_akf024
index df3e24d111..36cb477a2f 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf024
+++ b/parm/wmo/grib2_awpgfs_20km_akf024
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 24 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf027 b/parm/wmo/grib2_awpgfs_20km_akf027
index 6cfaf1e208..2018c4d65b 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf027
+++ b/parm/wmo/grib2_awpgfs_20km_akf027
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 27 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 27 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf030 b/parm/wmo/grib2_awpgfs_20km_akf030
index c288012677..4ef755c47b 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf030
+++ b/parm/wmo/grib2_awpgfs_20km_akf030
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBF89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 30 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBF98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBF98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBF98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf033 b/parm/wmo/grib2_awpgfs_20km_akf033
index a2f05ef5de..8f615edefb 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf033
+++ b/parm/wmo/grib2_awpgfs_20km_akf033
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 33 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 33 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf036 b/parm/wmo/grib2_awpgfs_20km_akf036
index 429eb52a7d..6c86c6bbb2 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf036
+++ b/parm/wmo/grib2_awpgfs_20km_akf036
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBG89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 36 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBG98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBG98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBG98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf039 b/parm/wmo/grib2_awpgfs_20km_akf039
index 2c31136c6b..1eb2706d7d 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf039
+++ b/parm/wmo/grib2_awpgfs_20km_akf039
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 39 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 39 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf042 b/parm/wmo/grib2_awpgfs_20km_akf042
index 9f8cfef47f..8b922c49f1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf042
+++ b/parm/wmo/grib2_awpgfs_20km_akf042
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 42 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf045 b/parm/wmo/grib2_awpgfs_20km_akf045
index ce3e6371ac..d90b3014e2 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf045
+++ b/parm/wmo/grib2_awpgfs_20km_akf045
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 45 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 45 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf048 b/parm/wmo/grib2_awpgfs_20km_akf048
index eaea4e4a9f..da98d5e1e4 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf048
+++ b/parm/wmo/grib2_awpgfs_20km_akf048
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 48 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBI98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf051 b/parm/wmo/grib2_awpgfs_20km_akf051
index 94b9fe357e..03cea730ff 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf051
+++ b/parm/wmo/grib2_awpgfs_20km_akf051
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 51 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 51 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf054 b/parm/wmo/grib2_awpgfs_20km_akf054
index 208576514c..73f7f44328 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf054
+++ b/parm/wmo/grib2_awpgfs_20km_akf054
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 54 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf057 b/parm/wmo/grib2_awpgfs_20km_akf057
index 61c72d8f36..8d3dd046fd 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf057
+++ b/parm/wmo/grib2_awpgfs_20km_akf057
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 57 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 57 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf060 b/parm/wmo/grib2_awpgfs_20km_akf060
index ac82af5142..b847a7031d 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf060
+++ b/parm/wmo/grib2_awpgfs_20km_akf060
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBJ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 60 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBJ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBJ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBJ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf063 b/parm/wmo/grib2_awpgfs_20km_akf063
index 50379cb03a..dc616fd4ac 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf063
+++ b/parm/wmo/grib2_awpgfs_20km_akf063
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 63 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 63 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf066 b/parm/wmo/grib2_awpgfs_20km_akf066
index 7458bf1bc8..ffa3ccc629 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf066
+++ b/parm/wmo/grib2_awpgfs_20km_akf066
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 66 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf069 b/parm/wmo/grib2_awpgfs_20km_akf069
index aa667ae355..6887880ae9 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf069
+++ b/parm/wmo/grib2_awpgfs_20km_akf069
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 69 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 69 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf072 b/parm/wmo/grib2_awpgfs_20km_akf072
index fbe43c4367..feaf433665 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf072
+++ b/parm/wmo/grib2_awpgfs_20km_akf072
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 72 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf075 b/parm/wmo/grib2_awpgfs_20km_akf075
index ddde3f2265..0109659a7f 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf075
+++ b/parm/wmo/grib2_awpgfs_20km_akf075
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 75 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 75 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf078 b/parm/wmo/grib2_awpgfs_20km_akf078
index b1290566a3..9a8349d176 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf078
+++ b/parm/wmo/grib2_awpgfs_20km_akf078
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 78 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf081 b/parm/wmo/grib2_awpgfs_20km_akf081
index e7c4913337..ce12dfebe1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf081
+++ b/parm/wmo/grib2_awpgfs_20km_akf081
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 81 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 81 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf084 b/parm/wmo/grib2_awpgfs_20km_akf084
index bbeeb783da..57088ae6e2 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf084
+++ b/parm/wmo/grib2_awpgfs_20km_akf084
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 84 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf090 b/parm/wmo/grib2_awpgfs_20km_akf090
index c812e6c021..51da917072 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf090
+++ b/parm/wmo/grib2_awpgfs_20km_akf090
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 90 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 84 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 84 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf096 b/parm/wmo/grib2_awpgfs_20km_akf096
index ddfe7f3296..011dbfe3c1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf096
+++ b/parm/wmo/grib2_awpgfs_20km_akf096
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBM89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 96 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBM98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 90 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBM98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 90 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf102 b/parm/wmo/grib2_awpgfs_20km_akf102
index 3d3945a4af..88533a4fab 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf102
+++ b/parm/wmo/grib2_awpgfs_20km_akf102
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 102 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 96 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 96 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf108 b/parm/wmo/grib2_awpgfs_20km_akf108
index 251316c1a3..b6c93e2f55 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf108
+++ b/parm/wmo/grib2_awpgfs_20km_akf108
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 108 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 102 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 102 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf114 b/parm/wmo/grib2_awpgfs_20km_akf114
index cddfcec776..5682a9bd04 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf114
+++ b/parm/wmo/grib2_awpgfs_20km_akf114
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 114 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 108 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 108 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf120 b/parm/wmo/grib2_awpgfs_20km_akf120
index bcf0df3313..f66c40ff3c 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf120
+++ b/parm/wmo/grib2_awpgfs_20km_akf120
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 120 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 114 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 114 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf126 b/parm/wmo/grib2_awpgfs_20km_akf126
index 3b662c3198..1fdd8cbea3 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf126
+++ b/parm/wmo/grib2_awpgfs_20km_akf126
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 126 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 120 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 120 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf132 b/parm/wmo/grib2_awpgfs_20km_akf132
index 55a5cbaa9f..4e7f0e9494 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf132
+++ b/parm/wmo/grib2_awpgfs_20km_akf132
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 132 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 126 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 126 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf138 b/parm/wmo/grib2_awpgfs_20km_akf138
index 106640aa68..b1412f6fbc 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf138
+++ b/parm/wmo/grib2_awpgfs_20km_akf138
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 138 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 132 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 132 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf144 b/parm/wmo/grib2_awpgfs_20km_akf144
index 8b3e500bde..14084fefd0 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf144
+++ b/parm/wmo/grib2_awpgfs_20km_akf144
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 144 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 138 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 138 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf150 b/parm/wmo/grib2_awpgfs_20km_akf150
index 7b0b9be0c8..19e07778f1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf150
+++ b/parm/wmo/grib2_awpgfs_20km_akf150
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 150 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 144 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 144 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf156 b/parm/wmo/grib2_awpgfs_20km_akf156
index b12b79bc62..bc8350175f 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf156
+++ b/parm/wmo/grib2_awpgfs_20km_akf156
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 156 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 150 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 150 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf162 b/parm/wmo/grib2_awpgfs_20km_akf162
index efb71848af..774b5f5843 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf162
+++ b/parm/wmo/grib2_awpgfs_20km_akf162
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 162 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 156 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 156 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf168 b/parm/wmo/grib2_awpgfs_20km_akf168
index 8b3b0835ed..87a2cd982f 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf168
+++ b/parm/wmo/grib2_awpgfs_20km_akf168
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 168 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 162 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 162 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf174 b/parm/wmo/grib2_awpgfs_20km_akf174
index 4d9f6cbcd0..1b5d9ef757 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf174
+++ b/parm/wmo/grib2_awpgfs_20km_akf174
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 174 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 168 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 168 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf180 b/parm/wmo/grib2_awpgfs_20km_akf180
index e61a24b45e..f3f30070eb 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf180
+++ b/parm/wmo/grib2_awpgfs_20km_akf180
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 180 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 174 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 174 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf186 b/parm/wmo/grib2_awpgfs_20km_akf186
index 349d880d42..2890fe37e1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf186
+++ b/parm/wmo/grib2_awpgfs_20km_akf186
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 186 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 180 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 180 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf192 b/parm/wmo/grib2_awpgfs_20km_akf192
index 4b75d8bc77..7108aa4edc 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf192
+++ b/parm/wmo/grib2_awpgfs_20km_akf192
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 192 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 186 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 186 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf198 b/parm/wmo/grib2_awpgfs_20km_akf198
index 079fde4b30..431deff860 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf198
+++ b/parm/wmo/grib2_awpgfs_20km_akf198
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 198 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 192 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 192 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf204 b/parm/wmo/grib2_awpgfs_20km_akf204
index c00a859972..de100ed71d 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf204
+++ b/parm/wmo/grib2_awpgfs_20km_akf204
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 204 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 198 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 198 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf210 b/parm/wmo/grib2_awpgfs_20km_akf210
index dd3ff19043..f2c3f6163d 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf210
+++ b/parm/wmo/grib2_awpgfs_20km_akf210
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 210 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 204 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 204 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf216 b/parm/wmo/grib2_awpgfs_20km_akf216
index 262191524d..d43c050d46 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf216
+++ b/parm/wmo/grib2_awpgfs_20km_akf216
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 216 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 210 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 210 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf222 b/parm/wmo/grib2_awpgfs_20km_akf222
index 1a578c63cd..39e561d826 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf222
+++ b/parm/wmo/grib2_awpgfs_20km_akf222
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 222 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 216 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 216 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf228 b/parm/wmo/grib2_awpgfs_20km_akf228
index 62e4393fe8..1e543abfa9 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf228
+++ b/parm/wmo/grib2_awpgfs_20km_akf228
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBX89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 228 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBX98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 222 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBX98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 222 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBX98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf234 b/parm/wmo/grib2_awpgfs_20km_akf234
index 2ba85d0e42..96bb89c666 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf234
+++ b/parm/wmo/grib2_awpgfs_20km_akf234
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPBZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 234 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 228 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTBZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 228 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMBZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_akf240 b/parm/wmo/grib2_awpgfs_20km_akf240
index 13c725200e..ec502a0da1 100644
--- a/parm/wmo/grib2_awpgfs_20km_akf240
+++ b/parm/wmo/grib2_awpgfs_20km_akf240
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPBY89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 240 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTBY98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 234 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTBY98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 234 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMBY98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 240 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf003 b/parm/wmo/grib2_awpgfs_20km_conusf003
index 2dcb033211..63a9860aa0 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf003
+++ b/parm/wmo/grib2_awpgfs_20km_conusf003
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 3 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 3 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf006 b/parm/wmo/grib2_awpgfs_20km_conusf006
index 50a27c75b4..9d060567ea 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf006
+++ b/parm/wmo/grib2_awpgfs_20km_conusf006
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 6 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf009 b/parm/wmo/grib2_awpgfs_20km_conusf009
index 1039b7c333..740a0f26a4 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf009
+++ b/parm/wmo/grib2_awpgfs_20km_conusf009
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 9 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 9 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf012 b/parm/wmo/grib2_awpgfs_20km_conusf012
index ab4bb22471..4893c880ce 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf012
+++ b/parm/wmo/grib2_awpgfs_20km_conusf012
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNC89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 12 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNC98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNC98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNC98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf015 b/parm/wmo/grib2_awpgfs_20km_conusf015
index 05d58934e2..fe266fdb29 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf015
+++ b/parm/wmo/grib2_awpgfs_20km_conusf015
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 15 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 15 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf018 b/parm/wmo/grib2_awpgfs_20km_conusf018
index 5ceec0fe6e..1087409e91 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf018
+++ b/parm/wmo/grib2_awpgfs_20km_conusf018
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPND89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 18 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTND98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTND98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMND98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf021 b/parm/wmo/grib2_awpgfs_20km_conusf021
index 4d2a378832..f90fa2f0f6 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf021
+++ b/parm/wmo/grib2_awpgfs_20km_conusf021
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 21 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 21 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf024 b/parm/wmo/grib2_awpgfs_20km_conusf024
index 39a3405281..bbdde44c45 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf024
+++ b/parm/wmo/grib2_awpgfs_20km_conusf024
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 24 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf027 b/parm/wmo/grib2_awpgfs_20km_conusf027
index bb66a03391..fd99c83735 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf027
+++ b/parm/wmo/grib2_awpgfs_20km_conusf027
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 27 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 27 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf030 b/parm/wmo/grib2_awpgfs_20km_conusf030
index 1fa1ab2c89..c69a9e2189 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf030
+++ b/parm/wmo/grib2_awpgfs_20km_conusf030
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNF89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 30 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNF98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNF98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNF98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf033 b/parm/wmo/grib2_awpgfs_20km_conusf033
index be31353fb0..5e2d0190a3 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf033
+++ b/parm/wmo/grib2_awpgfs_20km_conusf033
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 33 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 33 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf036 b/parm/wmo/grib2_awpgfs_20km_conusf036
index aa1b5077eb..4f14417232 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf036
+++ b/parm/wmo/grib2_awpgfs_20km_conusf036
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNG89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 36 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNG98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNG98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNG98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf039 b/parm/wmo/grib2_awpgfs_20km_conusf039
index 4b194b44b5..0416f64876 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf039
+++ b/parm/wmo/grib2_awpgfs_20km_conusf039
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 39 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 39 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf042 b/parm/wmo/grib2_awpgfs_20km_conusf042
index c53e757fbd..bc7ea8b5ad 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf042
+++ b/parm/wmo/grib2_awpgfs_20km_conusf042
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 42 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf045 b/parm/wmo/grib2_awpgfs_20km_conusf045
index 971dd0d8f5..0c7f33b3df 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf045
+++ b/parm/wmo/grib2_awpgfs_20km_conusf045
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 45 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 45 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf048 b/parm/wmo/grib2_awpgfs_20km_conusf048
index 35eeb58996..655fa11ec7 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf048
+++ b/parm/wmo/grib2_awpgfs_20km_conusf048
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 48 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNI98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf051 b/parm/wmo/grib2_awpgfs_20km_conusf051
index 1d3c291e97..04e470e7b4 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf051
+++ b/parm/wmo/grib2_awpgfs_20km_conusf051
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 51 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 51 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf054 b/parm/wmo/grib2_awpgfs_20km_conusf054
index 3480dfdfe3..b3bd6e8163 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf054
+++ b/parm/wmo/grib2_awpgfs_20km_conusf054
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 54 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf057 b/parm/wmo/grib2_awpgfs_20km_conusf057
index 3fcfb4ddb0..e61f4e2f1c 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf057
+++ b/parm/wmo/grib2_awpgfs_20km_conusf057
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 57 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 57 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf060 b/parm/wmo/grib2_awpgfs_20km_conusf060
index 813f5433f3..d55773770a 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf060
+++ b/parm/wmo/grib2_awpgfs_20km_conusf060
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNJ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 60 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNJ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNJ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNJ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf063 b/parm/wmo/grib2_awpgfs_20km_conusf063
index 9d16be5eb9..ae7cd57e31 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf063
+++ b/parm/wmo/grib2_awpgfs_20km_conusf063
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 63 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 63 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf066 b/parm/wmo/grib2_awpgfs_20km_conusf066
index d6dd06f442..99ced1938c 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf066
+++ b/parm/wmo/grib2_awpgfs_20km_conusf066
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 66 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf069 b/parm/wmo/grib2_awpgfs_20km_conusf069
index 76e1e30aeb..616642f5df 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf069
+++ b/parm/wmo/grib2_awpgfs_20km_conusf069
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 69 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 69 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf072 b/parm/wmo/grib2_awpgfs_20km_conusf072
index 2ed6b61b68..acef56acc9 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf072
+++ b/parm/wmo/grib2_awpgfs_20km_conusf072
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 72 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf075 b/parm/wmo/grib2_awpgfs_20km_conusf075
index 2c051ae083..9ef7ff6b3b 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf075
+++ b/parm/wmo/grib2_awpgfs_20km_conusf075
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 75 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 75 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf078 b/parm/wmo/grib2_awpgfs_20km_conusf078
index 569f8c0bb6..eb0acbb2b1 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf078
+++ b/parm/wmo/grib2_awpgfs_20km_conusf078
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 78 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf081 b/parm/wmo/grib2_awpgfs_20km_conusf081
index 597f61acea..cb52a17792 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf081
+++ b/parm/wmo/grib2_awpgfs_20km_conusf081
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 81 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 81 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf084 b/parm/wmo/grib2_awpgfs_20km_conusf084
index 996b9626dd..e444f6c676 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf084
+++ b/parm/wmo/grib2_awpgfs_20km_conusf084
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 84 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf090 b/parm/wmo/grib2_awpgfs_20km_conusf090
index 5fe2520d2a..4a19d48912 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf090
+++ b/parm/wmo/grib2_awpgfs_20km_conusf090
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 90 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 84 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 84 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf096 b/parm/wmo/grib2_awpgfs_20km_conusf096
index 929273abc9..6244fe29f0 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf096
+++ b/parm/wmo/grib2_awpgfs_20km_conusf096
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNM89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 96 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNM98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 90 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNM98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 90 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf102 b/parm/wmo/grib2_awpgfs_20km_conusf102
index f19cc7f452..c942511624 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf102
+++ b/parm/wmo/grib2_awpgfs_20km_conusf102
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 102 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 96 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 96 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf108 b/parm/wmo/grib2_awpgfs_20km_conusf108
index 02064150e2..7f0d81fd9b 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf108
+++ b/parm/wmo/grib2_awpgfs_20km_conusf108
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 108 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 102 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 102 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf114 b/parm/wmo/grib2_awpgfs_20km_conusf114
index 27041b534e..e8ae453347 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf114
+++ b/parm/wmo/grib2_awpgfs_20km_conusf114
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 114 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 108 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 108 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf120 b/parm/wmo/grib2_awpgfs_20km_conusf120
index 7d84f7362a..7b3277130f 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf120
+++ b/parm/wmo/grib2_awpgfs_20km_conusf120
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 120 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 114 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 114 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf126 b/parm/wmo/grib2_awpgfs_20km_conusf126
index cf1c0150eb..0f1ec7e106 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf126
+++ b/parm/wmo/grib2_awpgfs_20km_conusf126
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 126 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 120 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 120 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf132 b/parm/wmo/grib2_awpgfs_20km_conusf132
index 4a3e914eea..ae675da0f1 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf132
+++ b/parm/wmo/grib2_awpgfs_20km_conusf132
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 132 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 126 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 126 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf138 b/parm/wmo/grib2_awpgfs_20km_conusf138
index 1d467bd4cb..9436aaae09 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf138
+++ b/parm/wmo/grib2_awpgfs_20km_conusf138
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 138 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 132 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 132 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf144 b/parm/wmo/grib2_awpgfs_20km_conusf144
index 40435975ba..fbfe462f25 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf144
+++ b/parm/wmo/grib2_awpgfs_20km_conusf144
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 144 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 138 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 138 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf150 b/parm/wmo/grib2_awpgfs_20km_conusf150
index 69fcd59ccb..1c936bcd0f 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf150
+++ b/parm/wmo/grib2_awpgfs_20km_conusf150
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 150 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 144 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 144 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf156 b/parm/wmo/grib2_awpgfs_20km_conusf156
index a480c29ebf..3e786a24dc 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf156
+++ b/parm/wmo/grib2_awpgfs_20km_conusf156
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 156 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 150 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 150 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf162 b/parm/wmo/grib2_awpgfs_20km_conusf162
index 698d228b38..39e766ccd0 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf162
+++ b/parm/wmo/grib2_awpgfs_20km_conusf162
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 162 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 156 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 156 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf168 b/parm/wmo/grib2_awpgfs_20km_conusf168
index 45c7612a6c..bf203a0a9e 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf168
+++ b/parm/wmo/grib2_awpgfs_20km_conusf168
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 168 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 162 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 162 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf174 b/parm/wmo/grib2_awpgfs_20km_conusf174
index b1a1450f81..48f1ee6ba8 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf174
+++ b/parm/wmo/grib2_awpgfs_20km_conusf174
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 174 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 168 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 168 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf180 b/parm/wmo/grib2_awpgfs_20km_conusf180
index 6dc9d02160..cae87de5f6 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf180
+++ b/parm/wmo/grib2_awpgfs_20km_conusf180
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 180 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 174 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 174 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf186 b/parm/wmo/grib2_awpgfs_20km_conusf186
index 0db98746e9..665cc249c9 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf186
+++ b/parm/wmo/grib2_awpgfs_20km_conusf186
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 186 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 180 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 180 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf192 b/parm/wmo/grib2_awpgfs_20km_conusf192
index 6ce1771f48..cc30c7d26d 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf192
+++ b/parm/wmo/grib2_awpgfs_20km_conusf192
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 192 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 186 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 186 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf198 b/parm/wmo/grib2_awpgfs_20km_conusf198
index 982195a2b7..626d46182f 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf198
+++ b/parm/wmo/grib2_awpgfs_20km_conusf198
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 198 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 192 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 192 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf204 b/parm/wmo/grib2_awpgfs_20km_conusf204
index 1d1a20f44b..16bef68a2e 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf204
+++ b/parm/wmo/grib2_awpgfs_20km_conusf204
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 204 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 198 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 198 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf210 b/parm/wmo/grib2_awpgfs_20km_conusf210
index c0c1760d15..aeb0ce7546 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf210
+++ b/parm/wmo/grib2_awpgfs_20km_conusf210
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 210 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 204 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 204 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf216 b/parm/wmo/grib2_awpgfs_20km_conusf216
index 6f467f23c3..9c26e9f1dd 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf216
+++ b/parm/wmo/grib2_awpgfs_20km_conusf216
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 216 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 210 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 210 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf222 b/parm/wmo/grib2_awpgfs_20km_conusf222
index 092999981f..530666b624 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf222
+++ b/parm/wmo/grib2_awpgfs_20km_conusf222
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 222 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 216 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 216 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf228 b/parm/wmo/grib2_awpgfs_20km_conusf228
index b27f934fdf..dab460f7b0 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf228
+++ b/parm/wmo/grib2_awpgfs_20km_conusf228
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNX89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 228 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNX98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 222 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNX98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 222 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNX98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf234 b/parm/wmo/grib2_awpgfs_20km_conusf234
index 988b92775e..7d9fa3848d 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf234
+++ b/parm/wmo/grib2_awpgfs_20km_conusf234
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPNZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 234 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 228 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTNZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 228 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMNZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_conusf240 b/parm/wmo/grib2_awpgfs_20km_conusf240
index bfa6d5b0af..88b4559b1b 100644
--- a/parm/wmo/grib2_awpgfs_20km_conusf240
+++ b/parm/wmo/grib2_awpgfs_20km_conusf240
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPNY89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 240 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTNY98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 234 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTNY98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 234 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMNY98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 240 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf003 b/parm/wmo/grib2_awpgfs_20km_pacf003
index 615659755d..d37486a5f1 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf003
+++ b/parm/wmo/grib2_awpgfs_20km_pacf003
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 3 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 3 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf006 b/parm/wmo/grib2_awpgfs_20km_pacf006
index 42a0844227..dbd9ab490b 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf006
+++ b/parm/wmo/grib2_awpgfs_20km_pacf006
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 6 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf009 b/parm/wmo/grib2_awpgfs_20km_pacf009
index 499366cad5..76aac8a858 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf009
+++ b/parm/wmo/grib2_awpgfs_20km_pacf009
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 9 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 9 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf012 b/parm/wmo/grib2_awpgfs_20km_pacf012
index 22c707b26a..5f3c13f5ef 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf012
+++ b/parm/wmo/grib2_awpgfs_20km_pacf012
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEC89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 12 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEC98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEC98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEC98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf015 b/parm/wmo/grib2_awpgfs_20km_pacf015
index 6643f08f01..9985082514 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf015
+++ b/parm/wmo/grib2_awpgfs_20km_pacf015
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 15 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 15 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf018 b/parm/wmo/grib2_awpgfs_20km_pacf018
index e1bebef654..af486de289 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf018
+++ b/parm/wmo/grib2_awpgfs_20km_pacf018
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPED89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 18 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTED98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTED98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMED98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf021 b/parm/wmo/grib2_awpgfs_20km_pacf021
index 6aff6fd1f6..69c2c6cbaa 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf021
+++ b/parm/wmo/grib2_awpgfs_20km_pacf021
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 21 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 21 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf024 b/parm/wmo/grib2_awpgfs_20km_pacf024
index c43e1a933b..a52a32ce1a 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf024
+++ b/parm/wmo/grib2_awpgfs_20km_pacf024
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 24 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf027 b/parm/wmo/grib2_awpgfs_20km_pacf027
index 044f310d11..31746c4fda 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf027
+++ b/parm/wmo/grib2_awpgfs_20km_pacf027
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 27 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 27 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf030 b/parm/wmo/grib2_awpgfs_20km_pacf030
index a368aeeda4..beb9f83630 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf030
+++ b/parm/wmo/grib2_awpgfs_20km_pacf030
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEF89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 30 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEF98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEF98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEF98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf033 b/parm/wmo/grib2_awpgfs_20km_pacf033
index 22e28c1388..f2e293ab1e 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf033
+++ b/parm/wmo/grib2_awpgfs_20km_pacf033
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 33 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 33 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf036 b/parm/wmo/grib2_awpgfs_20km_pacf036
index 4887685aa1..12e1dadddc 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf036
+++ b/parm/wmo/grib2_awpgfs_20km_pacf036
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEG89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 36 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEG98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEG98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEG98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf039 b/parm/wmo/grib2_awpgfs_20km_pacf039
index 37ac606d55..0a19a167bd 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf039
+++ b/parm/wmo/grib2_awpgfs_20km_pacf039
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 39 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 39 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf042 b/parm/wmo/grib2_awpgfs_20km_pacf042
index 6f9ac0eb05..e3c459e644 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf042
+++ b/parm/wmo/grib2_awpgfs_20km_pacf042
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 42 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf045 b/parm/wmo/grib2_awpgfs_20km_pacf045
index 096d7bcb3b..ed44d87a3c 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf045
+++ b/parm/wmo/grib2_awpgfs_20km_pacf045
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 45 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 45 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf048 b/parm/wmo/grib2_awpgfs_20km_pacf048
index 942c67f5dc..09fee68368 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf048
+++ b/parm/wmo/grib2_awpgfs_20km_pacf048
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 48 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEI98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf051 b/parm/wmo/grib2_awpgfs_20km_pacf051
index b56c9aaa66..b64ce74110 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf051
+++ b/parm/wmo/grib2_awpgfs_20km_pacf051
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPER89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 51 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTER98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTER98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMER98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 51 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf054 b/parm/wmo/grib2_awpgfs_20km_pacf054
index 075f3f0bf6..1ab830e126 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf054
+++ b/parm/wmo/grib2_awpgfs_20km_pacf054
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 54 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf057 b/parm/wmo/grib2_awpgfs_20km_pacf057
index 163617c80b..877fdff2e0 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf057
+++ b/parm/wmo/grib2_awpgfs_20km_pacf057
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPES89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 57 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTES98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTES98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMES98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 57 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf060 b/parm/wmo/grib2_awpgfs_20km_pacf060
index 34cd787012..b06a04608c 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf060
+++ b/parm/wmo/grib2_awpgfs_20km_pacf060
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEJ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 60 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEJ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEJ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEJ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf063 b/parm/wmo/grib2_awpgfs_20km_pacf063
index d66c7f5cc6..e36fb1ce40 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf063
+++ b/parm/wmo/grib2_awpgfs_20km_pacf063
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 63 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 63 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf066 b/parm/wmo/grib2_awpgfs_20km_pacf066
index dd90c84ce4..fde4d9b7c8 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf066
+++ b/parm/wmo/grib2_awpgfs_20km_pacf066
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 66 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf069 b/parm/wmo/grib2_awpgfs_20km_pacf069
index 34f43a228c..6df5538ce5 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf069
+++ b/parm/wmo/grib2_awpgfs_20km_pacf069
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 69 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 69 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf072 b/parm/wmo/grib2_awpgfs_20km_pacf072
index 32b1d7f9b4..e0949621d4 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf072
+++ b/parm/wmo/grib2_awpgfs_20km_pacf072
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 72 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf075 b/parm/wmo/grib2_awpgfs_20km_pacf075
index 7d99a47b2c..98250fb1bc 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf075
+++ b/parm/wmo/grib2_awpgfs_20km_pacf075
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 75 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 75 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf078 b/parm/wmo/grib2_awpgfs_20km_pacf078
index 9a97afc2bb..4b38ba9af8 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf078
+++ b/parm/wmo/grib2_awpgfs_20km_pacf078
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPET89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 78 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTET98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTET98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMET98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf081 b/parm/wmo/grib2_awpgfs_20km_pacf081
index 7061a52bf6..10f309a934 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf081
+++ b/parm/wmo/grib2_awpgfs_20km_pacf081
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 81 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 81 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf084 b/parm/wmo/grib2_awpgfs_20km_pacf084
index 43cb6ea3ac..2c6e39854f 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf084
+++ b/parm/wmo/grib2_awpgfs_20km_pacf084
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 84 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf090 b/parm/wmo/grib2_awpgfs_20km_pacf090
index a0584a5db9..041ae171aa 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf090
+++ b/parm/wmo/grib2_awpgfs_20km_pacf090
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 90 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 84 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 84 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf096 b/parm/wmo/grib2_awpgfs_20km_pacf096
index a9a6f5c0e9..6ae396a1e2 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf096
+++ b/parm/wmo/grib2_awpgfs_20km_pacf096
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEM89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 96 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEM98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 90 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEM98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 90 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf102 b/parm/wmo/grib2_awpgfs_20km_pacf102
index 6d871b9cfe..5cb280196a 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf102
+++ b/parm/wmo/grib2_awpgfs_20km_pacf102
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 102 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 96 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 96 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf108 b/parm/wmo/grib2_awpgfs_20km_pacf108
index 9eb756fee2..5da58fecd7 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf108
+++ b/parm/wmo/grib2_awpgfs_20km_pacf108
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 108 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 102 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 102 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf114 b/parm/wmo/grib2_awpgfs_20km_pacf114
index 763a29101e..e6838e11ad 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf114
+++ b/parm/wmo/grib2_awpgfs_20km_pacf114
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 114 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 108 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 108 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf120 b/parm/wmo/grib2_awpgfs_20km_pacf120
index ba695651a1..4f94adf282 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf120
+++ b/parm/wmo/grib2_awpgfs_20km_pacf120
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 120 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 114 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 114 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf126 b/parm/wmo/grib2_awpgfs_20km_pacf126
index 1e6a0b00d9..0b3c56d926 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf126
+++ b/parm/wmo/grib2_awpgfs_20km_pacf126
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 126 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 120 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 120 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf132 b/parm/wmo/grib2_awpgfs_20km_pacf132
index 1a8d195e39..9a54278e33 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf132
+++ b/parm/wmo/grib2_awpgfs_20km_pacf132
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 132 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 126 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 126 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf138 b/parm/wmo/grib2_awpgfs_20km_pacf138
index bb45bf4e02..f1634fde0e 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf138
+++ b/parm/wmo/grib2_awpgfs_20km_pacf138
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 138 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 132 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 132 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf144 b/parm/wmo/grib2_awpgfs_20km_pacf144
index 8b477f37f6..7ea5f68078 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf144
+++ b/parm/wmo/grib2_awpgfs_20km_pacf144
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 144 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 138 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 138 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf150 b/parm/wmo/grib2_awpgfs_20km_pacf150
index 630530ef18..5ab99b3155 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf150
+++ b/parm/wmo/grib2_awpgfs_20km_pacf150
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 150 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 144 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 144 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf156 b/parm/wmo/grib2_awpgfs_20km_pacf156
index 6b35a3720e..f5d5475059 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf156
+++ b/parm/wmo/grib2_awpgfs_20km_pacf156
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPER89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 156 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTER98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 150 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTER98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 150 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMER98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf162 b/parm/wmo/grib2_awpgfs_20km_pacf162
index 5e73ef8a27..ba225f9103 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf162
+++ b/parm/wmo/grib2_awpgfs_20km_pacf162
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 162 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 156 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 156 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf168 b/parm/wmo/grib2_awpgfs_20km_pacf168
index 047dfff1ba..335be30724 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf168
+++ b/parm/wmo/grib2_awpgfs_20km_pacf168
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPES89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 168 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTES98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 162 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTES98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 162 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMES98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf174 b/parm/wmo/grib2_awpgfs_20km_pacf174
index 38c11de27d..7ff357a29a 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf174
+++ b/parm/wmo/grib2_awpgfs_20km_pacf174
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 174 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 168 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 168 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf180 b/parm/wmo/grib2_awpgfs_20km_pacf180
index 425f51af96..921c33b214 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf180
+++ b/parm/wmo/grib2_awpgfs_20km_pacf180
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPET89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 180 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTET98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 174 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTET98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 174 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMET98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf186 b/parm/wmo/grib2_awpgfs_20km_pacf186
index f583cb1c68..0467f24228 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf186
+++ b/parm/wmo/grib2_awpgfs_20km_pacf186
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 186 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 180 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 180 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf192 b/parm/wmo/grib2_awpgfs_20km_pacf192
index 222ac22555..5ab98bd8a0 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf192
+++ b/parm/wmo/grib2_awpgfs_20km_pacf192
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 192 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 186 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 186 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf198 b/parm/wmo/grib2_awpgfs_20km_pacf198
index f3d6a5993c..9ee35392b4 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf198
+++ b/parm/wmo/grib2_awpgfs_20km_pacf198
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 198 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 192 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 192 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf204 b/parm/wmo/grib2_awpgfs_20km_pacf204
index 8ba14d995c..e4a52e294b 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf204
+++ b/parm/wmo/grib2_awpgfs_20km_pacf204
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 204 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 198 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 198 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf210 b/parm/wmo/grib2_awpgfs_20km_pacf210
index bd16958c58..b7eacf8a2c 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf210
+++ b/parm/wmo/grib2_awpgfs_20km_pacf210
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 210 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 204 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 204 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf216 b/parm/wmo/grib2_awpgfs_20km_pacf216
index 2dbf7c239b..3e501ccd25 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf216
+++ b/parm/wmo/grib2_awpgfs_20km_pacf216
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 216 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 210 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 210 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf222 b/parm/wmo/grib2_awpgfs_20km_pacf222
index aa95af5bb2..60ab11be32 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf222
+++ b/parm/wmo/grib2_awpgfs_20km_pacf222
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 222 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 216 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 216 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf228 b/parm/wmo/grib2_awpgfs_20km_pacf228
index 96827f922e..0cd0becc77 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf228
+++ b/parm/wmo/grib2_awpgfs_20km_pacf228
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEX89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 228 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEX98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 222 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEX98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 222 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEX98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf234 b/parm/wmo/grib2_awpgfs_20km_pacf234
index 8a2952c88c..bd779e552c 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf234
+++ b/parm/wmo/grib2_awpgfs_20km_pacf234
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPEZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 234 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 228 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTEZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 228 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMEZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pacf240 b/parm/wmo/grib2_awpgfs_20km_pacf240
index 6e5585597e..1072f507e5 100644
--- a/parm/wmo/grib2_awpgfs_20km_pacf240
+++ b/parm/wmo/grib2_awpgfs_20km_pacf240
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPEY89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 240 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTEY98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 234 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTEY98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 234 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMEY98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 240 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof003 b/parm/wmo/grib2_awpgfs_20km_pricof003
index a5e85cc253..9b5d15f73f 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof003
+++ b/parm/wmo/grib2_awpgfs_20km_pricof003
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 3 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 3 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 3 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof006 b/parm/wmo/grib2_awpgfs_20km_pricof006
index a39a57d385..f1094652d7 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof006
+++ b/parm/wmo/grib2_awpgfs_20km_pricof006
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFB89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 6 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFB98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 0 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFB98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 0 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 0 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 0 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFB98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof009 b/parm/wmo/grib2_awpgfs_20km_pricof009
index 2429e41cb9..3f036ae903 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof009
+++ b/parm/wmo/grib2_awpgfs_20km_pricof009
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 9 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 9 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 9 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof012 b/parm/wmo/grib2_awpgfs_20km_pricof012
index 5434a6d5df..db6eb8cde6 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof012
+++ b/parm/wmo/grib2_awpgfs_20km_pricof012
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFC89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 12 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFC98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 6 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFC98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 6 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 6 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 6 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFC98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof015 b/parm/wmo/grib2_awpgfs_20km_pricof015
index 7ae9210d5c..0fb1b7b098 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof015
+++ b/parm/wmo/grib2_awpgfs_20km_pricof015
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 15 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 15 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 15 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof018 b/parm/wmo/grib2_awpgfs_20km_pricof018
index dac0fc8e7a..d080ac0222 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof018
+++ b/parm/wmo/grib2_awpgfs_20km_pricof018
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFD89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 18 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFD98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 12 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFD98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 12 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 12 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 12 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFD98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof021 b/parm/wmo/grib2_awpgfs_20km_pricof021
index 3827769168..527a1c4a29 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof021
+++ b/parm/wmo/grib2_awpgfs_20km_pricof021
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 21 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 21 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 21 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof024 b/parm/wmo/grib2_awpgfs_20km_pricof024
index 7790a82dd6..1b0320f1a9 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof024
+++ b/parm/wmo/grib2_awpgfs_20km_pricof024
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFE89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 24 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFE98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 18 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFE98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 18 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 18 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 18 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFE98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof027 b/parm/wmo/grib2_awpgfs_20km_pricof027
index c4061b3cfb..e18b7c1530 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof027
+++ b/parm/wmo/grib2_awpgfs_20km_pricof027
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 27 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 27 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 27 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof030 b/parm/wmo/grib2_awpgfs_20km_pricof030
index ecd42983ea..739aa23a64 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof030
+++ b/parm/wmo/grib2_awpgfs_20km_pricof030
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFF89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 30 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFF98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 24 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFF98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 24 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 24 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 24 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFF98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof033 b/parm/wmo/grib2_awpgfs_20km_pricof033
index b52c822509..79278be454 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof033
+++ b/parm/wmo/grib2_awpgfs_20km_pricof033
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 33 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 33 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 33 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof036 b/parm/wmo/grib2_awpgfs_20km_pricof036
index f7a61e77a0..bfb23127d5 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof036
+++ b/parm/wmo/grib2_awpgfs_20km_pricof036
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFG89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 36 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFG98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 30 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFG98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 30 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 30 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 30 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFG98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof039 b/parm/wmo/grib2_awpgfs_20km_pricof039
index b9db65e3a6..c050ee66f0 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof039
+++ b/parm/wmo/grib2_awpgfs_20km_pricof039
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 39 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 39 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 39 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof042 b/parm/wmo/grib2_awpgfs_20km_pricof042
index 6a48338337..6f9a027648 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof042
+++ b/parm/wmo/grib2_awpgfs_20km_pricof042
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFH89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 42 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFH98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 36 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFH98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 36 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 36 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 36 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFH98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof045 b/parm/wmo/grib2_awpgfs_20km_pricof045
index 7c17dfb93b..6457ace251 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof045
+++ b/parm/wmo/grib2_awpgfs_20km_pricof045
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 45 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 45 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 45 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof048 b/parm/wmo/grib2_awpgfs_20km_pricof048
index 8314c7f4c2..f514269baf 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof048
+++ b/parm/wmo/grib2_awpgfs_20km_pricof048
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 48 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 42 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 42 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 42 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 42 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFI98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof051 b/parm/wmo/grib2_awpgfs_20km_pricof051
index 4d3c979ab9..d51ab115eb 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof051
+++ b/parm/wmo/grib2_awpgfs_20km_pricof051
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 51 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 51 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 51 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof054 b/parm/wmo/grib2_awpgfs_20km_pricof054
index 733651a581..6bd7cf6482 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof054
+++ b/parm/wmo/grib2_awpgfs_20km_pricof054
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFI89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 54 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFI98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 48 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFI98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 48 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 48 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 48 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof057 b/parm/wmo/grib2_awpgfs_20km_pricof057
index 69ab83ae93..e92a1d7d81 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof057
+++ b/parm/wmo/grib2_awpgfs_20km_pricof057
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 57 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 57 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 57 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof060 b/parm/wmo/grib2_awpgfs_20km_pricof060
index 8e6f93bf47..a7d9ef8d80 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof060
+++ b/parm/wmo/grib2_awpgfs_20km_pricof060
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFJ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 60 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFJ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 54 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFJ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 54 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 54 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 54 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFJ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof063 b/parm/wmo/grib2_awpgfs_20km_pricof063
index 7e476e243e..408daa4b81 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof063
+++ b/parm/wmo/grib2_awpgfs_20km_pricof063
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 63 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 63 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 63 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof066 b/parm/wmo/grib2_awpgfs_20km_pricof066
index 25e03e849b..9723019076 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof066
+++ b/parm/wmo/grib2_awpgfs_20km_pricof066
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 66 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 60 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 60 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 60 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 60 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof069 b/parm/wmo/grib2_awpgfs_20km_pricof069
index 4cb56bae36..6ac5c30fc6 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof069
+++ b/parm/wmo/grib2_awpgfs_20km_pricof069
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 69 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 69 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 69 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof072 b/parm/wmo/grib2_awpgfs_20km_pricof072
index 61ebff1404..c0c67aafe5 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof072
+++ b/parm/wmo/grib2_awpgfs_20km_pricof072
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFK89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 72 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFK98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 66 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFK98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 66 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 66 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 66 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFK98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof075 b/parm/wmo/grib2_awpgfs_20km_pricof075
index b53bec9eee..98060b6045 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof075
+++ b/parm/wmo/grib2_awpgfs_20km_pricof075
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 75 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 75 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 75 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof078 b/parm/wmo/grib2_awpgfs_20km_pricof078
index 568533f6ba..74770cce63 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof078
+++ b/parm/wmo/grib2_awpgfs_20km_pricof078
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 78 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 72 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 72 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 72 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 72 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof081 b/parm/wmo/grib2_awpgfs_20km_pricof081
index aa1ba8ce90..b5d61c8fed 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof081
+++ b/parm/wmo/grib2_awpgfs_20km_pricof081
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 81 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 81 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 81 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof084 b/parm/wmo/grib2_awpgfs_20km_pricof084
index 41fec436f0..01038a7d50 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof084
+++ b/parm/wmo/grib2_awpgfs_20km_pricof084
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFL89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 84 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFL98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 78 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFL98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 78 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 78 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 78 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFL98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof090 b/parm/wmo/grib2_awpgfs_20km_pricof090
index 1d09cfad2f..2615f7070c 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof090
+++ b/parm/wmo/grib2_awpgfs_20km_pricof090
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 90 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 84 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 84 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 84 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 84 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof096 b/parm/wmo/grib2_awpgfs_20km_pricof096
index fe587a6886..8cb1875cf8 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof096
+++ b/parm/wmo/grib2_awpgfs_20km_pricof096
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFM89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 96 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFM98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 90 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFM98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 90 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 90 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 90 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFM98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof102 b/parm/wmo/grib2_awpgfs_20km_pricof102
index bf931e2cf4..b1c5e27573 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof102
+++ b/parm/wmo/grib2_awpgfs_20km_pricof102
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 102 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 96 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 96 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 96 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 96 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof108 b/parm/wmo/grib2_awpgfs_20km_pricof108
index d250c49331..ca0669c405 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof108
+++ b/parm/wmo/grib2_awpgfs_20km_pricof108
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFN89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 108 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFN98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 102 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFN98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 102 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 102 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 102 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFN98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof114 b/parm/wmo/grib2_awpgfs_20km_pricof114
index 7c537d9b7e..7e8ecb2e62 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof114
+++ b/parm/wmo/grib2_awpgfs_20km_pricof114
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 114 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 108 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 108 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 108 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 108 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof120 b/parm/wmo/grib2_awpgfs_20km_pricof120
index c514b4356c..5217866af7 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof120
+++ b/parm/wmo/grib2_awpgfs_20km_pricof120
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFO89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 120 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFO98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 114 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFO98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 114 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 114 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 114 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFO98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof126 b/parm/wmo/grib2_awpgfs_20km_pricof126
index bfc5f89f69..2351bef63b 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof126
+++ b/parm/wmo/grib2_awpgfs_20km_pricof126
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 126 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 120 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 120 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 120 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 120 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof132 b/parm/wmo/grib2_awpgfs_20km_pricof132
index bfbfada303..2404bdb11b 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof132
+++ b/parm/wmo/grib2_awpgfs_20km_pricof132
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFP89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 132 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFP98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 126 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFP98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 126 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 126 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 126 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFP98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof138 b/parm/wmo/grib2_awpgfs_20km_pricof138
index 2143bcbadf..3b3a4e6115 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof138
+++ b/parm/wmo/grib2_awpgfs_20km_pricof138
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 138 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 132 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 132 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 132 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 132 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof144 b/parm/wmo/grib2_awpgfs_20km_pricof144
index cc62b63206..17c1fa2877 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof144
+++ b/parm/wmo/grib2_awpgfs_20km_pricof144
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFQ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 144 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFQ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 138 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFQ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 138 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 138 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 138 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFQ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof150 b/parm/wmo/grib2_awpgfs_20km_pricof150
index 6f4fe892ef..15854cc7f7 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof150
+++ b/parm/wmo/grib2_awpgfs_20km_pricof150
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 150 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 144 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 144 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 144 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 144 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof156 b/parm/wmo/grib2_awpgfs_20km_pricof156
index 6c9e1671b3..db29922c8e 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof156
+++ b/parm/wmo/grib2_awpgfs_20km_pricof156
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFR89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 156 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFR98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 150 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFR98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 150 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 150 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 150 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFR98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof162 b/parm/wmo/grib2_awpgfs_20km_pricof162
index fd055eaca8..d293e1cef0 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof162
+++ b/parm/wmo/grib2_awpgfs_20km_pricof162
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 162 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 156 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 156 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 156 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 156 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof168 b/parm/wmo/grib2_awpgfs_20km_pricof168
index 3f441ed4ae..74ee9cb83a 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof168
+++ b/parm/wmo/grib2_awpgfs_20km_pricof168
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFS89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 168 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFS98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 162 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFS98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 162 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 162 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 162 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFS98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof174 b/parm/wmo/grib2_awpgfs_20km_pricof174
index aee3d7ac21..21a2c3a9f2 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof174
+++ b/parm/wmo/grib2_awpgfs_20km_pricof174
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 174 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 168 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 168 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 168 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 168 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof180 b/parm/wmo/grib2_awpgfs_20km_pricof180
index 3ec98e42e0..7a9398a386 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof180
+++ b/parm/wmo/grib2_awpgfs_20km_pricof180
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFT89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 180 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFT98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 174 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFT98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 174 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 174 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 174 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFT98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof186 b/parm/wmo/grib2_awpgfs_20km_pricof186
index 2c7a8c2619..b044720a1d 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof186
+++ b/parm/wmo/grib2_awpgfs_20km_pricof186
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 186 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 180 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 180 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 180 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 180 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof192 b/parm/wmo/grib2_awpgfs_20km_pricof192
index 90c66e5767..8aa9176278 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof192
+++ b/parm/wmo/grib2_awpgfs_20km_pricof192
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFU89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 192 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFU98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 186 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFU98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 186 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 186 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 186 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFU98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof198 b/parm/wmo/grib2_awpgfs_20km_pricof198
index 50e04eb42a..4c29dc0013 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof198
+++ b/parm/wmo/grib2_awpgfs_20km_pricof198
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 198 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 192 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 192 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 192 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 192 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof204 b/parm/wmo/grib2_awpgfs_20km_pricof204
index f18f237151..4d7dd21709 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof204
+++ b/parm/wmo/grib2_awpgfs_20km_pricof204
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFV89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 204 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFV98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 198 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFV98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 198 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 198 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 198 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFV98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof210 b/parm/wmo/grib2_awpgfs_20km_pricof210
index 8d753952dd..96bb382bbd 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof210
+++ b/parm/wmo/grib2_awpgfs_20km_pricof210
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 210 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 204 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 204 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 204 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 204 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof216 b/parm/wmo/grib2_awpgfs_20km_pricof216
index af9bd58622..e732b7c7bc 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof216
+++ b/parm/wmo/grib2_awpgfs_20km_pricof216
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFW89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 216 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFW98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 210 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFW98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 210 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 210 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 210 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFW98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof222 b/parm/wmo/grib2_awpgfs_20km_pricof222
index 774c31319f..88ca3a3e6f 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof222
+++ b/parm/wmo/grib2_awpgfs_20km_pricof222
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 222 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 216 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 216 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 216 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 216 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof228 b/parm/wmo/grib2_awpgfs_20km_pricof228
index 6947978f81..17e669afa7 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof228
+++ b/parm/wmo/grib2_awpgfs_20km_pricof228
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFX89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 228 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFX98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 222 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFX98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 222 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 222 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 222 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFX98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof234 b/parm/wmo/grib2_awpgfs_20km_pricof234
index 0761e4dec3..f380eda313 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof234
+++ b/parm/wmo/grib2_awpgfs_20km_pricof234
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='ZPFZ89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 234 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 228 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='ZTFZ98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 228 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 228 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 228 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='ZMFZ98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib2_awpgfs_20km_pricof240 b/parm/wmo/grib2_awpgfs_20km_pricof240
index 438240e80a..d284837b9f 100644
--- a/parm/wmo/grib2_awpgfs_20km_pricof240
+++ b/parm/wmo/grib2_awpgfs_20km_pricof240
@@ -251,7 +251,7 @@
 &GRIBIDS DESC=' MSLET    Mean Sea Level ',WMOHEAD='YPFY89 KWBC',PDTN= 0 ,PDT=  3 192 2 0 96 0 0 1 240 101 0 0 255 0 0 /
 &GRIBIDS DESC=' TMIN     2 m above ground ',WMOHEAD='YTFY98 KWBC',PDTN= 8 ,PDT=  0 5 2 0 96 0 0 1 234 103 0 2 255 0 0 /
 &GRIBIDS DESC=' TMAX     2 m above ground ',WMOHEAD='YTFY98 KWBC',PDTN= 8 ,PDT=  0 4 2 0 96 0 0 1 234 103 0 2 255 0 0 /
-&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 8 ,PDT=  1 192 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 8 ,PDT=  1 193 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 8 ,PDT=  1 194 2 0 96 0 0 1 234 1 0 0 255 0 0 /
-&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 8 ,PDT=  1 195 2 0 96 0 0 1 234 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CRAIN    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 0 ,PDT=  1 192 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CFRZR    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 0 ,PDT=  1 193 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CICEP    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 0 ,PDT=  1 194 2 0 96 0 0 1 240 1 0 0 255 0 0 /
+&GRIBIDS DESC=' CSNOW    Surface ',WMOHEAD='YMFY98 KWBC',PDTN= 0 ,PDT=  1 195 2 0 96 0 0 1 240 1 0 0 255 0 0 /
diff --git a/parm/wmo/grib_awpgfs000.211 b/parm/wmo/grib_awpgfs000.211
deleted file mode 100755
index b7817939b9..0000000000
--- a/parm/wmo/grib_awpgfs000.211
+++ /dev/null
@@ -1,387 +0,0 @@
-  00001C02  0751FF80  076403E8...00010000       07    1000.0 MB   HGT           
-    211  00  YHQA99   0                        EOM
-  00001C02  0751FF80  076403CF...00010000       07     975.0 MB   HGT
-    211  00  YHQA93   0                        EOM
-  00001C02  0751FF80  076403B6...00010000       07     950.0 MB   HGT
-    211  00  YHQA95   0                        EOM
-  00001C02  0751FF80  0764039D...00010000       07     925.0 MB   HGT
-    211  00  YHQA92   0                        EOM
-  00001C02  0751FF80  07640384...00010000       07     900.0 MB   HGT
-    211  00  YHQA90   0                        EOM
-  00001C02  0751FF80  0764036B...00010000       07     875.0 MB   HGT
-    211  00  YHQA91   0                        EOM
-  00001C02  0751FF80  07640352...00010000       07     850.0 MB   HGT           
-    211  00  YHQA85   0                        EOM
-  00001C02  0751FF80  07640339...00010000       07     825.0 MB   HGT
-    211  00  YHQA82   0                        EOM
-  00001C02  0751FF80  07640320...00010000       07     800.0 MB   HGT
-    211  00  YHQA80   0                        EOM
-  00001C02  0751FF80  07640307...00010000       07     775.0 MB   HGT
-    211  00  YHQA77   0                        EOM
-  00001C02  0751FF80  076402EE...00010000       07     750.0 MB   HGT
-    211  00  YHQA75   0                        EOM
-  00001C02  0751FF80  076402D5...00010000       07     725.0 MB   HGT
-    211  00  YHQA72   0                        EOM
-  00001C02  0751FF80  076402BC...00010000       07     700.0 MB   HGT           
-    211  00  YHQA70   0                        EOM
-  00001C02  0751FF80  076402A3...00010000       07     675.0 MB   HGT
-    211  00  YHQA67   0                        EOM
-  00001C02  0751FF80  0764028A...00010000       07     650.0 MB   HGT
-    211  00  YHQA65   0                        EOM
-  00001C02  0751FF80  07640271...00010000       07     625.0 MB   HGT
-    211  00  YHQA62   0                        EOM
-  00001C02  0751FF80  07640258...00010000       07     600.0 MB   HGT
-    211  00  YHQA60   0                        EOM
-  00001C02  0751FF80  0764023F...00010000       07     575.0 MB   HGT
-    211  00  YHQA57   0                        EOM
-  00001C02  0751FF80  07640226...00010000       07     550.0 MB   HGT
-    211  00  YHQA55   0                        EOM
-  00001C02  0751FF80  0764020D...00010000       07     525.0 MB   HGT
-    211  00  YHQA52   0                        EOM
-  00001C02  0751FF80  076401F4...00010000       07     500.0 MB   HGT           
-    211  00  YHQA50   0                        EOM
-  00001C02  0751FF80  076401C2...00010000       07     450.0 MB   HGT
-    211  00  YHQA45   0                        EOM
-  00001C02  0751FF80  07640190...00010000       07     400.0 MB   HGT           
-    211  00  YHQA40   0                        EOM
-  00001C02  0751FF80  0764015E...00010000       07     350.0 MB   HGT
-    211  00  YHQA35   0                        EOM
-  00001C02  0751FF80  0764012C...00010000       07     300.0 MB   HGT           
-    211  00  YHQA30   0                        EOM
-  00001C02  0751FF80  076400FA...00010000       07     250.0 MB   HGT           
-    211  00  YHQA25   0                        EOM
-  00001C02  0751FF80  076400C8...00010000       07     200.0 MB   HGT           
-    211  00  YHQA20   0                        EOM
-  00001C02  0751FF80  07640096...00010000       07     150.0 MB   HGT           
-    211  00  YHQA15   0                        EOM
-  00001C02  0751FF80  07640064...00010000       07     100.0 MB   HGT           
-    211  00  YHQA10   0                        EOM
-  00001C02  0751FF80  216403E8...00010000       33    1000.0 MB   U GRD         
-    211  00  YUQA99   0                        EOM 
-  00001C02  0751FF80  216403CF...00010000       33     975.0 MB   U GRD         
-    211  00  YUQA93   0                        EOM 
-  00001C02  0751FF80  216403B6...00010000       33     950.0 MB   U GRD
-    211  00  YUQA95   0                        EOM
-  00001C02  0751FF80  2164039D...00010000       33     925.0 MB   U GRD
-    211  00  YUQA92   0                        EOM
-  00001C02  0751FF80  21640384...00010000       33     900.0 MB   U GRD
-    211  00  YUQA90   0                        EOM
-  00001C02  0751FF80  2164036B...00010000       33     875.0 MB   U GRD
-    211  00  YUQA91   0                        EOM
-  00001C02  0751FF80  21640352...00010000       33     850.0 MB   U GRD         
-    211  00  YUQA85   0                        EOM 
-  00001C02  0751FF80  21640339...00010000       33     825.0 MB   U GRD
-    211  00  YUQA82   0                        EOM
-  00001C02  0751FF80  21640320...00010000       33     800.0 MB   U GRD
-    211  00  YUQA80   0                        EOM
-  00001C02  0751FF80  21640307...00010000       33     775.0 MB   U GRD
-    211  00  YUQA77   0                        EOM
-  00001C02  0751FF80  216402EE...00010000       33     750.0 MB   U GRD
-    211  00  YUQA75   0                        EOM
-  00001C02  0751FF80  216402D5...00010000       33     725.0 MB   U GRD
-    211  00  YUQA72   0                        EOM
-  00001C02  0751FF80  216402BC...00010000       33     700.0 MB   U GRD         
-    211  00  YUQA70   0                        EOM
-  00001C02  0751FF80  216402A3...00010000       33     675.0 MB   U GRD
-    211  00  YUQA67   0                        EOM
-  00001C02  0751FF80  2164028A...00010000       33     650.0 MB   U GRD
-    211  00  YUQA65   0                        EOM
-  00001C02  0751FF80  21640271...00010000       33     625.0 MB   U GRD
-    211  00  YUQA62   0                        EOM
-  00001C02  0751FF80  21640258...00010000       33     600.0 MB   U GRD
-    211  00  YUQA60   0                        EOM
-  00001C02  0751FF80  2164023F...00010000       33     575.0 MB   U GRD
-    211  00  YUQA57   0                        EOM
-  00001C02  0751FF80  21640226...00010000       33     550.0 MB   U GRD
-    211  00  YUQA55   0                        EOM
-  00001C02  0751FF80  2164020D...00010000       33     525.0 MB   U GRD
-    211  00  YUQA52   0                        EOM
-  00001C02  0751FF80  216401F4...00010000       33     500.0 MB   U GRD         
-    211  00  YUQA50   0                        EOM
-  00001C02  0751FF80  216401C2...00010000       33     450.0 MB   U GRD
-    211  00  YUQA45   0                        EOM
-  00001C02  0751FF80  21640190...00010000       33     400.0 MB   U GRD         
-    211  00  YUQA40   0                        EOM
-  00001C02  0751FF80  2164015E...00010000       33     350.0 MB   U GRD
-    211  00  YUQA35   0                        EOM
-  00001C02  0751FF80  2164012C...00010000       33     300.0 MB   U GRD         
-    211  00  YUQA30   0                        EOM
-  00001C02  0751FF80  216400FA...00010000       33     250.0 MB   U GRD         
-    211  00  YUQA25   0                        EOM
-  00001C02  0751FF80  216400C8...00010000       33     200.0 MB   U GRD         
-    211  00  YUQA20   0                        EOM
-  00001C02  0751FF80  21640096...00010000       33     150.0 MB   U GRD         
-    211  00  YUQA15   0                        EOM
-  00001C02  0751FF80  21640064...00010000       33     100.0 MB   U GRD         
-    211  00  YUQA10   0                        EOM
-  00001C02  0751FF80  226403E8...00010000       34    1000.0 MB   V GRD
-    211  00  YVQA99   0                        EOM
-  00001C02  0751FF80  226403CF...00010000       34     975.0 MB   V GRD
-    211  00  YVQA93   0                        EOM
-  00001C02  0751FF80  226403B6...00010000       34     950.0 MB   V GRD
-    211  00  YVQA95   0                        EOM
-  00001C02  0751FF80  2264039D...00010000       34     925.0 MB   V GRD
-    211  00  YVQA92   0                        EOM
-  00001C02  0751FF80  22640384...00010000       34     900.0 MB   V GRD
-    211  00  YVQA90   0                        EOM
-  00001C02  0751FF80  2264036B...00010000       34     875.0 MB   V GRD
-    211  00  YVQA91   0                        EOM
-  00001C02  0751FF80  22640352...00010000       34     850.0 MB   V GRD         
-    211  00  YVQA85   0                        EOM
-  00001C02  0751FF80  22640339...00010000       34     825.0 MB   V GRD
-    211  00  YVQA82   0                        EOM
-  00001C02  0751FF80  22640320...00010000       34     800.0 MB   V GRD
-    211  00  YVQA80   0                        EOM
-  00001C02  0751FF80  22640307...00010000       34     775.0 MB   V GRD
-    211  00  YVQA77   0                        EOM
-  00001C02  0751FF80  226402EE...00010000       34     750.0 MB   V GRD
-    211  00  YVQA75   0                        EOM
-  00001C02  0751FF80  226402D5...00010000       34     725.0 MB   V GRD
-    211  00  YVQA72   0                        EOM
-  00001C02  0751FF80  226402BC...00010000       34     700.0 MB   V GRD
-    211  00  YVQA70   0                        EOM
-  00001C02  0751FF80  226402A3...00010000       34     675.0 MB   V GRD
-    211  00  YVQA67   0                        EOM
-  00001C02  0751FF80  2264028A...00010000       34     650.0 MB   V GRD
-    211  00  YVQA65   0                        EOM
-  00001C02  0751FF80  22640271...00010000       34     625.0 MB   V GRD
-    211  00  YVQA62   0                        EOM
-  00001C02  0751FF80  22640258...00010000       34     600.0 MB   V GRD
-    211  00  YVQA60   0                        EOM
-  00001C02  0751FF80  2264023F...00010000       34     575.0 MB   V GRD
-    211  00  YVQA57   0                        EOM
-  00001C02  0751FF80  22640226...00010000       34     550.0 MB   V GRD
-    211  00  YVQA55   0                        EOM
-  00001C02  0751FF80  2264020D...00010000       34     525.0 MB   V GRD
-    211  00  YVQA52   0                        EOM
-  00001C02  0751FF80  226401F4...00010000       34     500.0 MB   V GRD
-    211  00  YVQA50   0                        EOM
-  00001C02  0751FF80  226401C2...00010000       34     450.0 MB   V GRD
-    211  00  YVQA45   0                        EOM
-  00001C02  0751FF80  22640190...00010000       34     400.0 MB   V GRD
-    211  00  YVQA40   0                        EOM
-  00001C02  0751FF80  2264015E...00010000       34     350.0 MB   V GRD
-    211  00  YVQA35   0                        EOM
-  00001C02  0751FF80  2264012C...00010000       34     300.0 MB   V GRD         
-    211  00  YVQA30   0                        EOM
-  00001C02  0751FF80  226400FA...00010000       34     250.0 MB   V GRD         
-    211  00  YVQA25   0                        EOM
-  00001C02  0751FF80  226400C8...00010000       34     200.0 MB   V GRD         
-    211  00  YVQA20   0                        EOM
-  00001C02  0751FF80  22640096...00010000       34     150.0 MB   V GRD         
-    211  00  YVQA15   0                        EOM
-  00001C02  0751FF80  22640064...00010000       34     100.0 MB   V GRD         
-    211  00  YVQA10   0                        EOM
-  00001C02  0751FF80  02660000...00010000       02           MSL  PRMSL         
-    211  00  YPQA89   0                        EOM
-  00001C02  0751FF80  346403E8...00010000       52    1000.0 MB   R H           
-    211  00  YRQA99   0                        EOM
-  00001C02  0751FF80  346403CF...00010000       52     975.0 MB   R H           
-    211  00  YRQA93   0                        EOM
-  00001C02  0751FF80  346403B6...00010000       52     950.0 MB   R H           
-    211  00  YRQA95   0                        EOM
-  00001C02  0751FF80  3464039D...00010000       52     925.0 MB   R H           
-    211  00  YRQA92   0                        EOM
-  00001C02  0751FF80  34640384...00010000       52     900.0 MB   R H           
-    211  00  YRQA90   0                        EOM
-  00001C02  0751FF80  3464036B...00010000       52     875.0 MB   R H           
-    211  00  YRQA91   0                        EOM
-  00001C02  0751FF80  34640352...00010000       52     850.0 MB   R H           
-    211  00  YRQA85   0                        EOM
-  00001C02  0751FF80  34640339...00010000       52     825.0 MB   R H           
-    211  00  YRQA82   0                        EOM
-  00001C02  0751FF80  34640320...00010000       52     800.0 MB   R H           
-    211  00  YRQA80   0                        EOM
-  00001C02  0751FF80  34640307...00010000       52     775.0 MB   R H           
-    211  00  YRQA77   0                        EOM
-  00001C02  0751FF80  346402EE...00010000       52     750.0 MB   R H           
-    211  00  YRQA75   0                        EOM
-  00001C02  0751FF80  346402D5...00010000       52     725.0 MB   R H           
-    211  00  YRQA72   0                        EOM
-  00001C02  0751FF80  346402BC...00010000       52     700.0 MB   R H           
-    211  00  YRQA70   0                        EOM
-  00001C02  0751FF80  346402A3...00010000       52     675.0 MB   R H           
-    211  00  YRQA67   0                        EOM
-  00001C02  0751FF80  3464028A...00010000       52     650.0 MB   R H           
-    211  00  YRQA65   0                        EOM
-  00001C02  0751FF80  34640271...00010000       52     625.0 MB   R H           
-    211  00  YRQA62   0                        EOM
-  00001C02  0751FF80  34640258...00010000       52     600.0 MB   R H           
-    211  00  YRQA60   0                        EOM
-  00001C02  0751FF80  3464023F...00010000       52     575.0 MB   R H           
-    211  00  YRQA57   0                        EOM
-  00001C02  0751FF80  34640226...00010000       52     550.0 MB   R H           
-    211  00  YRQA55   0                        EOM
-  00001C02  0751FF80  3464020D...00010000       52     525.0 MB   R H           
-    211  00  YRQA52   0                        EOM
-  00001C02  0751FF80  346401F4...00010000       52     500.0 MB   R H           
-    211  00  YRQA50   0                        EOM
-  00001C02  0751FF80  346401C2...00010000       52     450.0 MB   R H           
-    211  00  YRQA45   0                        EOM
-  00001C02  0751FF80  34640190...00010000       52     400.0 MB   R H           
-    211  00  YRQA40   0                        EOM
-  00001C02  0751FF80  3464015E...00010000       52     350.0 MB   R H           
-    211  00  YRQA35   0                        EOM
-  00001C02  0751FF80  3464012C...00010000       52     300.0 MB   R H           
-    211  00  YRQA30   0                        EOM
-  00001C02  0751FF80  346400FA...00010000       52     250.0 MB   R H           
-    211  00  YRQA25   0                        EOM
-  00001C02  0751FF80  346400C8...00010000       52     200.0 MB   R H           
-    211  00  YRQA20   0                        EOM
-  00001C02  0751FF80  34640096...00010000       52     150.0 MB   R H           
-    211  00  YRQA15   0                        EOM
-  00001C02  0751FF80  34640064...00010000       52     100.0 MB   R H           
-    211  00  YRQA10   0                        EOM
-  00001C02  0751FF80  0B6403E8...00010000       11    1000.0 MB   TMP
-    211  00  YTQA99   0                        EOM
-  00001C02  0751FF80  0B6403CF...00010000       11     975.0 MB   TMP
-    211  00  YTQA93   0                        EOM
-  00001C02  0751FF80  0B6403B6...00010000       11     950.0 MB   TMP
-    211  00  YTQA95   0                        EOM
-  00001C02  0751FF80  0B64039D...00010000       11     925.0 MB   TMP
-    211  00  YTQA92   0                        EOM
-  00001C02  0751FF80  0B640384...00010000       11     900.0 MB   TMP
-    211  00  YTQA90   0                        EOM
-  00001C02  0751FF80  0B64036B...00010000       11     875.0 MB   TMP
-    211  00  YTQA91   0                        EOM
-  00001C02  0751FF80  0B640352...00010000       11     850.0 MB   TMP           
-    211  00  YTQA85   0                        EOM
-  00001C02  0751FF80  0B640339...00010000       11     825.0 MB   TMP
-    211  00  YTQA82   0                        EOM
-  00001C02  0751FF80  0B640320...00010000       11     800.0 MB   TMP
-    211  00  YTQA80   0                        EOM
-  00001C02  0751FF80  0B640307...00010000       11     775.0 MB   TMP
-    211  00  YTQA77   0                        EOM
-  00001C02  0751FF80  0B6402EE...00010000       11     750.0 MB   TMP
-    211  00  YTQA75   0                        EOM
-  00001C02  0751FF80  0B6402D5...00010000       11     725.0 MB   TMP
-    211  00  YTQA72   0                        EOM
-  00001C02  0751FF80  0B6402BC...00010000       11     700.0 MB   TMP           
-    211  00  YTQA70   0                        EOM
-  00001C02  0751FF80  0B6402A3...00010000       11     675.0 MB   TMP
-    211  00  YTQA67   0                        EOM
-  00001C02  0751FF80  0B64028A...00010000       11     650.0 MB   TMP
-    211  00  YTQA65   0                        EOM
-  00001C02  0751FF80  0B640271...00010000       11     625.0 MB   TMP
-    211  00  YTQA62   0                        EOM
-  00001C02  0751FF80  0B640258...00010000       11     600.0 MB   TMP
-    211  00  YTQA60   0                        EOM
-  00001C02  0751FF80  0B64023F...00010000       11     575.0 MB   TMP
-    211  00  YTQA57   0                        EOM
-  00001C02  0751FF80  0B640226...00010000       11     550.0 MB   TMP
-    211  00  YTQA55   0                        EOM
-  00001C02  0751FF80  0B64020D...00010000       11     525.0 MB   TMP
-    211  00  YTQA52   0                        EOM
-  00001C02  0751FF80  0B6401F4...00010000       11     500.0 MB   TMP           
-    211  00  YTQA50   0                        EOM
-  00001C02  0751FF80  0B6401C2...00010000       11     450.0 MB   TMP
-    211  00  YTQA45   0                        EOM
-  00001C02  0751FF80  0B640190...00010000       11     400.0 MB   TMP           
-    211  00  YTQA40   0                        EOM
-  00001C02  0751FF80  0B64015E...00010000       11     350.0 MB   TMP
-    211  00  YTQA35   0                        EOM
-  00001C02  0751FF80  0B64012C...00010000       11     300.0 MB   TMP           
-    211  00  YTQA30   0                        EOM
-  00001C02  0751FF80  0B6400FA...00010000       11     250.0 MB   TMP           
-    211  00  YTQA25   0                        EOM
-  00001C02  0751FF80  0B6400C8...00010000       11     200.0 MB   TMP           
-    211  00  YTQA20   0                        EOM
-  00001C02  0751FF80  0B640096...00010000       11     150.0 MB   TMP
-    211  00  YTQA15   0                        EOM
-  00001C02  0751FF80  0B640064...00010000       11     100.0 MB   TMP           
-    211  00  YTQA10   0                        EOM
-  00001C02  0751FF80  01010000...00010000       01          SFC  PRES           
-    211  00  YPQA98   0                        EOM
-  00001C02  0751FF80  346C2C64...00010000       52        44/100  R H           
-    211  00  YRQA00   0                        EOM
-  00001C02  0751FF80  36C80000...00010000       54          EATM  P WAT         
-    211  00  YFQA00   0                        EOM
-  00001C02  0751FF80  0B690002...00010000       11          2m/SFC TMP          
-    211  00  YTQA98   0                        EOM
-  00001C02  0751FF80  34741E00...00010000       52      BNDRY/SPD  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  0B070000...00010000       11            TRO TMP           
-    211  00  YTQA97   0                        EOM
-  00001C02  0751FF80  01070000...00010000       01            TRO PRES          
-    211  00  YPQA97   0                        EOM
-  00001C02  0751FF80  21741E00...00010000       33           SPD  U GRD         
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  22741E00...00010000       34           SPD  V GRD         
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  21070000...00010000       33            TRO U GRD         
-    211  00  YUQA97   0                        EOM
-  00001C02  0751FF80  22070000...00010000       34            TRO V GRD         
-    211  00  YVQA97   0                        EOM
-  00001C02  0751FF80  88070000...00010000      136            TRO VW SH         
-    211  00  YBQA97   0                        EOM
-  00001C02  0751FF80  83010000...00010000      131            SFC LFT X         
-    211  00  YXQA98   0                        EOM
-  00001C02  0751FF80  296402BC...00010000       41    700.0 MB    ABS V         
-    211  00  YCQA70   0                        EOM  
-  00001C02  0751FF80  296401F4...00010000       41    500.0 MB    ABS V         
-    211  00  YCQA50   0                        EOM
-  00001C02  0751FF80  9D010000...00010000      157          SFC   CAPE
-    211  00  YWQA98   0                        EOM
-  00001C02  0751FF80  9C010000...00010000      156          SFC   CIN
-    211  00  YYQA98   0                        EOM
-  00001C02  0751FF80  9D74B400...00010000      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQA86   0                        EOM
-  00001C02  0751FF80  9C74B400...00010000      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQA86   0                        EOM
-  00001C02  0751FF80  0B741E00...00010000       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  0B743C1E...00010000       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  0B745A3C...00010000       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  0B74785A...00010000       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  0B749678...00010000       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  0B74B496...00010000       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQA86   0                        EOM
-  00001C02  0751FF80  34743C1E...00010000       52   60 SPDY  30 SPDY  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  34745A3C...00010000       52   90 SPDY  60 SPDY  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  3474785A...00010000       52  120 SPDY  90 SPDY  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  34749678...00010000       52  150 SPDY 120 SPDY  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  3474B496...00010000       52  180 SPDY 150 SPDY  R H
-    211  00  YRQA86   0                        EOM
-  00001C02  0751FF80  21741E00...00010000       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  21743C1E...00010000       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  21745A3C...00010000       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  2174785A...00010000       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  21749678...00010000       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  2174B496...00010000       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQA86   0                        EOM
-  00001C02  0751FF80  22741E00...00010000       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  22743C1E...00010000       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  22745A3C...00010000       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  2274785A...00010000       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  22749678...00010000       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  2274B496...00010000       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQA86   0                        EOM
-  00001C02  0751FF80  0B690002...00010000       11    2  HTGL     TMP
-    211  00  YTQA98   0                        EOM
-  00001C02  0751FF80  34690002...00010000       52    2  HTGL     R H
-    211  00  YRQA98   0                        EOM
-  00001C02  0751FF80  2169000A...00010000       33   10  HTGL     U GRD
-    211  00  YUQA98   0                        EOM
-  00001C02  0751FF80  2269000A...00010000       34   10  HTGL     V GRD
-    211  00  YVQA98   0                        EOM
-  00001C02  0751FF80  07010000...00010000       07           SFC  HGT
-    211  00  YHQA98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs006.211 b/parm/wmo/grib_awpgfs006.211
deleted file mode 100755
index 6a5dbf545d..0000000000
--- a/parm/wmo/grib_awpgfs006.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...00010006       07    1000.0 MB   HGT           
-    211  00  YHQB99   0                        EOM
-  00001C02  0760FF80  076403CF...00010006       07     975.0 MB   HGT
-    211  00  YHQB93   0                        EOM
-  00001C02  0760FF80  076403B6...00010006       07     950.0 MB   HGT
-    211  00  YHQB95   0                        EOM
-  00001C02  0760FF80  0764039D...00010006       07     925.0 MB   HGT
-    211  00  YHQB92   0                        EOM
-  00001C02  0760FF80  07640384...00010006       07     900.0 MB   HGT
-    211  00  YHQB90   0                        EOM
-  00001C02  0760FF80  0764036B...00010006       07     875.0 MB   HGT
-    211  00  YHQB91   0                        EOM
-  00001C02  0760FF80  07640352...00010006       07     850.0 MB   HGT           
-    211  00  YHQB85   0                        EOM
-  00001C02  0760FF80  07640339...00010006       07     825.0 MB   HGT
-    211  00  YHQB82   0                        EOM
-  00001C02  0760FF80  07640320...00010006       07     800.0 MB   HGT
-    211  00  YHQB80   0                        EOM
-  00001C02  0760FF80  07640307...00010006       07     775.0 MB   HGT
-    211  00  YHQB77   0                        EOM
-  00001C02  0760FF80  076402EE...00010006       07     750.0 MB   HGT
-    211  00  YHQB75   0                        EOM
-  00001C02  0760FF80  076402D5...00010006       07     725.0 MB   HGT
-    211  00  YHQB72   0                        EOM
-  00001C02  0760FF80  076402BC...00010006       07     700.0 MB   HGT           
-    211  00  YHQB70   0                        EOM
-  00001C02  0760FF80  076402A3...00010006       07     675.0 MB   HGT
-    211  00  YHQB67   0                        EOM
-  00001C02  0760FF80  0764028A...00010006       07     650.0 MB   HGT
-    211  00  YHQB65   0                        EOM
-  00001C02  0760FF80  07640271...00010006       07     625.0 MB   HGT
-    211  00  YHQB62   0                        EOM
-  00001C02  0760FF80  07640258...00010006       07     600.0 MB   HGT
-    211  00  YHQB60   0                        EOM
-  00001C02  0760FF80  0764023F...00010006       07     575.0 MB   HGT
-    211  00  YHQB57   0                        EOM
-  00001C02  0760FF80  07640226...00010006       07     550.0 MB   HGT
-    211  00  YHQB55   0                        EOM
-  00001C02  0760FF80  0764020D...00010006       07     525.0 MB   HGT
-    211  00  YHQB52   0                        EOM
-  00001C02  0760FF80  076401F4...00010006       07     500.0 MB   HGT           
-    211  00  YHQB50   0                        EOM
-  00001C02  0760FF80  076401C2...00010006       07     450.0 MB   HGT
-    211  00  YHQB45   0                        EOM
-  00001C02  0760FF80  07640190...00010006       07     400.0 MB   HGT           
-    211  00  YHQB40   0                        EOM
-  00001C02  0760FF80  0764015E...00010006       07     350.0 MB   HGT
-    211  00  YHQB35   0                        EOM
-  00001C02  0760FF80  0764012C...00010006       07     300.0 MB   HGT           
-    211  00  YHQB30   0                        EOM
-  00001C02  0760FF80  076400FA...00010006       07     250.0 MB   HGT           
-    211  00  YHQB25   0                        EOM
-  00001C02  0760FF80  076400C8...00010006       07     200.0 MB   HGT           
-    211  00  YHQB20   0                        EOM
-  00001C02  0760FF80  07640096...00010006       07     150.0 MB   HGT           
-    211  00  YHQB15   0                        EOM
-  00001C02  0760FF80  07640064...00010006       07     100.0 MB   HGT           
-    211  00  YHQB10   0                        EOM
-  00001C02  0760FF80  216403E8...00010006       33    1000.0 MB   U GRD
-    211  00  YUQB99   0                        EOM
-  00001C02  0760FF80  216403CF...00010006       33     975.0 MB   U GRD
-    211  00  YUQB93   0                        EOM
-  00001C02  0760FF80  216403B6...00010006       33     950.0 MB   U GRD
-    211  00  YUQB95   0                        EOM
-  00001C02  0760FF80  2164039D...00010006       33     925.0 MB   U GRD
-    211  00  YUQB92   0                        EOM
-  00001C02  0760FF80  21640384...00010006       33     900.0 MB   U GRD
-    211  00  YUQB90   0                        EOM
-  00001C02  0760FF80  2164036B...00010006       33     875.0 MB   U GRD
-    211  00  YUQB91   0                        EOM
-  00001C02  0760FF80  21640352...00010006       33     850.0 MB   U GRD         
-    211  00  YUQB85   0                        EOM
-  00001C02  0760FF80  21640339...00010006       33     825.0 MB   U GRD
-    211  00  YUQB82   0                        EOM
-  00001C02  0760FF80  21640320...00010006       33     800.0 MB   U GRD
-    211  00  YUQB80   0                        EOM
-  00001C02  0760FF80  21640307...00010006       33     775.0 MB   U GRD
-    211  00  YUQB77   0                        EOM
-  00001C02  0760FF80  216402EE...00010006       33     750.0 MB   U GRD
-    211  00  YUQB75   0                        EOM
-  00001C02  0760FF80  216402D5...00010006       33     725.0 MB   U GRD
-    211  00  YUQB72   0                        EOM
-  00001C02  0760FF80  216402BC...00010006       33     700.0 MB   U GRD         
-    211  00  YUQB70   0                        EOM
-  00001C02  0760FF80  216402A3...00010006       33     675.0 MB   U GRD
-    211  00  YUQB67   0                        EOM
-  00001C02  0760FF80  2164028A...00010006       33     650.0 MB   U GRD
-    211  00  YUQB65   0                        EOM
-  00001C02  0760FF80  21640271...00010006       33     625.0 MB   U GRD
-    211  00  YUQB62   0                        EOM
-  00001C02  0760FF80  21640258...00010006       33     600.0 MB   U GRD
-    211  00  YUQB60   0                        EOM
-  00001C02  0760FF80  2164023F...00010006       33     575.0 MB   U GRD
-    211  00  YUQB57   0                        EOM
-  00001C02  0760FF80  21640226...00010006       33     550.0 MB   U GRD
-    211  00  YUQB55   0                        EOM
-  00001C02  0760FF80  2164020D...00010006       33     525.0 MB   U GRD
-    211  00  YUQB52   0                        EOM
-  00001C02  0760FF80  216401F4...00010006       33     500.0 MB   U GRD         
-    211  00  YUQB50   0                        EOM
-  00001C02  0760FF80  216401C2...00010006       33     450.0 MB   U GRD
-    211  00  YUQB45   0                        EOM
-  00001C02  0760FF80  21640190...00010006       33     400.0 MB   U GRD         
-    211  00  YUQB40   0                        EOM
-  00001C02  0760FF80  2164015E...00010006       33     350.0 MB   U GRD
-    211  00  YUQB35   0                        EOM
-  00001C02  0760FF80  2164012C...00010006       33     300.0 MB   U GRD         
-    211  00  YUQB30   0                        EOM
-  00001C02  0760FF80  216400FA...00010006       33     250.0 MB   U GRD         
-    211  00  YUQB25   0                        EOM
-  00001C02  0760FF80  216400C8...00010006       33     200.0 MB   U GRD         
-    211  00  YUQB20   0                        EOM
-  00001C02  0760FF80  21640096...00010006       33     150.0 MB   U GRD         
-    211  00  YUQB15   0                        EOM
-  00001C02  0760FF80  21640064...00010006       33     100.0 MB   U GRD         
-    211  00  YUQB10   0                        EOM
-  00001C02  0760FF80  226403E8...00010006       34    1000.0 MB   V GRD
-    211  00  YVQB99   0                        EOM
-  00001C02  0760FF80  226403CF...00010006       34     975.0 MB   V GRD
-    211  00  YVQB93   0                        EOM
-  00001C02  0760FF80  226403B6...00010006       34     950.0 MB   V GRD
-    211  00  YVQB95   0                        EOM
-  00001C02  0760FF80  2264039D...00010006       34     925.0 MB   V GRD
-    211  00  YVQB92   0                        EOM
-  00001C02  0760FF80  22640384...00010006       34     900.0 MB   V GRD
-    211  00  YVQB90   0                        EOM
-  00001C02  0760FF80  2264036B...00010006       34     875.0 MB   V GRD
-    211  00  YVQB91   0                        EOM
-  00001C02  0760FF80  22640352...00010006       34     850.0 MB   V GRD         
-    211  00  YVQB85   0                        EOM
-  00001C02  0760FF80  22640339...00010006       34     825.0 MB   V GRD
-    211  00  YVQB82   0                        EOM
-  00001C02  0760FF80  22640320...00010006       34     800.0 MB   V GRD
-    211  00  YVQB80   0                        EOM
-  00001C02  0760FF80  22640307...00010006       34     775.0 MB   V GRD
-    211  00  YVQB77   0                        EOM
-  00001C02  0760FF80  226402EE...00010006       34     750.0 MB   V GRD
-    211  00  YVQB75   0                        EOM
-  00001C02  0760FF80  226402D5...00010006       34     725.0 MB   V GRD
-    211  00  YVQB72   0                        EOM
-  00001C02  0760FF80  226402BC...00010006       34     700.0 MB   V GRD         
-    211  00  YVQB70   0                        EOM
-  00001C02  0760FF80  226402A3...00010006       34     675.0 MB   V GRD
-    211  00  YVQB67   0                        EOM
-  00001C02  0760FF80  2264028A...00010006       34     650.0 MB   V GRD
-    211  00  YVQB65   0                        EOM
-  00001C02  0760FF80  22640271...00010006       34     625.0 MB   V GRD
-    211  00  YVQB62   0                        EOM
-  00001C02  0760FF80  22640258...00010006       34     600.0 MB   V GRD
-    211  00  YVQB60   0                        EOM
-  00001C02  0760FF80  2264023F...00010006       34     575.0 MB   V GRD
-    211  00  YVQB57   0                        EOM
-  00001C02  0760FF80  22640226...00010006       34     550.0 MB   V GRD
-    211  00  YVQB55   0                        EOM
-  00001C02  0760FF80  2264020D...00010006       34     525.0 MB   V GRD
-    211  00  YVQB52   0                        EOM
-  00001C02  0760FF80  226401F4...00010006       34     500.0 MB   V GRD         
-    211  00  YVQB50   0                        EOM
-  00001C02  0760FF80  226401C2...00010006       34     450.0 MB   V GRD
-    211  00  YVQB45   0                        EOM
-  00001C02  0760FF80  22640190...00010006       34     400.0 MB   V GRD         
-    211  00  YVQB40   0                        EOM
-  00001C02  0760FF80  2264015E...00010006       34     350.0 MB   V GRD
-    211  00  YVQB35   0                        EOM
-  00001C02  0760FF80  2264012C...00010006       34     300.0 MB   V GRD         
-    211  00  YVQB30   0                        EOM
-  00001C02  0760FF80  226400FA...00010006       34     250.0 MB   V GRD         
-    211  00  YVQB25   0                        EOM
-  00001C02  0760FF80  226400C8...00010006       34     200.0 MB   V GRD         
-    211  00  YVQB20   0                        EOM
-  00001C02  0760FF80  22640096...00010006       34     150.0 MB   V GRD         
-    211  00  YVQB15   0                        EOM
-  00001C02  0760FF80  22640064...00010006       34     100.0 MB   V GRD         
-    211  00  YVQB10   0                        EOM
-  00001C02  0760FF80  02660000...00010006       02           MSL  PRMSL         
-    211  00  YPQB89   0                        EOM
-  00001C02  0760FF80  346403E8...00010006       52    1000.0 MB   R H
-    211  00  YRQB99   0                        EOM
-  00001C02  0760FF80  346403CF...00010006       52     975.0 MB   R H
-    211  00  YRQB93   0                        EOM
-  00001C02  0760FF80  346403B6...00010006       52     950.0 MB   R H
-    211  00  YRQB95   0                        EOM
-  00001C02  0760FF80  3464039D...00010006       52     925.0 MB   R H
-    211  00  YRQB92   0                        EOM
-  00001C02  0760FF80  34640384...00010006       52     900.0 MB   R H
-    211  00  YRQB90   0                        EOM
-  00001C02  0760FF80  3464036B...00010006       52     875.0 MB   R H
-    211  00  YRQB91   0                        EOM
-  00001C02  0760FF80  34640352...00010006       52     850.0 MB   R H           
-    211  00  YRQB85   0                        EOM
-  00001C02  0760FF80  34640339...00010006       52     825.0 MB   R H
-    211  00  YRQB82   0                        EOM
-  00001C02  0760FF80  34640320...00010006       52     800.0 MB   R H
-    211  00  YRQB80   0                        EOM
-  00001C02  0760FF80  34640307...00010006       52     775.0 MB   R H
-    211  00  YRQB77   0                        EOM
-  00001C02  0760FF80  346402EE...00010006       52     750.0 MB   R H
-    211  00  YRQB75   0                        EOM
-  00001C02  0760FF80  346402D5...00010006       52     725.0 MB   R H
-    211  00  YRQB72   0                        EOM
-  00001C02  0760FF80  346402BC...00010006       52     700.0 MB   R H           
-    211  00  YRQB70   0                        EOM
-  00001C02  0760FF80  346402A3...00010006       52     675.0 MB   R H
-    211  00  YRQB67   0                        EOM
-  00001C02  0760FF80  3464028A...00010006       52     650.0 MB   R H
-    211  00  YRQB65   0                        EOM
-  00001C02  0760FF80  34640271...00010006       52     625.0 MB   R H
-    211  00  YRQB62   0                        EOM
-  00001C02  0760FF80  34640258...00010006       52     600.0 MB   R H
-    211  00  YRQB60   0                        EOM
-  00001C02  0760FF80  3464023F...00010006       52     575.0 MB   R H
-    211  00  YRQB57   0                        EOM
-  00001C02  0760FF80  34640226...00010006       52     550.0 MB   R H
-    211  00  YRQB55   0                        EOM
-  00001C02  0760FF80  3464020D...00010006       52     525.0 MB   R H
-    211  00  YRQB52   0                        EOM
-  00001C02  0760FF80  346401F4...00010006       52     500.0 MB   R H           
-    211  00  YRQB50   0                        EOM
-  00001C02  0760FF80  346401C2...00010006       52     450.0 MB   R H
-    211  00  YRQB45   0                        EOM
-  00001C02  0760FF80  34640190...00010006       52     400.0 MB   R H           
-    211  00  YRQB40   0                        EOM
-  00001C02  0760FF80  3464015E...00010006       52     350.0 MB   R H
-    211  00  YRQB35   0                        EOM
-  00001C02  0760FF80  3464012C...00010006       52     300.0 MB   R H           
-    211  00  YRQB30   0                        EOM
-  00001C02  0760FF80  346400FA...00010006       52     250.0 MB   R H
-    211  00  YRQB25   0                        EOM
-  00001C02  0760FF80  346400C8...00010006       52     200.0 MB   R H
-    211  00  YRQB20   0                        EOM
-  00001C02  0760FF80  34640096...00010006       52     150.0 MB   R H
-    211  00  YRQB15   0                        EOM
-  00001C02  0760FF80  34640064...00010006       52     100.0 MB   R H
-    211  00  YRQB10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010006       11    1000.0 MB   TMP
-    211  00  YTQB99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010006       11     975.0 MB   TMP
-    211  00  YTQB93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010006       11     950.0 MB   TMP
-    211  00  YTQB95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010006       11     925.0 MB   TMP
-    211  00  YTQB92   0                        EOM
-  00001C02  0760FF80  0B640384...00010006       11     900.0 MB   TMP
-    211  00  YTQB90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010006       11     875.0 MB   TMP
-    211  00  YTQB91   0                        EOM
-  00001C02  0760FF80  0B640352...00010006       11     850.0 MB   TMP           
-    211  00  YTQB85   0                        EOM
-  00001C02  0760FF80  0B640339...00010006       11     825.0 MB   TMP
-    211  00  YTQB82   0                        EOM
-  00001C02  0760FF80  0B640320...00010006       11     800.0 MB   TMP
-    211  00  YTQB80   0                        EOM
-  00001C02  0760FF80  0B640307...00010006       11     775.0 MB   TMP
-    211  00  YTQB77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010006       11     750.0 MB   TMP
-    211  00  YTQB75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010006       11     725.0 MB   TMP
-    211  00  YTQB72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010006       11     700.0 MB   TMP           
-    211  00  YTQB70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010006       11     675.0 MB   TMP
-    211  00  YTQB67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010006       11     650.0 MB   TMP
-    211  00  YTQB65   0                        EOM
-  00001C02  0760FF80  0B640271...00010006       11     625.0 MB   TMP
-    211  00  YTQB62   0                        EOM
-  00001C02  0760FF80  0B640258...00010006       11     600.0 MB   TMP
-    211  00  YTQB60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010006       11     575.0 MB   TMP
-    211  00  YTQB57   0                        EOM
-  00001C02  0760FF80  0B640226...00010006       11     550.0 MB   TMP
-    211  00  YTQB55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010006       11     525.0 MB   TMP
-    211  00  YTQB52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010006       11     500.0 MB   TMP           
-    211  00  YTQB50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010006       11     450.0 MB   TMP
-    211  00  YTQB45   0                        EOM
-  00001C02  0760FF80  0B640190...00010006       11     400.0 MB   TMP           
-    211  00  YTQB40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010006       11     350.0 MB   TMP
-    211  00  YTQB35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010006       11     300.0 MB   TMP           
-    211  00  YTQB30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010006       11     250.0 MB   TMP           
-    211  00  YTQB25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010006       11     200.0 MB   TMP           
-    211  00  YTQB20   0                        EOM
-  00001C02  0760FF80  0B640096...00010006       11     150.0 MB   TMP           
-    211  00  YTQB15   0                        EOM
-  00001C02  0760FF80  0B640064...00010006       11     100.0 MB   TMP           
-    211  00  YTQB10   0                        EOM
-  00001C02  0760FF80  28640352...00010006       40     850.0 MB  DZDT           
-    211  00  YOQB85   0                        EOM
-  00001C02  0760FF80  286402BC...00010006       40     700.0 MB  DZDT           
-    211  00  YOQB70   0                        EOM
-  00001C02  0760FF80  286401F4...00010006       40     500.0 MB  DZDT           
-    211  00  YOQB50   0                        EOM
-  00001C02  0760FF80  28640190...00010006       40     400.0 MB  DZDT           
-    211  00  YOQB40   0                        EOM
-  00001C02  0760FF80  2864012C...00010006       40     300.0 MB  DZDT           
-    211  00  YOQB30   0                        EOM
-  00001C02  0760FF80  286400FA...00010006       40     250.0 MB  DZDT           
-    211  00  YOQB25   0                        EOM
-  00001C02  0760FF80  286400C8...00010006       40     200.0 MB  DZDT           
-    211  00  YOQB20   0                        EOM
-  00001C02  0760FF80  28640096...00010006       40     150.0 MB  DZDT           
-    211  00  YOQB15   0                        EOM
-  00001C02  0760FF80  28640064...00010006       40     100.0 MB  DZDT           
-    211  00  YOQB10   0                        EOM
-  00001C02  0760FF80  01010000...00010006       01          SFC  PRES           
-    211  00  YPQB98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010006       52        44/100  R H           
-    211  00  YRQB00   0                        EOM
-  00001C02  0760FF80  36C80000...00010006       54          EATM  P WAT         
-    211  00  YFQB00   0                        EOM
-  00001C02  0760FF80  0B690002...00010006       11          2m/SFC TMP          
-    211  00  YTQB98   0                        EOM
-  00001C02  0760FF80  34741E00...00010006       52      BNDRY/SPD  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  0B070000...00010006       11            TRO TMP           
-    211  00  YTQB97   0                        EOM
-  00001C02  0760FF80  01070000...00010006       01            TRO PRES          
-    211  00  YPQB97   0                        EOM
-  00001C02  0760FF80  21741E00...00010006       33           SPD  U GRD         
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  22741E00...00010006       34           SPD  V GRD         
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  21070000...00010006       33            TRO U GRD         
-    211  00  YUQB97   0                        EOM
-  00001C02  0760FF80  22070000...00010006       34            TRO V GRD         
-    211  00  YVQB97   0                        EOM
-  00001C02  0760FF80  88070000...00010006      136            TRO VW SH         
-    211  00  YBQB97   0                        EOM
-  00001C02  0760FF80  3D010000...00010006       61            SFC A PCP         
-    211  00  YEQB98   0                        EOM
-  00001C02  0760FF80  83010000...00010006      131            SFC LFT X         
-    211  00  YXQB98   0                        EOM
-  00001C02  0760FF80  296402BC...00010006       41    700.0 MB  ABS V         
-    211  00  YCQB70   0                        EOM
-  00001C02  0760FF80  296401F4...00010006       41    500.0 MB  ABS V         
-    211  00  YCQB50   0                        EOM
-  00001C02  0760FF80  9D010000...00010006      157          SFC   CAPE
-    211  00  YWQB98   0                        EOM
-  00001C02  0760FF80  9C010000...00010006      156          SFC   CIN
-    211  00  YYQB98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010006      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQB86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010006      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQB86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010006       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010006       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010006       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010006       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  0B749678...00010006       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010006       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQB86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010006       52   60 SPDY  30 SPDY  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010006       52   90 SPDY  60 SPDY  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  3474785A...00010006       52  120 SPDY  90 SPDY  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  34749678...00010006       52  150 SPDY 120 SPDY  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  3474B496...00010006       52  180 SPDY 150 SPDY  R H
-    211  00  YRQB86   0                        EOM
-  00001C02  0760FF80  21741E00...00010006       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010006       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010006       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  2174785A...00010006       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  21749678...00010006       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  2174B496...00010006       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQB86   0                        EOM
-  00001C02  0760FF80  22741E00...00010006       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010006       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010006       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  2274785A...00010006       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  22749678...00010006       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  2274B496...00010006       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQB86   0                        EOM
-  00001C02  0760FF80  0B690002...00010006       11    2  HTGL     TMP
-    211  00  YTQB98   0                        EOM
-  00001C02  0760FF80  34690002...00010006       52    2  HTGL     R H
-    211  00  YRQB98   0                        EOM
-  00001C02  0760FF80  2169000A...00010006       33   10  HTGL     U GRD
-    211  00  YUQB98   0                        EOM
-  00001C02  0760FF80  2269000A...00010006       34   10  HTGL     V GRD
-    211  00  YVQB98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs012.211 b/parm/wmo/grib_awpgfs012.211
deleted file mode 100755
index 442aa25263..0000000000
--- a/parm/wmo/grib_awpgfs012.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...0001000C       07    1000.0 MB   HGT           
-    211  00  YHQC99   0                        EOM
-  00001C02  0760FF80  076403CF...0001000C       07     975.0 MB   HGT
-    211  00  YHQC93   0                        EOM
-  00001C02  0760FF80  076403B6...0001000C       07     950.0 MB   HGT
-    211  00  YHQC95   0                        EOM
-  00001C02  0760FF80  0764039D...0001000C       07     925.0 MB   HGT
-    211  00  YHQC92   0                        EOM
-  00001C02  0760FF80  07640384...0001000C       07     900.0 MB   HGT
-    211  00  YHQC90   0                        EOM
-  00001C02  0760FF80  0764036B...0001000C       07     875.0 MB   HGT
-    211  00  YHQC91   0                        EOM
-  00001C02  0760FF80  07640352...0001000C       07     850.0 MB   HGT           
-    211  00  YHQC85   0                        EOM
-  00001C02  0760FF80  07640339...0001000C       07     825.0 MB   HGT
-    211  00  YHQC82   0                        EOM
-  00001C02  0760FF80  07640320...0001000C       07     800.0 MB   HGT
-    211  00  YHQC80   0                        EOM
-  00001C02  0760FF80  07640307...0001000C       07     775.0 MB   HGT
-    211  00  YHQC77   0                        EOM
-  00001C02  0760FF80  076402EE...0001000C       07     750.0 MB   HGT
-    211  00  YHQC75   0                        EOM
-  00001C02  0760FF80  076402D5...0001000C       07     725.0 MB   HGT
-    211  00  YHQC72   0                        EOM
-  00001C02  0760FF80  076402BC...0001000C       07     700.0 MB   HGT           
-    211  00  YHQC70   0                        EOM
-  00001C02  0760FF80  076402A3...0001000C       07     675.0 MB   HGT
-    211  00  YHQC67   0                        EOM
-  00001C02  0760FF80  0764028A...0001000C       07     650.0 MB   HGT
-    211  00  YHQC65   0                        EOM
-  00001C02  0760FF80  07640271...0001000C       07     625.0 MB   HGT
-    211  00  YHQC62   0                        EOM
-  00001C02  0760FF80  07640258...0001000C       07     600.0 MB   HGT
-    211  00  YHQC60   0                        EOM
-  00001C02  0760FF80  0764023F...0001000C       07     575.0 MB   HGT
-    211  00  YHQC57   0                        EOM
-  00001C02  0760FF80  07640226...0001000C       07     550.0 MB   HGT
-    211  00  YHQC55   0                        EOM
-  00001C02  0760FF80  0764020D...0001000C       07     525.0 MB   HGT
-    211  00  YHQC52   0                        EOM
-  00001C02  0760FF80  076401F4...0001000C       07     500.0 MB   HGT           
-    211  00  YHQC50   0                        EOM
-  00001C02  0760FF80  076401C2...0001000C       07     450.0 MB   HGT
-    211  00  YHQC45   0                        EOM
-  00001C02  0760FF80  07640190...0001000C       07     400.0 MB   HGT           
-    211  00  YHQC40   0                        EOM
-  00001C02  0760FF80  0764015E...0001000C       07     350.0 MB   HGT
-    211  00  YHQC35   0                        EOM
-  00001C02  0760FF80  0764012C...0001000C       07     300.0 MB   HGT           
-    211  00  YHQC30   0                        EOM
-  00001C02  0760FF80  076400FA...0001000C       07     250.0 MB   HGT           
-    211  00  YHQC25   0                        EOM
-  00001C02  0760FF80  076400C8...0001000C       07     200.0 MB   HGT           
-    211  00  YHQC20   0                        EOM
-  00001C02  0760FF80  07640096...0001000C       07     150.0 MB   HGT           
-    211  00  YHQC15   0                        EOM
-  00001C02  0760FF80  07640064...0001000C       07     100.0 MB   HGT           
-    211  00  YHQC10   0                        EOM
-  00001C02  0760FF80  216403E8...0001000C       33    1000.0 MB   U GRD
-    211  00  YUQC99   0                        EOM
-  00001C02  0760FF80  216403CF...0001000C       33     975.0 MB   U GRD
-    211  00  YUQC93   0                        EOM
-  00001C02  0760FF80  216403B6...0001000C       33     950.0 MB   U GRD
-    211  00  YUQC95   0                        EOM
-  00001C02  0760FF80  2164039D...0001000C       33     925.0 MB   U GRD
-    211  00  YUQC92   0                        EOM
-  00001C02  0760FF80  21640384...0001000C       33     900.0 MB   U GRD
-    211  00  YUQC90   0                        EOM
-  00001C02  0760FF80  2164036B...0001000C       33     875.0 MB   U GRD
-    211  00  YUQC91   0                        EOM
-  00001C02  0760FF80  21640352...0001000C       33     850.0 MB   U GRD         
-    211  00  YUQC85   0                        EOM
-  00001C02  0760FF80  21640339...0001000C       33     825.0 MB   U GRD
-    211  00  YUQC82   0                        EOM
-  00001C02  0760FF80  21640320...0001000C       33     800.0 MB   U GRD
-    211  00  YUQC80   0                        EOM
-  00001C02  0760FF80  21640307...0001000C       33     775.0 MB   U GRD
-    211  00  YUQC77   0                        EOM
-  00001C02  0760FF80  216402EE...0001000C       33     750.0 MB   U GRD
-    211  00  YUQC75   0                        EOM
-  00001C02  0760FF80  216402D5...0001000C       33     725.0 MB   U GRD
-    211  00  YUQC72   0                        EOM
-  00001C02  0760FF80  216402BC...0001000C       33     700.0 MB   U GRD         
-    211  00  YUQC70   0                        EOM
-  00001C02  0760FF80  216402A3...0001000C       33     675.0 MB   U GRD
-    211  00  YUQC67   0                        EOM
-  00001C02  0760FF80  2164028A...0001000C       33     650.0 MB   U GRD
-    211  00  YUQC65   0                        EOM
-  00001C02  0760FF80  21640271...0001000C       33     625.0 MB   U GRD
-    211  00  YUQC62   0                        EOM
-  00001C02  0760FF80  21640258...0001000C       33     600.0 MB   U GRD
-    211  00  YUQC60   0                        EOM
-  00001C02  0760FF80  2164023F...0001000C       33     575.0 MB   U GRD
-    211  00  YUQC57   0                        EOM
-  00001C02  0760FF80  21640226...0001000C       33     550.0 MB   U GRD
-    211  00  YUQC55   0                        EOM
-  00001C02  0760FF80  2164020D...0001000C       33     525.0 MB   U GRD
-    211  00  YUQC52   0                        EOM
-  00001C02  0760FF80  216401F4...0001000C       33     500.0 MB   U GRD         
-    211  00  YUQC50   0                        EOM
-  00001C02  0760FF80  216401C2...0001000C       33     450.0 MB   U GRD
-    211  00  YUQC45   0                        EOM
-  00001C02  0760FF80  21640190...0001000C       33     400.0 MB   U GRD         
-    211  00  YUQC40   0                        EOM
-  00001C02  0760FF80  2164015E...0001000C       33     350.0 MB   U GRD
-    211  00  YUQC35   0                        EOM
-  00001C02  0760FF80  2164012C...0001000C       33     300.0 MB   U GRD         
-    211  00  YUQC30   0                        EOM
-  00001C02  0760FF80  216400FA...0001000C       33     250.0 MB   U GRD         
-    211  00  YUQC25   0                        EOM
-  00001C02  0760FF80  216400C8...0001000C       33     200.0 MB   U GRD         
-    211  00  YUQC20   0                        EOM
-  00001C02  0760FF80  21640096...0001000C       33     150.0 MB   U GRD         
-    211  00  YUQC15   0                        EOM
-  00001C02  0760FF80  21640064...0001000C       33     100.0 MB   U GRD         
-    211  00  YUQC10   0                        EOM
-  00001C02  0760FF80  226403E8...0001000C       34    1000.0 MB   V GRD
-    211  00  YVQC99   0                        EOM
-  00001C02  0760FF80  226403CF...0001000C       34     975.0 MB   V GRD
-    211  00  YVQC93   0                        EOM
-  00001C02  0760FF80  226403B6...0001000C       34     950.0 MB   V GRD
-    211  00  YVQC95   0                        EOM
-  00001C02  0760FF80  2264039D...0001000C       34     925.0 MB   V GRD
-    211  00  YVQC92   0                        EOM
-  00001C02  0760FF80  22640384...0001000C       34     900.0 MB   V GRD
-    211  00  YVQC90   0                        EOM
-  00001C02  0760FF80  2264036B...0001000C       34     875.0 MB   V GRD
-    211  00  YVQC91   0                        EOM
-  00001C02  0760FF80  22640352...0001000C       34     850.0 MB   V GRD         
-    211  00  YVQC85   0                        EOM
-  00001C02  0760FF80  22640339...0001000C       34     825.0 MB   V GRD
-    211  00  YVQC82   0                        EOM
-  00001C02  0760FF80  22640320...0001000C       34     800.0 MB   V GRD
-    211  00  YVQC80   0                        EOM
-  00001C02  0760FF80  22640307...0001000C       34     775.0 MB   V GRD
-    211  00  YVQC77   0                        EOM
-  00001C02  0760FF80  226402EE...0001000C       34     750.0 MB   V GRD
-    211  00  YVQC75   0                        EOM
-  00001C02  0760FF80  226402D5...0001000C       34     725.0 MB   V GRD
-    211  00  YVQC72   0                        EOM
-  00001C02  0760FF80  226402BC...0001000C       34     700.0 MB   V GRD         
-    211  00  YVQC70   0                        EOM
-  00001C02  0760FF80  226402A3...0001000C       34     675.0 MB   V GRD
-    211  00  YVQC67   0                        EOM
-  00001C02  0760FF80  2264028A...0001000C       34     650.0 MB   V GRD
-    211  00  YVQC65   0                        EOM
-  00001C02  0760FF80  22640271...0001000C       34     625.0 MB   V GRD
-    211  00  YVQC62   0                        EOM
-  00001C02  0760FF80  22640258...0001000C       34     600.0 MB   V GRD
-    211  00  YVQC60   0                        EOM
-  00001C02  0760FF80  2264023F...0001000C       34     575.0 MB   V GRD
-    211  00  YVQC57   0                        EOM
-  00001C02  0760FF80  22640226...0001000C       34     550.0 MB   V GRD
-    211  00  YVQC55   0                        EOM
-  00001C02  0760FF80  2264020D...0001000C       34     525.0 MB   V GRD
-    211  00  YVQC52   0                        EOM
-  00001C02  0760FF80  226401F4...0001000C       34     500.0 MB   V GRD         
-    211  00  YVQC50   0                        EOM 
-  00001C02  0760FF80  226401C2...0001000C       34     450.0 MB   V GRD
-    211  00  YVQC45   0                        EOM
-  00001C02  0760FF80  22640190...0001000C       34     400.0 MB   V GRD         
-    211  00  YVQC40   0                        EOM
-  00001C02  0760FF80  2264015E...0001000C       34     350.0 MB   V GRD
-    211  00  YVQC35   0                        EOM
-  00001C02  0760FF80  2264012C...0001000C       34     300.0 MB   V GRD         
-    211  00  YVQC30   0                        EOM
-  00001C02  0760FF80  226400FA...0001000C       34     250.0 MB   V GRD         
-    211  00  YVQC25   0                        EOM
-  00001C02  0760FF80  226400C8...0001000C       34     200.0 MB   V GRD         
-    211  00  YVQC20   0                        EOM
-  00001C02  0760FF80  22640096...0001000C       34     150.0 MB   V GRD         
-    211  00  YVQC15   0                        EOM
-  00001C02  0760FF80  22640064...0001000C       34     100.0 MB   V GRD         
-    211  00  YVQC10   0                        EOM
-  00001C02  0760FF80  02660000...0001000C       02           MSL  PRMSL         
-    211  00  YPQC89   0                        EOM
-  00001C02  0760FF80  346403E8...0001000C       52    1000.0 MB   R H
-    211  00  YRQC99   0                        EOM
-  00001C02  0760FF80  346403CF...0001000C       52     975.0 MB   R H
-    211  00  YRQC93   0                        EOM
-  00001C02  0760FF80  346403B6...0001000C       52     950.0 MB   R H
-    211  00  YRQC95   0                        EOM
-  00001C02  0760FF80  3464039D...0001000C       52     925.0 MB   R H
-    211  00  YRQC92   0                        EOM
-  00001C02  0760FF80  34640384...0001000C       52     900.0 MB   R H
-    211  00  YRQC90   0                        EOM
-  00001C02  0760FF80  3464036B...0001000C       52     875.0 MB   R H
-    211  00  YRQC91   0                        EOM
-  00001C02  0760FF80  34640352...0001000C       52     850.0 MB   R H           
-    211  00  YRQC85   0                        EOM
-  00001C02  0760FF80  34640339...0001000C       52     825.0 MB   R H
-    211  00  YRQC82   0                        EOM
-  00001C02  0760FF80  34640320...0001000C       52     800.0 MB   R H
-    211  00  YRQC80   0                        EOM
-  00001C02  0760FF80  34640307...0001000C       52     775.0 MB   R H
-    211  00  YRQC77   0                        EOM
-  00001C02  0760FF80  346402EE...0001000C       52     750.0 MB   R H
-    211  00  YRQC75   0                        EOM
-  00001C02  0760FF80  346402D5...0001000C       52     725.0 MB   R H
-    211  00  YRQC72   0                        EOM
-  00001C02  0760FF80  346402BC...0001000C       52     700.0 MB   R H           
-    211  00  YRQC70   0                        EOM
-  00001C02  0760FF80  346402A3...0001000C       52     675.0 MB   R H
-    211  00  YRQC67   0                        EOM
-  00001C02  0760FF80  3464028A...0001000C       52     650.0 MB   R H
-    211  00  YRQC65   0                        EOM
-  00001C02  0760FF80  34640271...0001000C       52     625.0 MB   R H
-    211  00  YRQC62   0                        EOM
-  00001C02  0760FF80  34640258...0001000C       52     600.0 MB   R H
-    211  00  YRQC60   0                        EOM
-  00001C02  0760FF80  3464023F...0001000C       52     575.0 MB   R H
-    211  00  YRQC57   0                        EOM
-  00001C02  0760FF80  34640226...0001000C       52     550.0 MB   R H
-    211  00  YRQC55   0                        EOM
-  00001C02  0760FF80  3464020D...0001000C       52     525.0 MB   R H
-    211  00  YRQC52   0                        EOM
-  00001C02  0760FF80  346401F4...0001000C       52     500.0 MB   R H           
-    211  00  YRQC50   0                        EOM
-  00001C02  0760FF80  346401C2...0001000C       52     450.0 MB   R H
-    211  00  YRQC45   0                        EOM
-  00001C02  0760FF80  34640190...0001000C       52     400.0 MB   R H           
-    211  00  YRQC40   0                        EOM
-  00001C02  0760FF80  3464015E...0001000C       52     350.0 MB   R H
-    211  00  YRQC35   0                        EOM
-  00001C02  0760FF80  3464012C...0001000C       52     300.0 MB   R H           
-    211  00  YRQC30   0                        EOM
-  00001C02  0760FF80  346400FA...0001000C       52     250.0 MB   R H
-    211  00  YRQC25   0                        EOM
-  00001C02  0760FF80  346400C8...0001000C       52     200.0 MB   R H
-    211  00  YRQC20   0                        EOM
-  00001C02  0760FF80  34640096...0001000C       52     150.0 MB   R H
-    211  00  YRQC15   0                        EOM
-  00001C02  0760FF80  34640064...0001000C       52     100.0 MB   R H
-    211  00  YRQC10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001000C       11    1000.0 MB   TMP
-    211  00  YTQC99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001000C       11     975.0 MB   TMP
-    211  00  YTQC93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001000C       11     950.0 MB   TMP
-    211  00  YTQC95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001000C       11     925.0 MB   TMP
-    211  00  YTQC92   0                        EOM
-  00001C02  0760FF80  0B640384...0001000C       11     900.0 MB   TMP
-    211  00  YTQC90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001000C       11     875.0 MB   TMP
-    211  00  YTQC91   0                        EOM
-  00001C02  0760FF80  0B640352...0001000C       11     850.0 MB   TMP           
-    211  00  YTQC85   0                        EOM
-  00001C02  0760FF80  0B640339...0001000C       11     825.0 MB   TMP
-    211  00  YTQC82   0                        EOM
-  00001C02  0760FF80  0B640320...0001000C       11     800.0 MB   TMP
-    211  00  YTQC80   0                        EOM
-  00001C02  0760FF80  0B640307...0001000C       11     775.0 MB   TMP
-    211  00  YTQC77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001000C       11     750.0 MB   TMP
-    211  00  YTQC75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001000C       11     725.0 MB   TMP
-    211  00  YTQC72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001000C       11     700.0 MB   TMP           
-    211  00  YTQC70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001000C       11     675.0 MB   TMP
-    211  00  YTQC67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001000C       11     650.0 MB   TMP
-    211  00  YTQC65   0                        EOM
-  00001C02  0760FF80  0B640271...0001000C       11     625.0 MB   TMP
-    211  00  YTQC62   0                        EOM
-  00001C02  0760FF80  0B640258...0001000C       11     600.0 MB   TMP
-    211  00  YTQC60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001000C       11     575.0 MB   TMP
-    211  00  YTQC57   0                        EOM
-  00001C02  0760FF80  0B640226...0001000C       11     550.0 MB   TMP
-    211  00  YTQC55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001000C       11     525.0 MB   TMP
-    211  00  YTQC52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001000C       11     500.0 MB   TMP           
-    211  00  YTQC50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001000C       11     450.0 MB   TMP
-    211  00  YTQC45   0                        EOM
-  00001C02  0760FF80  0B640190...0001000C       11     400.0 MB   TMP           
-    211  00  YTQC40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001000C       11     350.0 MB   TMP
-    211  00  YTQC35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001000C       11     300.0 MB   TMP           
-    211  00  YTQC30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001000C       11     250.0 MB   TMP           
-    211  00  YTQC25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001000C       11     200.0 MB   TMP           
-    211  00  YTQC20   0                        EOM
-  00001C02  0760FF80  0B640096...0001000C       11     150.0 MB   TMP           
-    211  00  YTQC15   0                        EOM
-  00001C02  0760FF80  0B640064...0001000C       11     100.0 MB   TMP           
-    211  00  YTQC10   0                        EOM
-  00001C02  0760FF80  28640352...0001000C       40     850.0 MB  DZDT           
-    211  00  YOQC85   0                        EOM
-  00001C02  0760FF80  286402BC...0001000C       40     700.0 MB  DZDT           
-    211  00  YOQC70   0                        EOM
-  00001C02  0760FF80  286401F4...0001000C       40     500.0 MB  DZDT           
-    211  00  YOQC50   0                        EOM
-  00001C02  0760FF80  28640190...0001000C       40     400.0 MB  DZDT           
-    211  00  YOQC40   0                        EOM
-  00001C02  0760FF80  2864012C...0001000C       40     300.0 MB  DZDT           
-    211  00  YOQC30   0                        EOM
-  00001C02  0760FF80  286400FA...0001000C       40     250.0 MB  DZDT           
-    211  00  YOQC25   0                        EOM
-  00001C02  0760FF80  286400C8...0001000C       40     200.0 MB  DZDT           
-    211  00  YOQC20   0                        EOM
-  00001C02  0760FF80  28640096...0001000C       40     150.0 MB  DZDT           
-    211  00  YOQC15   0                        EOM
-  00001C02  0760FF80  28640064...0001000C       40     100.0 MB  DZDT           
-    211  00  YOQC10   0                        EOM
-  00001C02  0760FF80  01010000...0001000C       01          SFC  PRES           
-    211  00  YPQC98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001000C       52        44/100  R H           
-    211  00  YRQC00   0                        EOM
-  00001C02  0760FF80  36C80000...0001000C       54          EATM  P WAT         
-    211  00  YFQC00   0                        EOM
-  00001C02  0760FF80  0B690002...0001000C       11          2m/SFC TMP         
-    211  00  YTQC98   0                        EOM
-  00001C02  0760FF80  34741E00...0001000C       52     BNDRY/SPD  R H           
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  0B070000...0001000C       11            TRO TMP           
-    211  00  YTQC97   0                        EOM
-  00001C02  0760FF80  01070000...0001000C       01            TRO PRES          
-    211  00  YPQC97   0                        EOM
-  00001C02  0760FF80  21741E00...0001000C       33           SPD  U GRD         
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  22741E00...0001000C       34           SPD  V GRD         
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  21070000...0001000C       33            TRO U GRD         
-    211  00  YUQC97   0                        EOM
-  00001C02  0760FF80  22070000...0001000C       34            TRO V GRD         
-    211  00  YVQC97   0                        EOM
-  00001C02  0760FF80  88070000...0001000C      136            TRO VW SH         
-    211  00  YBQC97   0                        EOM
-  00001C02  0760FF80  3D010000...0001000C       61            SFC A PCP         
-    211  00  YEQC98   0                        EOM
-  00001C02  0760FF80  83010000...0001000C      131            SFC LFT X         
-    211  00  YXQC98   0                        EOM
-  00001C02  0760FF80  296402BC...0001000C       41    700.0 MB    ABS V         
-    211  00  YCQC70   0                        EOM
-  00001C02  0760FF80  296401F4...0001000C       41    500.0 MB    ABS V         
-    211  00  YCQC50   0                        EOM
-  00001C02  0760FF80  9D010000...0001000C      157          SFC   CAPE
-    211  00  YWQC98   0                        EOM
-  00001C02  0760FF80  9C010000...0001000C      156          SFC   CIN
-    211  00  YYQC98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001000C      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQC86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001000C      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQC86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001000C       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001000C       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001000C       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001000C       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  0B749678...0001000C       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001000C       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQC86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001000C       52   60 SPDY  30 SPDY  R H
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001000C       52   90 SPDY  60 SPDY  R H
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  3474785A...0001000C       52  120 SPDY  90 SPDY  R H
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  34749678...0001000C       52  150 SPDY 120 SPDY  R H
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  3474B496...0001000C       52  180 SPDY 150 SPDY  R H
-    211  00  YRQC86   0                        EOM
-  00001C02  0760FF80  21741E00...0001000C       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001000C       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001000C       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  2174785A...0001000C       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  21749678...0001000C       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  2174B496...0001000C       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQC86   0                        EOM
-  00001C02  0760FF80  22741E00...0001000C       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001000C       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001000C       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  2274785A...0001000C       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  22749678...0001000C       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  2274B496...0001000C       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQC86   0                        EOM
-  00001C02  0760FF80  0B690002...0001000C       11    2  HTGL     TMP
-    211  00  YTQC98   0                        EOM
-  00001C02  0760FF80  34690002...0001000C       52    2  HTGL     R H
-    211  00  YRQC98   0                        EOM
-  00001C02  0760FF80  2169000A...0001000C       33   10  HTGL     U GRD
-    211  00  YUQC98   0                        EOM
-  00001C02  0760FF80  2269000A...0001000C       34   10  HTGL     V GRD
-    211  00  YVQC98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs018.211 b/parm/wmo/grib_awpgfs018.211
deleted file mode 100755
index 98719251ad..0000000000
--- a/parm/wmo/grib_awpgfs018.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...00010012       07    1000.0 MB   HGT           
-    211  00  YHQD99   0                        EOM 
-  00001C02  0760FF80  076403CF...00010012       07     975.0 MB   HGT
-    211  00  YHQD93   0                        EOM
-  00001C02  0760FF80  076403B6...00010012       07     950.0 MB   HGT
-    211  00  YHQD95   0                        EOM
-  00001C02  0760FF80  0764039D...00010012       07     925.0 MB   HGT
-    211  00  YHQD92   0                        EOM
-  00001C02  0760FF80  07640384...00010012       07     900.0 MB   HGT
-    211  00  YHQD90   0                        EOM
-  00001C02  0760FF80  0764036B...00010012       07     875.0 MB   HGT
-    211  00  YHQD91   0                        EOM
-  00001C02  0760FF80  07640352...00010012       07     850.0 MB   HGT           
-    211  00  YHQD85   0                        EOM
-  00001C02  0760FF80  07640339...00010012       07     825.0 MB   HGT
-    211  00  YHQD82   0                        EOM
-  00001C02  0760FF80  07640320...00010012       07     800.0 MB   HGT
-    211  00  YHQD80   0                        EOM
-  00001C02  0760FF80  07640307...00010012       07     775.0 MB   HGT
-    211  00  YHQD77   0                        EOM
-  00001C02  0760FF80  076402EE...00010012       07     750.0 MB   HGT
-    211  00  YHQD75   0                        EOM
-  00001C02  0760FF80  076402D5...00010012       07     725.0 MB   HGT
-    211  00  YHQD72   0                        EOM
-  00001C02  0760FF80  076402BC...00010012       07     700.0 MB   HGT           
-    211  00  YHQD70   0                        EOM
-  00001C02  0760FF80  076402A3...00010012       07     675.0 MB   HGT
-    211  00  YHQD67   0                        EOM
-  00001C02  0760FF80  0764028A...00010012       07     650.0 MB   HGT
-    211  00  YHQD65   0                        EOM
-  00001C02  0760FF80  07640271...00010012       07     625.0 MB   HGT
-    211  00  YHQD62   0                        EOM
-  00001C02  0760FF80  07640258...00010012       07     600.0 MB   HGT
-    211  00  YHQD60   0                        EOM
-  00001C02  0760FF80  0764023F...00010012       07     575.0 MB   HGT
-    211  00  YHQD57   0                        EOM
-  00001C02  0760FF80  07640226...00010012       07     550.0 MB   HGT
-    211  00  YHQD55   0                        EOM
-  00001C02  0760FF80  0764020D...00010012       07     525.0 MB   HGT
-    211  00  YHQD52   0                        EOM
-  00001C02  0760FF80  076401F4...00010012       07     500.0 MB   HGT           
-    211  00  YHQD50   0                        EOM
-  00001C02  0760FF80  076401C2...00010012       07     450.0 MB   HGT
-    211  00  YHQD45   0                        EOM
-  00001C02  0760FF80  07640190...00010012       07     400.0 MB   HGT           
-    211  00  YHQD40   0                        EOM
-  00001C02  0760FF80  0764015E...00010012       07     350.0 MB   HGT
-    211  00  YHQD35   0                        EOM
-  00001C02  0760FF80  0764012C...00010012       07     300.0 MB   HGT           
-    211  00  YHQD30   0                        EOM
-  00001C02  0760FF80  076400FA...00010012       07     250.0 MB   HGT           
-    211  00  YHQD25   0                        EOM
-  00001C02  0760FF80  076400C8...00010012       07     200.0 MB   HGT           
-    211  00  YHQD20   0                        EOM
-  00001C02  0760FF80  07640096...00010012       07     150.0 MB   HGT           
-    211  00  YHQD15   0                        EOM
-  00001C02  0760FF80  07640064...00010012       07     100.0 MB   HGT           
-    211  00  YHQD10   0                        EOM
-  00001C02  0760FF80  216403E8...00010012       33    1000.0 MB   U GRD
-    211  00  YUQD99   0                        EOM
-  00001C02  0760FF80  216403CF...00010012       33     975.0 MB   U GRD
-    211  00  YUQD93   0                        EOM
-  00001C02  0760FF80  216403B6...00010012       33     950.0 MB   U GRD
-    211  00  YUQD95   0                        EOM
-  00001C02  0760FF80  2164039D...00010012       33     925.0 MB   U GRD
-    211  00  YUQD92   0                        EOM
-  00001C02  0760FF80  21640384...00010012       33     900.0 MB   U GRD
-    211  00  YUQD90   0                        EOM
-  00001C02  0760FF80  2164036B...00010012       33     875.0 MB   U GRD
-    211  00  YUQD91   0                        EOM
-  00001C02  0760FF80  21640352...00010012       33     850.0 MB   U GRD         
-    211  00  YUQD85   0                        EOM
-  00001C02  0760FF80  21640339...00010012       33     825.0 MB   U GRD
-    211  00  YUQD82   0                        EOM
-  00001C02  0760FF80  21640320...00010012       33     800.0 MB   U GRD
-    211  00  YUQD80   0                        EOM
-  00001C02  0760FF80  21640307...00010012       33     775.0 MB   U GRD
-    211  00  YUQD77   0                        EOM
-  00001C02  0760FF80  216402EE...00010012       33     750.0 MB   U GRD
-    211  00  YUQD75   0                        EOM
-  00001C02  0760FF80  216402D5...00010012       33     725.0 MB   U GRD
-    211  00  YUQD72   0                        EOM
-  00001C02  0760FF80  216402BC...00010012       33     700.0 MB   U GRD         
-    211  00  YUQD70   0                        EOM
-  00001C02  0760FF80  216402A3...00010012       33     675.0 MB   U GRD
-    211  00  YUQD67   0                        EOM
-  00001C02  0760FF80  2164028A...00010012       33     650.0 MB   U GRD
-    211  00  YUQD65   0                        EOM
-  00001C02  0760FF80  21640271...00010012       33     625.0 MB   U GRD
-    211  00  YUQD62   0                        EOM
-  00001C02  0760FF80  21640258...00010012       33     600.0 MB   U GRD
-    211  00  YUQD60   0                        EOM
-  00001C02  0760FF80  2164023F...00010012       33     575.0 MB   U GRD
-    211  00  YUQD57   0                        EOM
-  00001C02  0760FF80  21640226...00010012       33     550.0 MB   U GRD
-    211  00  YUQD55   0                        EOM
-  00001C02  0760FF80  2164020D...00010012       33     525.0 MB   U GRD
-    211  00  YUQD52   0                        EOM
-  00001C02  0760FF80  216401F4...00010012       33     500.0 MB   U GRD         
-    211  00  YUQD50   0                        EOM
-  00001C02  0760FF80  216401C2...00010012       33     450.0 MB   U GRD
-    211  00  YUQD45   0                        EOM
-  00001C02  0760FF80  21640190...00010012       33     400.0 MB   U GRD         
-    211  00  YUQD40   0                        EOM
-  00001C02  0760FF80  2164015E...00010012       33     350.0 MB   U GRD
-    211  00  YUQD35   0                        EOM
-  00001C02  0760FF80  2164012C...00010012       33     300.0 MB   U GRD         
-    211  00  YUQD30   0                        EOM
-  00001C02  0760FF80  216400FA...00010012       33     250.0 MB   U GRD         
-    211  00  YUQD25   0                        EOM
-  00001C02  0760FF80  216400C8...00010012       33     200.0 MB   U GRD         
-    211  00  YUQD20   0                        EOM 
-  00001C02  0760FF80  21640096...00010012       33     150.0 MB   U GRD         
-    211  00  YUQD15   0                        EOM
-  00001C02  0760FF80  21640064...00010012       33     100.0 MB   U GRD         
-    211  00  YUQD10   0                        EOM
-  00001C02  0760FF80  226403E8...00010012       34    1000.0 MB   V GRD
-    211  00  YVQD99   0                        EOM
-  00001C02  0760FF80  226403CF...00010012       34     975.0 MB   V GRD
-    211  00  YVQD93   0                        EOM
-  00001C02  0760FF80  226403B6...00010012       34     950.0 MB   V GRD
-    211  00  YVQD95   0                        EOM
-  00001C02  0760FF80  2264039D...00010012       34     925.0 MB   V GRD
-    211  00  YVQD92   0                        EOM
-  00001C02  0760FF80  22640384...00010012       34     900.0 MB   V GRD
-    211  00  YVQD90   0                        EOM
-  00001C02  0760FF80  2264036B...00010012       34     875.0 MB   V GRD
-    211  00  YVQD91   0                        EOM
-  00001C02  0760FF80  22640352...00010012       34     850.0 MB   V GRD         
-    211  00  YVQD85   0                        EOM
-  00001C02  0760FF80  22640339...00010012       34     825.0 MB   V GRD
-    211  00  YVQD82   0                        EOM
-  00001C02  0760FF80  22640320...00010012       34     800.0 MB   V GRD
-    211  00  YVQD80   0                        EOM
-  00001C02  0760FF80  22640307...00010012       34     775.0 MB   V GRD
-    211  00  YVQD77   0                        EOM
-  00001C02  0760FF80  226402EE...00010012       34     750.0 MB   V GRD
-    211  00  YVQD75   0                        EOM
-  00001C02  0760FF80  226402D5...00010012       34     725.0 MB   V GRD
-    211  00  YVQD72   0                        EOM
-  00001C02  0760FF80  226402BC...00010012       34     700.0 MB   V GRD         
-    211  00  YVQD70   0                        EOM
-  00001C02  0760FF80  226402A3...00010012       34     675.0 MB   V GRD
-    211  00  YVQD67   0                        EOM
-  00001C02  0760FF80  2264028A...00010012       34     650.0 MB   V GRD
-    211  00  YVQD65   0                        EOM
-  00001C02  0760FF80  22640271...00010012       34     625.0 MB   V GRD
-    211  00  YVQD62   0                        EOM
-  00001C02  0760FF80  22640258...00010012       34     600.0 MB   V GRD
-    211  00  YVQD60   0                        EOM
-  00001C02  0760FF80  2264023F...00010012       34     575.0 MB   V GRD
-    211  00  YVQD57   0                        EOM
-  00001C02  0760FF80  22640226...00010012       34     550.0 MB   V GRD
-    211  00  YVQD55   0                        EOM
-  00001C02  0760FF80  2264020D...00010012       34     525.0 MB   V GRD
-    211  00  YVQD52   0                        EOM
-  00001C02  0760FF80  226401F4...00010012       34     500.0 MB   V GRD         
-    211  00  YVQD50   0                        EOM
-  00001C02  0760FF80  226401C2...00010012       34     450.0 MB   V GRD
-    211  00  YVQD45   0                        EOM
-  00001C02  0760FF80  22640190...00010012       34     400.0 MB   V GRD         
-    211  00  YVQD40   0                        EOM
-  00001C02  0760FF80  2264015E...00010012       34     350.0 MB   V GRD
-    211  00  YVQD35   0                        EOM
-  00001C02  0760FF80  2264012C...00010012       34     300.0 MB   V GRD         
-    211  00  YVQD30   0                        EOM
-  00001C02  0760FF80  226400FA...00010012       34     250.0 MB   V GRD         
-    211  00  YVQD25   0                        EOM
-  00001C02  0760FF80  226400C8...00010012       34     200.0 MB   V GRD         
-    211  00  YVQD20   0                        EOM
-  00001C02  0760FF80  22640096...00010012       34     150.0 MB   V GRD         
-    211  00  YVQD15   0                        EOM
-  00001C02  0760FF80  22640064...00010012       34     100.0 MB   V GRD         
-    211  00  YVQD10   0                        EOM
-  00001C02  0760FF80  02660000...00010012       02           MSL  PRMSL         
-    211  00  YPQD89   0                        EOM
-  00001C02  0760FF80  346403E8...00010012       52    1000.0 MB   R H
-    211  00  YRQD99   0                        EOM
-  00001C02  0760FF80  346403CF...00010012       52     975.0 MB   R H
-    211  00  YRQD93   0                        EOM
-  00001C02  0760FF80  346403B6...00010012       52     950.0 MB   R H
-    211  00  YRQD95   0                        EOM
-  00001C02  0760FF80  3464039D...00010012       52     925.0 MB   R H
-    211  00  YRQD92   0                        EOM
-  00001C02  0760FF80  34640384...00010012       52     900.0 MB   R H
-    211  00  YRQD90   0                        EOM
-  00001C02  0760FF80  3464036B...00010012       52     875.0 MB   R H
-    211  00  YRQD91   0                        EOM
-  00001C02  0760FF80  34640352...00010012       52     850.0 MB   R H           
-    211  00  YRQD85   0                        EOM
-  00001C02  0760FF80  34640339...00010012       52     825.0 MB   R H
-    211  00  YRQD82   0                        EOM
-  00001C02  0760FF80  34640320...00010012       52     800.0 MB   R H
-    211  00  YRQD80   0                        EOM
-  00001C02  0760FF80  34640307...00010012       52     775.0 MB   R H
-    211  00  YRQD77   0                        EOM
-  00001C02  0760FF80  346402EE...00010012       52     750.0 MB   R H
-    211  00  YRQD75   0                        EOM
-  00001C02  0760FF80  346402D5...00010012       52     725.0 MB   R H
-    211  00  YRQD72   0                        EOM
-  00001C02  0760FF80  346402BC...00010012       52     700.0 MB   R H           
-    211  00  YRQD70   0                        EOM
-  00001C02  0760FF80  346402A3...00010012       52     675.0 MB   R H
-    211  00  YRQD67   0                        EOM
-  00001C02  0760FF80  3464028A...00010012       52     650.0 MB   R H
-    211  00  YRQD65   0                        EOM
-  00001C02  0760FF80  34640271...00010012       52     625.0 MB   R H
-    211  00  YRQD62   0                        EOM
-  00001C02  0760FF80  34640258...00010012       52     600.0 MB   R H
-    211  00  YRQD60   0                        EOM
-  00001C02  0760FF80  3464023F...00010012       52     575.0 MB   R H
-    211  00  YRQD57   0                        EOM
-  00001C02  0760FF80  34640226...00010012       52     550.0 MB   R H
-    211  00  YRQD55   0                        EOM
-  00001C02  0760FF80  3464020D...00010012       52     525.0 MB   R H
-    211  00  YRQD52   0                        EOM
-  00001C02  0760FF80  346401F4...00010012       52     500.0 MB   R H           
-    211  00  YRQD50   0                        EOM 
-  00001C02  0760FF80  346401C2...00010012       52     450.0 MB   R H
-    211  00  YRQD45   0                        EOM
-  00001C02  0760FF80  34640190...00010012       52     400.0 MB   R H           
-    211  00  YRQD40   0                        EOM
-  00001C02  0760FF80  3464015E...00010012       52     350.0 MB   R H
-    211  00  YRQD35   0                        EOM
-  00001C02  0760FF80  3464012C...00010012       52     300.0 MB   R H           
-    211  00  YRQD30   0                        EOM
-  00001C02  0760FF80  346400FA...00010012       52     250.0 MB   R H
-    211  00  YRQD25   0                        EOM
-  00001C02  0760FF80  346400C8...00010012       52     200.0 MB   R H
-    211  00  YRQD20   0                        EOM
-  00001C02  0760FF80  34640096...00010012       52     150.0 MB   R H
-    211  00  YRQD15   0                        EOM
-  00001C02  0760FF80  34640064...00010012       52     100.0 MB   R H
-    211  00  YRQD10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010012       11    1000.0 MB   TMP
-    211  00  YTQD99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010012       11     975.0 MB   TMP
-    211  00  YTQD93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010012       11     950.0 MB   TMP
-    211  00  YTQD95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010012       11     925.0 MB   TMP
-    211  00  YTQD92   0                        EOM
-  00001C02  0760FF80  0B640384...00010012       11     900.0 MB   TMP
-    211  00  YTQD90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010012       11     875.0 MB   TMP
-    211  00  YTQD91   0                        EOM
-  00001C02  0760FF80  0B640352...00010012       11     850.0 MB   TMP           
-    211  00  YTQD85   0                        EOM
-  00001C02  0760FF80  0B640339...00010012       11     825.0 MB   TMP
-    211  00  YTQD82   0                        EOM
-  00001C02  0760FF80  0B640320...00010012       11     800.0 MB   TMP
-    211  00  YTQD80   0                        EOM
-  00001C02  0760FF80  0B640307...00010012       11     775.0 MB   TMP
-    211  00  YTQD77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010012       11     750.0 MB   TMP
-    211  00  YTQD75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010012       11     725.0 MB   TMP
-    211  00  YTQD72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010012       11     700.0 MB   TMP           
-    211  00  YTQD70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010012       11     675.0 MB   TMP
-    211  00  YTQD67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010012       11     650.0 MB   TMP
-    211  00  YTQD65   0                        EOM
-  00001C02  0760FF80  0B640271...00010012       11     625.0 MB   TMP
-    211  00  YTQD62   0                        EOM
-  00001C02  0760FF80  0B640258...00010012       11     600.0 MB   TMP
-    211  00  YTQD60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010012       11     575.0 MB   TMP
-    211  00  YTQD57   0                        EOM
-  00001C02  0760FF80  0B640226...00010012       11     550.0 MB   TMP
-    211  00  YTQD55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010012       11     525.0 MB   TMP
-    211  00  YTQD52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010012       11     500.0 MB   TMP           
-    211  00  YTQD50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010012       11     450.0 MB   TMP
-    211  00  YTQD45   0                        EOM
-  00001C02  0760FF80  0B640190...00010012       11     400.0 MB   TMP           
-    211  00  YTQD40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010012       11     350.0 MB   TMP
-    211  00  YTQD35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010012       11     300.0 MB   TMP           
-    211  00  YTQD30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010012       11     250.0 MB   TMP           
-    211  00  YTQD25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010012       11     200.0 MB   TMP           
-    211  00  YTQD20   0                        EOM
-  00001C02  0760FF80  0B640096...00010012       11     150.0 MB   TMP           
-    211  00  YTQD15   0                        EOM
-  00001C02  0760FF80  0B640064...00010012       11     100.0 MB   TMP           
-    211  00  YTQD10   0                        EOM
-  00001C02  0760FF80  28640352...00010012       40     850.0 MB  DZDT           
-    211  00  YOQD85   0                        EOM
-  00001C02  0760FF80  286402BC...00010012       40     700.0 MB  DZDT           
-    211  00  YOQD70   0                        EOM
-  00001C02  0760FF80  286401F4...00010012       40     500.0 MB  DZDT           
-    211  00  YOQD50   0                        EOM
-  00001C02  0760FF80  28640190...00010012       40     400.0 MB  DZDT           
-    211  00  YOQD40   0                        EOM
-  00001C02  0760FF80  2864012C...00010012       40     300.0 MB  DZDT           
-    211  00  YOQD30   0                        EOM
-  00001C02  0760FF80  286400FA...00010012       40     250.0 MB  DZDT           
-    211  00  YOQD25   0                        EOM
-  00001C02  0760FF80  286400C8...00010012       40     200.0 MB  DZDT           
-    211  00  YOQD20   0                        EOM
-  00001C02  0760FF80  28640096...00010012       40     150.0 MB  DZDT           
-    211  00  YOQD15   0                        EOM
-  00001C02  0760FF80  28640064...00010012       40     100.0 MB  DZDT           
-    211  00  YOQD10   0                        EOM
-  00001C02  0760FF80  01010000...00010012       01          SFC  PRES           
-    211  00  YPQD98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010012       52        44/100  R H           
-    211  00  YRQD00   0                        EOM
-  00001C02  0760FF80  36C80000...00010012       54          EATM  P WAT         
-    211  00  YFQD00   0                        EOM
-  00001C02  0760FF80  0B690002...00010012       11          2m/SFC TMP         
-    211  00  YTQD98   0                        EOM
-  00001C02  0760FF80  34741E00...00010012       52     BNDRY/SPD  R H           
-    211  00  YRQD86   0                        EOM 
-  00001C02  0760FF80  0B070000...00010012       11            TRO TMP           
-    211  00  YTQD97   0                        EOM
-  00001C02  0760FF80  01070000...00010012       01            TRO PRES          
-    211  00  YPQD97   0                        EOM
-  00001C02  0760FF80  21741E00...00010012       33           SPD  U GRD         
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  22741E00...00010012       34           SPD  V GRD         
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  21070000...00010012       33            TRO U GRD         
-    211  00  YUQD97   0                        EOM
-  00001C02  0760FF80  22070000...00010012       34            TRO V GRD         
-    211  00  YVQD97   0                        EOM
-  00001C02  0760FF80  88070000...00010012      136            TRO VW SH         
-    211  00  YBQD97   0                        EOM
-  00001C02  0760FF80  3D010000...00010012       61            SFC A PCP         
-    211  00  YEQD98   0                        EOM
-  00001C02  0760FF80  83010000...00010012      131            SFC LFT X         
-    211  00  YXQD98   0                        EOM
-  00001C02  0760FF80  296402BC...00010012       41    700.0 MB    ABS V         
-    211  00  YCQD70   0                        EOM
-  00001C02  0760FF80  296401F4...00010012       41    500.0 MB    ABS V         
-    211  00  YCQD50   0                        EOM
-  00001C02  0760FF80  9D010000...00010012      157          SFC   CAPE
-    211  00  YWQD98   0                        EOM
-  00001C02  0760FF80  9C010000...00010012      156          SFC   CIN
-    211  00  YYQD98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010012      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQD86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010012      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQD86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010012       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010012       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010012       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010012       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  0B749678...00010012       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010012       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQD86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010012       52   60 SPDY  30 SPDY  R H
-    211  00  YRQD86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010012       52   90 SPDY  60 SPDY  R H
-    211  00  YRQD86   0                        EOM
-  00001C02  0760FF80  3474785A...00010012       52  120 SPDY  90 SPDY  R H
-    211  00  YRQD86   0                        EOM
-  00001C02  0760FF80  34749678...00010012       52  150 SPDY 120 SPDY  R H
-    211  00  YRQD86   0                        EOM
-  00001C02  0760FF80  3474B496...00010012       52  180 SPDY 150 SPDY  R H
-    211  00  YRQD86   0                        EOM
-  00001C02  0760FF80  21741E00...00010012       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010012       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010012       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  2174785A...00010012       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  21749678...00010012       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  2174B496...00010012       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQD86   0                        EOM
-  00001C02  0760FF80  22741E00...00010012       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010012       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010012       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  2274785A...00010012       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  22749678...00010012       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  2274B496...00010012       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQD86   0                        EOM
-  00001C02  0760FF80  0B690002...00010012       11    2  HTGL     TMP
-    211  00  YTQD98   0                        EOM
-  00001C02  0760FF80  34690002...00010012       52    2  HTGL     R H
-    211  00  YRQD98   0                        EOM
-  00001C02  0760FF80  2169000A...00010012       33   10  HTGL     U GRD
-    211  00  YUQD98   0                        EOM
-  00001C02  0760FF80  2269000A...00010012       34   10  HTGL     V GRD
-    211  00  YVQD98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs024.211 b/parm/wmo/grib_awpgfs024.211
deleted file mode 100755
index 151fbbd679..0000000000
--- a/parm/wmo/grib_awpgfs024.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...00010018       07    1000.0 MB   HGT           
-    211  00  YHQE99   0                        EOM
-  00001C02  0760FF80  076403CF...00010018       07     975.0 MB   HGT
-    211  00  YHQE93   0                        EOM
-  00001C02  0760FF80  076403B6...00010018       07     950.0 MB   HGT
-    211  00  YHQE95   0                        EOM
-  00001C02  0760FF80  0764039D...00010018       07     925.0 MB   HGT
-    211  00  YHQE92   0                        EOM
-  00001C02  0760FF80  07640384...00010018       07     900.0 MB   HGT
-    211  00  YHQE90   0                        EOM
-  00001C02  0760FF80  0764036B...00010018       07     875.0 MB   HGT
-    211  00  YHQE91   0                        EOM
-  00001C02  0760FF80  07640352...00010018       07     850.0 MB   HGT           
-    211  00  YHQE85   0                        EOM
-  00001C02  0760FF80  07640339...00010018       07     825.0 MB   HGT
-    211  00  YHQE82   0                        EOM
-  00001C02  0760FF80  07640320...00010018       07     800.0 MB   HGT
-    211  00  YHQE80   0                        EOM
-  00001C02  0760FF80  07640307...00010018       07     775.0 MB   HGT
-    211  00  YHQE77   0                        EOM
-  00001C02  0760FF80  076402EE...00010018       07     750.0 MB   HGT
-    211  00  YHQE75   0                        EOM
-  00001C02  0760FF80  076402D5...00010018       07     725.0 MB   HGT
-    211  00  YHQE72   0                        EOM
-  00001C02  0760FF80  076402BC...00010018       07     700.0 MB   HGT           
-    211  00  YHQE70   0                        EOM
-  00001C02  0760FF80  076402A3...00010018       07     675.0 MB   HGT
-    211  00  YHQE67   0                        EOM
-  00001C02  0760FF80  0764028A...00010018       07     650.0 MB   HGT
-    211  00  YHQE65   0                        EOM
-  00001C02  0760FF80  07640271...00010018       07     625.0 MB   HGT
-    211  00  YHQE62   0                        EOM
-  00001C02  0760FF80  07640258...00010018       07     600.0 MB   HGT
-    211  00  YHQE60   0                        EOM
-  00001C02  0760FF80  0764023F...00010018       07     575.0 MB   HGT
-    211  00  YHQE57   0                        EOM
-  00001C02  0760FF80  07640226...00010018       07     550.0 MB   HGT
-    211  00  YHQE55   0                        EOM
-  00001C02  0760FF80  0764020D...00010018       07     525.0 MB   HGT
-    211  00  YHQE52   0                        EOM
-  00001C02  0760FF80  076401F4...00010018       07     500.0 MB   HGT           
-    211  00  YHQE50   0                        EOM
-  00001C02  0760FF80  076401C2...00010018       07     450.0 MB   HGT
-    211  00  YHQE45   0                        EOM
-  00001C02  0760FF80  07640190...00010018       07     400.0 MB   HGT           
-    211  00  YHQE40   0                        EOM
-  00001C02  0760FF80  0764015E...00010018       07     350.0 MB   HGT
-    211  00  YHQE35   0                        EOM
-  00001C02  0760FF80  0764012C...00010018       07     300.0 MB   HGT           
-    211  00  YHQE30   0                        EOM
-  00001C02  0760FF80  076400FA...00010018       07     250.0 MB   HGT           
-    211  00  YHQE25   0                        EOM
-  00001C02  0760FF80  076400C8...00010018       07     200.0 MB   HGT           
-    211  00  YHQE20   0                        EOM
-  00001C02  0760FF80  07640096...00010018       07     150.0 MB   HGT           
-    211  00  YHQE15   0                        EOM
-  00001C02  0760FF80  07640064...00010018       07     100.0 MB   HGT           
-    211  00  YHQE10   0                        EOM
-  00001C02  0760FF80  216403E8...00010018       33    1000.0 MB   U GRD
-    211  00  YUQE99   0                        EOM
-  00001C02  0760FF80  216403CF...00010018       33     975.0 MB   U GRD
-    211  00  YUQE93   0                        EOM
-  00001C02  0760FF80  216403B6...00010018       33     950.0 MB   U GRD
-    211  00  YUQE95   0                        EOM
-  00001C02  0760FF80  2164039D...00010018       33     925.0 MB   U GRD
-    211  00  YUQE92   0                        EOM
-  00001C02  0760FF80  21640384...00010018       33     900.0 MB   U GRD
-    211  00  YUQE90   0                        EOM
-  00001C02  0760FF80  2164036B...00010018       33     875.0 MB   U GRD
-    211  00  YUQE91   0                        EOM
-  00001C02  0760FF80  21640352...00010018       33     850.0 MB   U GRD         
-    211  00  YUQE85   0                        EOM
-  00001C02  0760FF80  21640339...00010018       33     825.0 MB   U GRD
-    211  00  YUQE82   0                        EOM
-  00001C02  0760FF80  21640320...00010018       33     800.0 MB   U GRD
-    211  00  YUQE80   0                        EOM
-  00001C02  0760FF80  21640307...00010018       33     775.0 MB   U GRD
-    211  00  YUQE77   0                        EOM
-  00001C02  0760FF80  216402EE...00010018       33     750.0 MB   U GRD
-    211  00  YUQE75   0                        EOM
-  00001C02  0760FF80  216402D5...00010018       33     725.0 MB   U GRD
-    211  00  YUQE72   0                        EOM
-  00001C02  0760FF80  216402BC...00010018       33     700.0 MB   U GRD         
-    211  00  YUQE70   0                        EOM
-  00001C02  0760FF80  216402A3...00010018       33     675.0 MB   U GRD
-    211  00  YUQE67   0                        EOM
-  00001C02  0760FF80  2164028A...00010018       33     650.0 MB   U GRD
-    211  00  YUQE65   0                        EOM
-  00001C02  0760FF80  21640271...00010018       33     625.0 MB   U GRD
-    211  00  YUQE62   0                        EOM
-  00001C02  0760FF80  21640258...00010018       33     600.0 MB   U GRD
-    211  00  YUQE60   0                        EOM
-  00001C02  0760FF80  2164023F...00010018       33     575.0 MB   U GRD
-    211  00  YUQE57   0                        EOM
-  00001C02  0760FF80  21640226...00010018       33     550.0 MB   U GRD
-    211  00  YUQE55   0                        EOM
-  00001C02  0760FF80  2164020D...00010018       33     525.0 MB   U GRD
-    211  00  YUQE52   0                        EOM
-  00001C02  0760FF80  216401F4...00010018       33     500.0 MB   U GRD         
-    211  00  YUQE50   0                        EOM
-  00001C02  0760FF80  216401C2...00010018       33     450.0 MB   U GRD
-    211  00  YUQE45   0                        EOM
-  00001C02  0760FF80  21640190...00010018       33     400.0 MB   U GRD         
-    211  00  YUQE40   0                        EOM
-  00001C02  0760FF80  2164015E...00010018       33     350.0 MB   U GRD
-    211  00  YUQE35   0                        EOM
-  00001C02  0760FF80  2164012C...00010018       33     300.0 MB   U GRD         
-    211  00  YUQE30   0                        EOM
-  00001C02  0760FF80  216400FA...00010018       33     250.0 MB   U GRD         
-    211  00  YUQE25   0                        EOM
-  00001C02  0760FF80  216400C8...00010018       33     200.0 MB   U GRD         
-    211  00  YUQE20   0                        EOM
-  00001C02  0760FF80  21640096...00010018       33     150.0 MB   U GRD         
-    211  00  YUQE15   0                        EOM
-  00001C02  0760FF80  21640064...00010018       33     100.0 MB   U GRD         
-    211  00  YUQE10   0                        EOM
-  00001C02  0760FF80  226403E8...00010018       34    1000.0 MB   V GRD
-    211  00  YVQE99   0                        EOM
-  00001C02  0760FF80  226403CF...00010018       34     975.0 MB   V GRD
-    211  00  YVQE93   0                        EOM
-  00001C02  0760FF80  226403B6...00010018       34     950.0 MB   V GRD
-    211  00  YVQE95   0                        EOM
-  00001C02  0760FF80  2264039D...00010018       34     925.0 MB   V GRD
-    211  00  YVQE92   0                        EOM
-  00001C02  0760FF80  22640384...00010018       34     900.0 MB   V GRD
-    211  00  YVQE90   0                        EOM
-  00001C02  0760FF80  2264036B...00010018       34     875.0 MB   V GRD
-    211  00  YVQE91   0                        EOM
-  00001C02  0760FF80  22640352...00010018       34     850.0 MB   V GRD         
-    211  00  YVQE85   0                        EOM
-  00001C02  0760FF80  22640339...00010018       34     825.0 MB   V GRD
-    211  00  YVQE82   0                        EOM
-  00001C02  0760FF80  22640320...00010018       34     800.0 MB   V GRD
-    211  00  YVQE80   0                        EOM
-  00001C02  0760FF80  22640307...00010018       34     775.0 MB   V GRD
-    211  00  YVQE77   0                        EOM
-  00001C02  0760FF80  226402EE...00010018       34     750.0 MB   V GRD
-    211  00  YVQE75   0                        EOM
-  00001C02  0760FF80  226402D5...00010018       34     725.0 MB   V GRD
-    211  00  YVQE72   0                        EOM
-  00001C02  0760FF80  226402BC...00010018       34     700.0 MB   V GRD         
-    211  00  YVQE70   0                        EOM
-  00001C02  0760FF80  226402A3...00010018       34     675.0 MB   V GRD
-    211  00  YVQE67   0                        EOM
-  00001C02  0760FF80  2264028A...00010018       34     650.0 MB   V GRD
-    211  00  YVQE65   0                        EOM
-  00001C02  0760FF80  22640271...00010018       34     625.0 MB   V GRD
-    211  00  YVQE62   0                        EOM
-  00001C02  0760FF80  22640258...00010018       34     600.0 MB   V GRD
-    211  00  YVQE60   0                        EOM
-  00001C02  0760FF80  2264023F...00010018       34     575.0 MB   V GRD
-    211  00  YVQE57   0                        EOM
-  00001C02  0760FF80  22640226...00010018       34     550.0 MB   V GRD
-    211  00  YVQE55   0                        EOM
-  00001C02  0760FF80  2264020D...00010018       34     525.0 MB   V GRD
-    211  00  YVQE52   0                        EOM
-  00001C02  0760FF80  226401F4...00010018       34     500.0 MB   V GRD         
-    211  00  YVQE50   0                        EOM
-  00001C02  0760FF80  226401C2...00010018       34     450.0 MB   V GRD
-    211  00  YVQE45   0                        EOM
-  00001C02  0760FF80  22640190...00010018       34     400.0 MB   V GRD         
-    211  00  YVQE40   0                        EOM
-  00001C02  0760FF80  2264015E...00010018       34     350.0 MB   V GRD
-    211  00  YVQE35   0                        EOM
-  00001C02  0760FF80  2264012C...00010018       34     300.0 MB   V GRD         
-    211  00  YVQE30   0                        EOM
-  00001C02  0760FF80  226400FA...00010018       34     250.0 MB   V GRD         
-    211  00  YVQE25   0                        EOM
-  00001C02  0760FF80  226400C8...00010018       34     200.0 MB   V GRD         
-    211  00  YVQE20   0                        EOM
-  00001C02  0760FF80  22640096...00010018       34     150.0 MB   V GRD         
-    211  00  YVQE15   0                        EOM
-  00001C02  0760FF80  22640064...00010018       34     100.0 MB   V GRD         
-    211  00  YVQE10   0                        EOM
-  00001C02  0760FF80  02660000...00010018       02           MSL  PRMSL         
-    211  00  YPQE89   0                        EOM
-  00001C02  0760FF80  346403E8...00010018       52    1000.0 MB   R H
-    211  00  YRQE99   0                        EOM
-  00001C02  0760FF80  346403CF...00010018       52     975.0 MB   R H
-    211  00  YRQE93   0                        EOM
-  00001C02  0760FF80  346403B6...00010018       52     950.0 MB   R H
-    211  00  YRQE95   0                        EOM
-  00001C02  0760FF80  3464039D...00010018       52     925.0 MB   R H
-    211  00  YRQE92   0                        EOM
-  00001C02  0760FF80  34640384...00010018       52     900.0 MB   R H
-    211  00  YRQE90   0                        EOM
-  00001C02  0760FF80  3464036B...00010018       52     875.0 MB   R H
-    211  00  YRQE91   0                        EOM
-  00001C02  0760FF80  34640352...00010018       52     850.0 MB   R H           
-    211  00  YRQE85   0                        EOM
-  00001C02  0760FF80  34640339...00010018       52     825.0 MB   R H
-    211  00  YRQE82   0                        EOM
-  00001C02  0760FF80  34640320...00010018       52     800.0 MB   R H
-    211  00  YRQE80   0                        EOM
-  00001C02  0760FF80  34640307...00010018       52     775.0 MB   R H
-    211  00  YRQE77   0                        EOM
-  00001C02  0760FF80  346402EE...00010018       52     750.0 MB   R H
-    211  00  YRQE75   0                        EOM
-  00001C02  0760FF80  346402D5...00010018       52     725.0 MB   R H
-    211  00  YRQE72   0                        EOM
-  00001C02  0760FF80  346402BC...00010018       52     700.0 MB   R H           
-    211  00  YRQE70   0                        EOM
-  00001C02  0760FF80  346402A3...00010018       52     675.0 MB   R H
-    211  00  YRQE67   0                        EOM
-  00001C02  0760FF80  3464028A...00010018       52     650.0 MB   R H
-    211  00  YRQE65   0                        EOM
-  00001C02  0760FF80  34640271...00010018       52     625.0 MB   R H
-    211  00  YRQE62   0                        EOM
-  00001C02  0760FF80  34640258...00010018       52     600.0 MB   R H
-    211  00  YRQE60   0                        EOM
-  00001C02  0760FF80  3464023F...00010018       52     575.0 MB   R H
-    211  00  YRQE57   0                        EOM
-  00001C02  0760FF80  34640226...00010018       52     550.0 MB   R H
-    211  00  YRQE55   0                        EOM
-  00001C02  0760FF80  3464020D...00010018       52     525.0 MB   R H
-    211  00  YRQE52   0                        EOM
-  00001C02  0760FF80  346401F4...00010018       52     500.0 MB   R H           
-    211  00  YRQE50   0                        EOM
-  00001C02  0760FF80  346401C2...00010018       52     450.0 MB   R H
-    211  00  YRQE45   0                        EOM
-  00001C02  0760FF80  34640190...00010018       52     400.0 MB   R H           
-    211  00  YRQE40   0                        EOM
-  00001C02  0760FF80  3464015E...00010018       52     350.0 MB   R H
-    211  00  YRQE35   0                        EOM
-  00001C02  0760FF80  3464012C...00010018       52     300.0 MB   R H           
-    211  00  YRQE30   0                        EOM
-  00001C02  0760FF80  346400FA...00010018       52     250.0 MB   R H
-    211  00  YRQE25   0                        EOM
-  00001C02  0760FF80  346400C8...00010018       52     200.0 MB   R H
-    211  00  YRQE20   0                        EOM
-  00001C02  0760FF80  34640096...00010018       52     150.0 MB   R H
-    211  00  YRQE15   0                        EOM
-  00001C02  0760FF80  34640064...00010018       52     100.0 MB   R H
-    211  00  YRQE10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010018       11    1000.0 MB   TMP
-    211  00  YTQE99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010018       11     975.0 MB   TMP
-    211  00  YTQE93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010018       11     950.0 MB   TMP
-    211  00  YTQE95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010018       11     925.0 MB   TMP
-    211  00  YTQE92   0                        EOM
-  00001C02  0760FF80  0B640384...00010018       11     900.0 MB   TMP
-    211  00  YTQE90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010018       11     875.0 MB   TMP
-    211  00  YTQE91   0                        EOM
-  00001C02  0760FF80  0B640352...00010018       11     850.0 MB   TMP           
-    211  00  YTQE85   0                        EOM
-  00001C02  0760FF80  0B640339...00010018       11     825.0 MB   TMP
-    211  00  YTQE82   0                        EOM
-  00001C02  0760FF80  0B640320...00010018       11     800.0 MB   TMP
-    211  00  YTQE80   0                        EOM
-  00001C02  0760FF80  0B640307...00010018       11     775.0 MB   TMP
-    211  00  YTQE77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010018       11     750.0 MB   TMP
-    211  00  YTQE75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010018       11     725.0 MB   TMP
-    211  00  YTQE72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010018       11     700.0 MB   TMP           
-    211  00  YTQE70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010018       11     675.0 MB   TMP
-    211  00  YTQE67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010018       11     650.0 MB   TMP
-    211  00  YTQE65   0                        EOM
-  00001C02  0760FF80  0B640271...00010018       11     625.0 MB   TMP
-    211  00  YTQE62   0                        EOM
-  00001C02  0760FF80  0B640258...00010018       11     600.0 MB   TMP
-    211  00  YTQE60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010018       11     575.0 MB   TMP
-    211  00  YTQE57   0                        EOM
-  00001C02  0760FF80  0B640226...00010018       11     550.0 MB   TMP
-    211  00  YTQE55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010018       11     525.0 MB   TMP
-    211  00  YTQE52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010018       11     500.0 MB   TMP           
-    211  00  YTQE50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010018       11     450.0 MB   TMP
-    211  00  YTQE45   0                        EOM
-  00001C02  0760FF80  0B640190...00010018       11     400.0 MB   TMP           
-    211  00  YTQE40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010018       11     350.0 MB   TMP
-    211  00  YTQE35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010018       11     300.0 MB   TMP           
-    211  00  YTQE30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010018       11     250.0 MB   TMP           
-    211  00  YTQE25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010018       11     200.0 MB   TMP           
-    211  00  YTQE20   0                        EOM
-  00001C02  0760FF80  0B640096...00010018       11     150.0 MB   TMP           
-    211  00  YTQE15   0                        EOM
-  00001C02  0760FF80  0B640064...00010018       11     100.0 MB   TMP           
-    211  00  YTQE10   0                        EOM
-  00001C02  0760FF80  28640352...00010018       40     850.0 MB  DZDT           
-    211  00  YOQE85   0                        EOM
-  00001C02  0760FF80  286402BC...00010018       40     700.0 MB  DZDT           
-    211  00  YOQE70   0                        EOM
-  00001C02  0760FF80  286401F4...00010018       40     500.0 MB  DZDT           
-    211  00  YOQE50   0                        EOM
-  00001C02  0760FF80  28640190...00010018       40     400.0 MB  DZDT           
-    211  00  YOQE40   0                        EOM
-  00001C02  0760FF80  2864012C...00010018       40     300.0 MB  DZDT           
-    211  00  YOQE30   0                        EOM
-  00001C02  0760FF80  286400FA...00010018       40     250.0 MB  DZDT           
-    211  00  YOQE25   0                        EOM
-  00001C02  0760FF80  286400C8...00010018       40     200.0 MB  DZDT           
-    211  00  YOQE20   0                        EOM
-  00001C02  0760FF80  28640096...00010018       40     150.0 MB  DZDT           
-    211  00  YOQE15   0                        EOM
-  00001C02  0760FF80  28640064...00010018       40     100.0 MB  DZDT           
-    211  00  YOQE10   0                        EOM
-  00001C02  0760FF80  01010000...00010018       01          SFC  PRES           
-    211  00  YPQE98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010018       52        44/100  R H           
-    211  00  YRQE00   0                        EOM
-  00001C02  0760FF80  36C80000...00010018       54          EATM  P WAT         
-    211  00  YFQE00   0                        EOM
-  00001C02  0760FF80  0B690002...00010018       11          2m/SFC TMP         
-    211  00  YTQE98   0                        EOM
-  00001C02  0760FF80  34741E00...00010018       52    BNDRY/SPD   R H  
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  0B070000...00010018       11            TRO TMP           
-    211  00  YTQE97   0                        EOM
-  00001C02  0760FF80  01070000...00010018       01            TRO PRES          
-    211  00  YPQE97   0                        EOM
-  00001C02  0760FF80  21741E00...00010018       33           SPD  U GRD         
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  22741E00...00010018       34           SPD  V GRD         
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  21070000...00010018       33            TRO U GRD         
-    211  00  YUQE97   0                        EOM
-  00001C02  0760FF80  22070000...00010018       34            TRO V GRD         
-    211  00  YVQE97   0                        EOM
-  00001C02  0760FF80  88070000...00010018      136            TRO VW SH         
-    211  00  YBQE97   0                        EOM
-  00001C02  0760FF80  3D010000...00010018       61            SFC A PCP         
-    211  00  YEQE98   0                        EOM
-  00001C02  0760FF80  83010000...00010018      131            SFC LFT X         
-    211  00  YXQE98   0                        EOM
-  00001C02  0760FF80  296402BC...00010018       41    700.0 MB    ABS V         
-    211  00  YCQE70   0                        EOM
-  00001C02  0760FF80  296401F4...00010018       41    500.0 MB    ABS V         
-    211  00  YCQE50   0                        EOM
-  00001C02  0760FF80  9D010000...00010018      157          SFC   CAPE
-    211  00  YWQE98   0                        EOM
-  00001C02  0760FF80  9C010000...00010018      156          SFC   CIN
-    211  00  YYQE98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010018      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQE86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010018      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQE86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010018       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010018       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010018       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010018       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  0B749678...00010018       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010018       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQE86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010018       52   60 SPDY  30 SPDY  R H
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010018       52   90 SPDY  60 SPDY  R H
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  3474785A...00010018       52  120 SPDY  90 SPDY  R H
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  34749678...00010018       52  150 SPDY 120 SPDY  R H
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  3474B496...00010018       52  180 SPDY 150 SPDY  R H
-    211  00  YRQE86   0                        EOM
-  00001C02  0760FF80  21741E00...00010018       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010018       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010018       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  2174785A...00010018       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  21749678...00010018       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  2174B496...00010018       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQE86   0                        EOM
-  00001C02  0760FF80  22741E00...00010018       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010018       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010018       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  2274785A...00010018       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  22749678...00010018       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  2274B496...00010018       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQE86   0                        EOM
-  00001C02  0760FF80  0B690002...00010018       11    2  HTGL     TMP
-    211  00  YTQE98   0                        EOM
-  00001C02  0760FF80  34690002...00010018       52    2  HTGL     R H
-    211  00  YRQE98   0                        EOM
-  00001C02  0760FF80  2169000A...00010018       33   10  HTGL     U GRD
-    211  00  YUQE98   0                        EOM
-  00001C02  0760FF80  2269000A...00010018       34   10  HTGL     V GRD
-    211  00  YVQE98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs030.211 b/parm/wmo/grib_awpgfs030.211
deleted file mode 100755
index 55fd7b382a..0000000000
--- a/parm/wmo/grib_awpgfs030.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...0001001E       07    1000.0 MB   HGT           
-    211  00  YHQF99   0                        EOM
-  00001C02  0760FF80  076403CF...0001001E       07     975.0 MB   HGT
-    211  00  YHQF93   0                        EOM
-  00001C02  0760FF80  076403B6...0001001E       07     950.0 MB   HGT
-    211  00  YHQF95   0                        EOM
-  00001C02  0760FF80  0764039D...0001001E       07     925.0 MB   HGT
-    211  00  YHQF92   0                        EOM
-  00001C02  0760FF80  07640384...0001001E       07     900.0 MB   HGT
-    211  00  YHQF90   0                        EOM
-  00001C02  0760FF80  0764036B...0001001E       07     875.0 MB   HGT
-    211  00  YHQF91   0                        EOM
-  00001C02  0760FF80  07640352...0001001E       07     850.0 MB   HGT           
-    211  00  YHQF85   0                        EOM
-  00001C02  0760FF80  07640339...0001001E       07     825.0 MB   HGT
-    211  00  YHQF82   0                        EOM
-  00001C02  0760FF80  07640320...0001001E       07     800.0 MB   HGT
-    211  00  YHQF80   0                        EOM
-  00001C02  0760FF80  07640307...0001001E       07     775.0 MB   HGT
-    211  00  YHQF77   0                        EOM
-  00001C02  0760FF80  076402EE...0001001E       07     750.0 MB   HGT
-    211  00  YHQF75   0                        EOM
-  00001C02  0760FF80  076402D5...0001001E       07     725.0 MB   HGT
-    211  00  YHQF72   0                        EOM
-  00001C02  0760FF80  076402BC...0001001E       07     700.0 MB   HGT           
-    211  00  YHQF70   0                        EOM
-  00001C02  0760FF80  076402A3...0001001E       07     675.0 MB   HGT
-    211  00  YHQF67   0                        EOM
-  00001C02  0760FF80  0764028A...0001001E       07     650.0 MB   HGT
-    211  00  YHQF65   0                        EOM
-  00001C02  0760FF80  07640271...0001001E       07     625.0 MB   HGT
-    211  00  YHQF62   0                        EOM
-  00001C02  0760FF80  07640258...0001001E       07     600.0 MB   HGT
-    211  00  YHQF60   0                        EOM
-  00001C02  0760FF80  0764023F...0001001E       07     575.0 MB   HGT
-    211  00  YHQF57   0                        EOM
-  00001C02  0760FF80  07640226...0001001E       07     550.0 MB   HGT
-    211  00  YHQF55   0                        EOM
-  00001C02  0760FF80  0764020D...0001001E       07     525.0 MB   HGT
-    211  00  YHQF52   0                        EOM
-  00001C02  0760FF80  076401F4...0001001E       07     500.0 MB   HGT           
-    211  00  YHQF50   0                        EOM
-  00001C02  0760FF80  076401C2...0001001E       07     450.0 MB   HGT
-    211  00  YHQF45   0                        EOM
-  00001C02  0760FF80  07640190...0001001E       07     400.0 MB   HGT           
-    211  00  YHQF40   0                        EOM
-  00001C02  0760FF80  0764015E...0001001E       07     350.0 MB   HGT
-    211  00  YHQF35   0                        EOM
-  00001C02  0760FF80  0764012C...0001001E       07     300.0 MB   HGT           
-    211  00  YHQF30   0                        EOM
-  00001C02  0760FF80  076400FA...0001001E       07     250.0 MB   HGT           
-    211  00  YHQF25   0                        EOM
-  00001C02  0760FF80  076400C8...0001001E       07     200.0 MB   HGT           
-    211  00  YHQF20   0                        EOM
-  00001C02  0760FF80  07640096...0001001E       07     150.0 MB   HGT           
-    211  00  YHQF15   0                        EOM
-  00001C02  0760FF80  07640064...0001001E       07     100.0 MB   HGT           
-    211  00  YHQF10   0                        EOM
-  00001C02  0760FF80  216403E8...0001001E       33    1000.0 MB   U GRD
-    211  00  YUQF99   0                        EOM
-  00001C02  0760FF80  216403CF...0001001E       33     975.0 MB   U GRD
-    211  00  YUQF93   0                        EOM
-  00001C02  0760FF80  216403B6...0001001E       33     950.0 MB   U GRD
-    211  00  YUQF95   0                        EOM
-  00001C02  0760FF80  2164039D...0001001E       33     925.0 MB   U GRD
-    211  00  YUQF92   0                        EOM
-  00001C02  0760FF80  21640384...0001001E       33     900.0 MB   U GRD
-    211  00  YUQF90   0                        EOM
-  00001C02  0760FF80  2164036B...0001001E       33     875.0 MB   U GRD
-    211  00  YUQF91   0                        EOM
-  00001C02  0760FF80  21640352...0001001E       33     850.0 MB   U GRD         
-    211  00  YUQF85   0                        EOM
-  00001C02  0760FF80  21640339...0001001E       33     825.0 MB   U GRD
-    211  00  YUQF82   0                        EOM
-  00001C02  0760FF80  21640320...0001001E       33     800.0 MB   U GRD
-    211  00  YUQF80   0                        EOM
-  00001C02  0760FF80  21640307...0001001E       33     775.0 MB   U GRD
-    211  00  YUQF77   0                        EOM
-  00001C02  0760FF80  216402EE...0001001E       33     750.0 MB   U GRD
-    211  00  YUQF75   0                        EOM
-  00001C02  0760FF80  216402D5...0001001E       33     725.0 MB   U GRD
-    211  00  YUQF72   0                        EOM
-  00001C02  0760FF80  216402BC...0001001E       33     700.0 MB   U GRD         
-    211  00  YUQF70   0                        EOM
-  00001C02  0760FF80  216402A3...0001001E       33     675.0 MB   U GRD
-    211  00  YUQF67   0                        EOM
-  00001C02  0760FF80  2164028A...0001001E       33     650.0 MB   U GRD
-    211  00  YUQF65   0                        EOM
-  00001C02  0760FF80  21640271...0001001E       33     625.0 MB   U GRD
-    211  00  YUQF62   0                        EOM
-  00001C02  0760FF80  21640258...0001001E       33     600.0 MB   U GRD
-    211  00  YUQF60   0                        EOM
-  00001C02  0760FF80  2164023F...0001001E       33     575.0 MB   U GRD
-    211  00  YUQF57   0                        EOM
-  00001C02  0760FF80  21640226...0001001E       33     550.0 MB   U GRD
-    211  00  YUQF55   0                        EOM
-  00001C02  0760FF80  2164020D...0001001E       33     525.0 MB   U GRD
-    211  00  YUQF52   0                        EOM
-  00001C02  0760FF80  216401F4...0001001F       33     500.0 MB   U GRD         
-    211  00  YUQF50   0                        EOM
-  00001C02  0760FF80  216401C2...0001001E       33     450.0 MB   U GRD
-    211  00  YUQF45   0                        EOM
-  00001C02  0760FF80  21640190...0001001E       33     400.0 MB   U GRD         
-    211  00  YUQF40   0                        EOM
-  00001C02  0760FF80  2164015E...0001001E       33     350.0 MB   U GRD
-    211  00  YUQF35   0                        EOM
-  00001C02  0760FF80  2164012C...0001001E       33     300.0 MB   U GRD         
-    211  00  YUQF30   0                        EOM
-  00001C02  0760FF80  216400FA...0001001E       33     250.0 MB   U GRD         
-    211  00  YUQF25   0                        EOM
-  00001C02  0760FF80  216400C8...0001001E       33     200.0 MB   U GRD         
-    211  00  YUQF20   0                        EOM
-  00001C02  0760FF80  21640096...0001001F       33     150.0 MB   U GRD         
-    211  00  YUQF15   0                        EOM
-  00001C02  0760FF80  21640064...0001001E       33     100.0 MB   U GRD         
-    211  00  YUQF10   0                        EOM
-  00001C02  0760FF80  226403E8...0001001E       34    1000.0 MB   V GRD
-    211  00  YVQF99   0                        EOM
-  00001C02  0760FF80  226403CF...0001001E       34     975.0 MB   V GRD
-    211  00  YVQF93   0                        EOM
-  00001C02  0760FF80  226403B6...0001001E       34     950.0 MB   V GRD
-    211  00  YVQF95   0                        EOM
-  00001C02  0760FF80  2264039D...0001001E       34     925.0 MB   V GRD
-    211  00  YVQF92   0                        EOM
-  00001C02  0760FF80  22640384...0001001E       34     900.0 MB   V GRD
-    211  00  YVQF90   0                        EOM
-  00001C02  0760FF80  2264036B...0001001E       34     875.0 MB   V GRD
-    211  00  YVQF91   0                        EOM
-  00001C02  0760FF80  22640352...0001001E       34     850.0 MB   V GRD         
-    211  00  YVQF85   0                        EOM
-  00001C02  0760FF80  22640339...0001001E       34     825.0 MB   V GRD
-    211  00  YVQF82   0                        EOM
-  00001C02  0760FF80  22640320...0001001E       34     800.0 MB   V GRD
-    211  00  YVQF80   0                        EOM
-  00001C02  0760FF80  22640307...0001001E       34     775.0 MB   V GRD
-    211  00  YVQF77   0                        EOM
-  00001C02  0760FF80  226402EE...0001001E       34     750.0 MB   V GRD
-    211  00  YVQF75   0                        EOM
-  00001C02  0760FF80  226402D5...0001001E       34     725.0 MB   V GRD
-    211  00  YVQF72   0                        EOM
-  00001C02  0760FF80  226402BC...0001001E       34     700.0 MB   V GRD         
-    211  00  YVQF70   0                        EOM
-  00001C02  0760FF80  226402A3...0001001E       34     675.0 MB   V GRD
-    211  00  YVQF67   0                        EOM
-  00001C02  0760FF80  2264028A...0001001E       34     650.0 MB   V GRD
-    211  00  YVQF65   0                        EOM
-  00001C02  0760FF80  22640271...0001001E       34     625.0 MB   V GRD
-    211  00  YVQF62   0                        EOM
-  00001C02  0760FF80  22640258...0001001E       34     600.0 MB   V GRD
-    211  00  YVQF60   0                        EOM
-  00001C02  0760FF80  2264023F...0001001E       34     575.0 MB   V GRD
-    211  00  YVQF57   0                        EOM
-  00001C02  0760FF80  22640226...0001001E       34     550.0 MB   V GRD
-    211  00  YVQF55   0                        EOM
-  00001C02  0760FF80  2264020D...0001001E       34     525.0 MB   V GRD
-    211  00  YVQF52   0                        EOM
-  00001C02  0760FF80  226401F4...0001001E       34     500.0 MB   V GRD         
-    211  00  YVQF50   0                        EOM
-  00001C02  0760FF80  226401C2...0001001E       34     450.0 MB   V GRD
-    211  00  YVQF45   0                        EOM
-  00001C02  0760FF80  22640190...0001001E       34     400.0 MB   V GRD         
-    211  00  YVQF40   0                        EOM
-  00001C02  0760FF80  2264015E...0001001E       34     350.0 MB   V GRD
-    211  00  YVQF35   0                        EOM
-  00001C02  0760FF80  2264012C...0001001E       34     300.0 MB   V GRD         
-    211  00  YVQF30   0                        EOM
-  00001C02  0760FF80  226400FA...0001001E       34     250.0 MB   V GRD         
-    211  00  YVQF25   0                        EOM
-  00001C02  0760FF80  226400C8...0001001E       34     200.0 MB   V GRD         
-    211  00  YVQF20   0                        EOM
-  00001C02  0760FF80  22640096...0001001E       34     150.0 MB   V GRD         
-    211  00  YVQF15   0                        EOM
-  00001C02  0760FF80  22640064...0001001E       34     100.0 MB   V GRD         
-    211  00  YVQF10   0                        EOM
-  00001C02  0760FF80  02660000...0001001E       02           MSL  PRMSL         
-    211  00  YPQF89   0                        EOM
-  00001C02  0760FF80  346403E8...0001001E       52    1000.0 MB   R H
-    211  00  YRQF99   0                        EOM
-  00001C02  0760FF80  346403CF...0001001E       52     975.0 MB   R H
-    211  00  YRQF93   0                        EOM
-  00001C02  0760FF80  346403B6...0001001E       52     950.0 MB   R H
-    211  00  YRQF95   0                        EOM
-  00001C02  0760FF80  3464039D...0001001E       52     925.0 MB   R H
-    211  00  YRQF92   0                        EOM
-  00001C02  0760FF80  34640384...0001001E       52     900.0 MB   R H
-    211  00  YRQF90   0                        EOM
-  00001C02  0760FF80  3464036B...0001001E       52     875.0 MB   R H
-    211  00  YRQF91   0                        EOM
-  00001C02  0760FF80  34640352...0001001E       52     850.0 MB   R H           
-    211  00  YRQF85   0                        EOM
-  00001C02  0760FF80  34640339...0001001E       52     825.0 MB   R H
-    211  00  YRQF82   0                        EOM
-  00001C02  0760FF80  34640320...0001001E       52     800.0 MB   R H
-    211  00  YRQF80   0                        EOM
-  00001C02  0760FF80  34640307...0001001E       52     775.0 MB   R H
-    211  00  YRQF77   0                        EOM
-  00001C02  0760FF80  346402EE...0001001E       52     750.0 MB   R H
-    211  00  YRQF75   0                        EOM
-  00001C02  0760FF80  346402D5...0001001E       52     725.0 MB   R H
-    211  00  YRQF72   0                        EOM
-  00001C02  0760FF80  346402BC...0001001E       52     700.0 MB   R H           
-    211  00  YRQF70   0                        EOM
-  00001C02  0760FF80  346402A3...0001001E       52     675.0 MB   R H
-    211  00  YRQF67   0                        EOM
-  00001C02  0760FF80  3464028A...0001001E       52     650.0 MB   R H
-    211  00  YRQF65   0                        EOM
-  00001C02  0760FF80  34640271...0001001E       52     625.0 MB   R H
-    211  00  YRQF62   0                        EOM
-  00001C02  0760FF80  34640258...0001001E       52     600.0 MB   R H
-    211  00  YRQF60   0                        EOM
-  00001C02  0760FF80  3464023F...0001001E       52     575.0 MB   R H
-    211  00  YRQF57   0                        EOM
-  00001C02  0760FF80  34640226...0001001E       52     550.0 MB   R H
-    211  00  YRQF55   0                        EOM
-  00001C02  0760FF80  3464020D...0001001E       52     525.0 MB   R H
-    211  00  YRQF52   0                        EOM
-  00001C02  0760FF80  346401F4...0001001E       52     500.0 MB   R H           
-    211  00  YRQF50   0                        EOM
-  00001C02  0760FF80  346401C2...0001001E       52     450.0 MB   R H
-    211  00  YRQF45   0                        EOM
-  00001C02  0760FF80  34640190...0001001E       52     400.0 MB   R H           
-    211  00  YRQF40   0                        EOM
-  00001C02  0760FF80  3464015E...0001001E       52     350.0 MB   R H
-    211  00  YRQF35   0                        EOM
-  00001C02  0760FF80  3464012C...0001001E       52     300.0 MB   R H           
-    211  00  YRQF30   0                        EOM
-  00001C02  0760FF80  346400FA...0001001E       52     250.0 MB   R H
-    211  00  YRQF25   0                        EOM
-  00001C02  0760FF80  346400C8...0001001E       52     200.0 MB   R H
-    211  00  YRQF20   0                        EOM
-  00001C02  0760FF80  34640096...0001001E       52     150.0 MB   R H
-    211  00  YRQF15   0                        EOM
-  00001C02  0760FF80  34640064...0001001E       52     100.0 MB   R H
-    211  00  YRQF10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001001E       11    1000.0 MB   TMP
-    211  00  YTQF99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001001E       11     975.0 MB   TMP
-    211  00  YTQF93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001001E       11     950.0 MB   TMP
-    211  00  YTQF95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001001E       11     925.0 MB   TMP
-    211  00  YTQF92   0                        EOM
-  00001C02  0760FF80  0B640384...0001001E       11     900.0 MB   TMP
-    211  00  YTQF90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001001E       11     875.0 MB   TMP
-    211  00  YTQF91   0                        EOM
-  00001C02  0760FF80  0B640352...0001001E       11     850.0 MB   TMP           
-    211  00  YTQF85   0                        EOM
-  00001C02  0760FF80  0B640339...0001001E       11     825.0 MB   TMP
-    211  00  YTQF82   0                        EOM
-  00001C02  0760FF80  0B640320...0001001E       11     800.0 MB   TMP
-    211  00  YTQF80   0                        EOM
-  00001C02  0760FF80  0B640307...0001001E       11     775.0 MB   TMP
-    211  00  YTQF77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001001E       11     750.0 MB   TMP
-    211  00  YTQF75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001001E       11     725.0 MB   TMP
-    211  00  YTQF72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001001E       11     700.0 MB   TMP           
-    211  00  YTQF70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001001E       11     675.0 MB   TMP
-    211  00  YTQF67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001001E       11     650.0 MB   TMP
-    211  00  YTQF65   0                        EOM
-  00001C02  0760FF80  0B640271...0001001E       11     625.0 MB   TMP
-    211  00  YTQF62   0                        EOM
-  00001C02  0760FF80  0B640258...0001001E       11     600.0 MB   TMP
-    211  00  YTQF60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001001E       11     575.0 MB   TMP
-    211  00  YTQF57   0                        EOM
-  00001C02  0760FF80  0B640226...0001001E       11     550.0 MB   TMP
-    211  00  YTQF55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001001E       11     525.0 MB   TMP
-    211  00  YTQF52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001001E       11     500.0 MB   TMP           
-    211  00  YTQF50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001001E       11     450.0 MB   TMP
-    211  00  YTQF45   0                        EOM
-  00001C02  0760FF80  0B640190...0001001E       11     400.0 MB   TMP           
-    211  00  YTQF40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001001E       11     350.0 MB   TMP
-    211  00  YTQF35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001001E       11     300.0 MB   TMP           
-    211  00  YTQF30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001001E       11     250.0 MB   TMP           
-    211  00  YTQF25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001001E       11     200.0 MB   TMP           
-    211  00  YTQF20   0                        EOM
-  00001C02  0760FF80  0B640096...0001001E       11     150.0 MB   TMP           
-    211  00  YTQF15   0                        EOM
-  00001C02  0760FF80  0B640064...0001001E       11     100.0 MB   TMP           
-    211  00  YTQF10   0                        EOM
-  00001C02  0760FF80  28640352...0001001E       40     850.0 MB  DZDT           
-    211  00  YOQF85   0                        EOM 
-  00001C02  0760FF80  286402BC...0001001E       40     700.0 MB  DZDT           
-    211  00  YOQF70   0                        EOM
-  00001C02  0760FF80  286401F4...0001001E       40     500.0 MB  DZDT           
-    211  00  YOQF50   0                        EOM
-  00001C02  0760FF80  28640190...0001001E       40     400.0 MB  DZDT           
-    211  00  YOQF40   0                        EOM
-  00001C02  0760FF80  2864012C...0001001E       40     300.0 MB  DZDT           
-    211  00  YOQF30   0                        EOM
-  00001C02  0760FF80  286400FA...0001001E       40     250.0 MB  DZDT           
-    211  00  YOQF25   0                        EOM
-  00001C02  0760FF80  286400C8...0001001E       40     200.0 MB  DZDT           
-    211  00  YOQF20   0                        EOM
-  00001C02  0760FF80  28640096...0001001E       40     150.0 MB  DZDT           
-    211  00  YOQF15   0                        EOM
-  00001C02  0760FF80  28640064...0001001E       40     100.0 MB  DZDT           
-    211  00  YOQF10   0                        EOM
-  00001C02  0760FF80  01010000...0001001E       01          SFC  PRES           
-    211  00  YPQF98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001001E       52        44/100  R H           
-    211  00  YRQF00   0                        EOM
-  00001C02  0760FF80  36C80000...0001001E       54          EATM  P WAT         
-    211  00  YFQF00   0                        EOM
-  00001C02  0760FF80  0B690002...0001001E       11          2m/SFC TMP         
-    211  00  YTQF98   0                        EOM
-  00001C02  0760FF80  34741E00...0001001E       52      BNDRY/SPD  R H 
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  0B070000...0001001E       11            TRO TMP           
-    211  00  YTQF97   0                        EOM
-  00001C02  0760FF80  01070000...0001001E       01            TRO PRES          
-    211  00  YPQF97   0                        EOM
-  00001C02  0760FF80  21741E00...0001001E       33           SPD  U GRD         
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  22741E00...0001001E       34           SPD  V GRD         
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  21070000...0001001E       33            TRO U GRD         
-    211  00  YUQF97   0                        EOM
-  00001C02  0760FF80  22070000...0001001E       34            TRO V GRD         
-    211  00  YVQF97   0                        EOM
-  00001C02  0760FF80  88070000...0001001E      136            TRO VW SH         
-    211  00  YBQF97   0                        EOM
-  00001C02  0760FF80  3D010000...0001001E       61            SFC A PCP         
-    211  00  YEQF98   0                        EOM
-  00001C02  0760FF80  83010000...0001001E      131            SFC LFT X         
-    211  00  YXQF98   0                        EOM
-  00001C02  0760FF80  296402BC...0001001E       41    700.0 MB    ABS V         
-    211  00  YCQF70   0                        EOM
-  00001C02  0760FF80  296401F4...0001001E       41    500.0 MB    ABS V         
-    211  00  YCQF50   0                        EOM
-  00001C02  0760FF80  9D010000...0001001E      157          SFC   CAPE
-    211  00  YWQF98   0                        EOM
-  00001C02  0760FF80  9C010000...0001001E      156          SFC   CIN
-    211  00  YYQF98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001001E      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQF86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001001E      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQF86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001001E       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001001E       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001001E       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001001E       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  0B749678...0001001E       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001001E       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQF86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001001E       52   60 SPDY  30 SPDY  R H
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001001E       52   90 SPDY  60 SPDY  R H
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  3474785A...0001001E       52  120 SPDY  90 SPDY  R H
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  34749678...0001001E       52  150 SPDY 120 SPDY  R H
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  3474B496...0001001E       52  180 SPDY 150 SPDY  R H
-    211  00  YRQF86   0                        EOM
-  00001C02  0760FF80  21741E00...0001001E       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001001E       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001001E       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  2174785A...0001001E       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  21749678...0001001E       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  2174B496...0001001E       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQF86   0                        EOM
-  00001C02  0760FF80  22741E00...0001001E       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001001E       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001001E       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  2274785A...0001001E       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  22749678...0001001E       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  2274B496...0001001E       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQF86   0                        EOM
-  00001C02  0760FF80  0B690002...0001001E       11    2  HTGL     TMP
-    211  00  YTQF98   0                        EOM
-  00001C02  0760FF80  34690002...0001001E       52    2  HTGL     R H
-    211  00  YRQF98   0                        EOM
-  00001C02  0760FF80  2169000A...0001001E       33   10  HTGL     U GRD
-    211  00  YUQF98   0                        EOM
-  00001C02  0760FF80  2269000A...0001001E       34   10  HTGL     V GRD
-    211  00  YVQF98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs036.211 b/parm/wmo/grib_awpgfs036.211
deleted file mode 100755
index 4b11fb378d..0000000000
--- a/parm/wmo/grib_awpgfs036.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...00010024       07    1000.0 MB   HGT           
-    211  00  YHQG99   0                        EOM
-  00001C02  0760FF80  076403CF...00010024       07     975.0 MB   HGT
-    211  00  YHQG93   0                        EOM
-  00001C02  0760FF80  076403B6...00010024       07     950.0 MB   HGT
-    211  00  YHQG95   0                        EOM
-  00001C02  0760FF80  0764039D...00010024       07     925.0 MB   HGT
-    211  00  YHQG92   0                        EOM
-  00001C02  0760FF80  07640384...00010024       07     900.0 MB   HGT
-    211  00  YHQG90   0                        EOM
-  00001C02  0760FF80  0764036B...00010024       07     875.0 MB   HGT
-    211  00  YHQG91   0                        EOM
-  00001C02  0760FF80  07640352...00010024       07     850.0 MB   HGT           
-    211  00  YHQG85   0                        EOM
-  00001C02  0760FF80  07640339...00010024       07     825.0 MB   HGT
-    211  00  YHQG82   0                        EOM
-  00001C02  0760FF80  07640320...00010024       07     800.0 MB   HGT
-    211  00  YHQG80   0                        EOM
-  00001C02  0760FF80  07640307...00010024       07     775.0 MB   HGT
-    211  00  YHQG77   0                        EOM
-  00001C02  0760FF80  076402EE...00010024       07     750.0 MB   HGT
-    211  00  YHQG75   0                        EOM
-  00001C02  0760FF80  076402D5...00010024       07     725.0 MB   HGT
-    211  00  YHQG72   0                        EOM
-  00001C02  0760FF80  076402BC...00010024       07     700.0 MB   HGT           
-    211  00  YHQG70   0                        EOM
-  00001C02  0760FF80  076402A3...00010024       07     675.0 MB   HGT
-    211  00  YHQG67   0                        EOM
-  00001C02  0760FF80  0764028A...00010024       07     650.0 MB   HGT
-    211  00  YHQG65   0                        EOM
-  00001C02  0760FF80  07640271...00010024       07     625.0 MB   HGT
-    211  00  YHQG62   0                        EOM
-  00001C02  0760FF80  07640258...00010024       07     600.0 MB   HGT
-    211  00  YHQG60   0                        EOM
-  00001C02  0760FF80  0764023F...00010024       07     575.0 MB   HGT
-    211  00  YHQG57   0                        EOM
-  00001C02  0760FF80  07640226...00010024       07     550.0 MB   HGT
-    211  00  YHQG55   0                        EOM
-  00001C02  0760FF80  0764020D...00010024       07     525.0 MB   HGT
-    211  00  YHQG52   0                        EOM
-  00001C02  0760FF80  076401F4...00010024       07     500.0 MB   HGT           
-    211  00  YHQG50   0                        EOM
-  00001C02  0760FF80  076401C2...00010024       07     450.0 MB   HGT
-    211  00  YHQG45   0                        EOM
-  00001C02  0760FF80  07640190...00010024       07     400.0 MB   HGT           
-    211  00  YHQG40   0                        EOM
-  00001C02  0760FF80  0764015E...00010024       07     350.0 MB   HGT
-    211  00  YHQG35   0                        EOM
-  00001C02  0760FF80  0764012C...00010024       07     300.0 MB   HGT           
-    211  00  YHQG30   0                        EOM
-  00001C02  0760FF80  076400FA...00010024       07     250.0 MB   HGT           
-    211  00  YHQG25   0                        EOM
-  00001C02  0760FF80  076400C8...00010024       07     200.0 MB   HGT           
-    211  00  YHQG20   0                        EOM
-  00001C02  0760FF80  07640096...00010024       07     150.0 MB   HGT
-    211  00  YHQG15   0                        EOM
-  00001C02  0760FF80  07640064...00010024       07     100.0 MB   HGT           
-    211  00  YHQG10   0                        EOM
-  00001C02  0760FF80  216403E8...00010024       33    1000.0 MB   U GRD
-    211  00  YUQG99   0                        EOM
-  00001C02  0760FF80  216403CF...00010024       33     975.0 MB   U GRD
-    211  00  YUQG93   0                        EOM
-  00001C02  0760FF80  216403B6...00010024       33     950.0 MB   U GRD
-    211  00  YUQG95   0                        EOM
-  00001C02  0760FF80  2164039D...00010024       33     925.0 MB   U GRD
-    211  00  YUQG92   0                        EOM
-  00001C02  0760FF80  21640384...00010024       33     900.0 MB   U GRD
-    211  00  YUQG90   0                        EOM
-  00001C02  0760FF80  2164036B...00010024       33     875.0 MB   U GRD
-    211  00  YUQG91   0                        EOM
-  00001C02  0760FF80  21640352...00010024       33     850.0 MB   U GRD         
-    211  00  YUQG85   0                        EOM
-  00001C02  0760FF80  21640339...00010024       33     825.0 MB   U GRD
-    211  00  YUQG82   0                        EOM
-  00001C02  0760FF80  21640320...00010024       33     800.0 MB   U GRD
-    211  00  YUQG80   0                        EOM
-  00001C02  0760FF80  21640307...00010024       33     775.0 MB   U GRD
-    211  00  YUQG77   0                        EOM
-  00001C02  0760FF80  216402EE...00010024       33     750.0 MB   U GRD
-    211  00  YUQG75   0                        EOM
-  00001C02  0760FF80  216402D5...00010024       33     725.0 MB   U GRD
-    211  00  YUQG72   0                        EOM
-  00001C02  0760FF80  216402BC...00010024       33     700.0 MB   U GRD
-    211  00  YUQG70   0                        EOM
-  00001C02  0760FF80  216402A3...00010024       33     675.0 MB   U GRD
-    211  00  YUQG67   0                        EOM
-  00001C02  0760FF80  2164028A...00010024       33     650.0 MB   U GRD
-    211  00  YUQG65   0                        EOM
-  00001C02  0760FF80  21640271...00010024       33     625.0 MB   U GRD
-    211  00  YUQG62   0                        EOM
-  00001C02  0760FF80  21640258...00010024       33     600.0 MB   U GRD
-    211  00  YUQG60   0                        EOM
-  00001C02  0760FF80  2164023F...00010024       33     575.0 MB   U GRD
-    211  00  YUQG57   0                        EOM
-  00001C02  0760FF80  21640226...00010024       33     550.0 MB   U GRD
-    211  00  YUQG55   0                        EOM
-  00001C02  0760FF80  2164020D...00010024       33     525.0 MB   U GRD
-    211  00  YUQG52   0                        EOM
-  00001C02  0760FF80  216401F4...00010024       33     500.0 MB   U GRD
-    211  00  YUQG50   0                        EOM
-  00001C02  0760FF80  216401C2...00010024       33     450.0 MB   U GRD
-    211  00  YUQG45   0                        EOM
-  00001C02  0760FF80  21640190...00010024       33     400.0 MB   U GRD         
-    211  00  YUQG40   0                        EOM
-  00001C02  0760FF80  2164015E...00010024       33     350.0 MB   U GRD
-    211  00  YUQG35   0                        EOM
-  00001C02  0760FF80  2164012C...00010024       33     300.0 MB   U GRD         
-    211  00  YUQG30   0                        EOM
-  00001C02  0760FF80  216400FA...00010024       33     250.0 MB   U GRD         
-    211  00  YUQG25   0                        EOM
-  00001C02  0760FF80  216400C8...00010024       33     200.0 MB   U GRD         
-    211  00  YUQG20   0                        EOM
-  00001C02  0760FF80  21640096...00010024       33     150.0 MB   U GRD         
-    211  00  YUQG15   0                        EOM
-  00001C02  0760FF80  21640064...00010024       33     100.0 MB   U GRD         
-    211  00  YUQG10   0                        EOM
-  00001C02  0760FF80  226403E8...00010024       34    1000.0 MB   V GRD
-    211  00  YVQG99   0                        EOM
-  00001C02  0760FF80  226403CF...00010024       34     975.0 MB   V GRD
-    211  00  YVQG93   0                        EOM
-  00001C02  0760FF80  226403B6...00010024       34     950.0 MB   V GRD
-    211  00  YVQG95   0                        EOM
-  00001C02  0760FF80  2264039D...00010024       34     925.0 MB   V GRD
-    211  00  YVQG92   0                        EOM
-  00001C02  0760FF80  22640384...00010024       34     900.0 MB   V GRD
-    211  00  YVQG90   0                        EOM
-  00001C02  0760FF80  2264036B...00010024       34     875.0 MB   V GRD
-    211  00  YVQG91   0                        EOM
-  00001C02  0760FF80  22640352...00010024       34     850.0 MB   V GRD         
-    211  00  YVQG85   0                        EOM
-  00001C02  0760FF80  22640339...00010024       34     825.0 MB   V GRD
-    211  00  YVQG82   0                        EOM
-  00001C02  0760FF80  22640320...00010024       34     800.0 MB   V GRD
-    211  00  YVQG80   0                        EOM
-  00001C02  0760FF80  22640307...00010024       34     775.0 MB   V GRD
-    211  00  YVQG77   0                        EOM
-  00001C02  0760FF80  226402EE...00010024       34     750.0 MB   V GRD
-    211  00  YVQG75   0                        EOM
-  00001C02  0760FF80  226402D5...00010024       34     725.0 MB   V GRD
-    211  00  YVQG72   0                        EOM
-  00001C02  0760FF80  226402BC...00010024       34     700.0 MB   V GRD         
-    211  00  YVQG70   0                        EOM
-  00001C02  0760FF80  226402A3...00010024       34     675.0 MB   V GRD
-    211  00  YVQG67   0                        EOM
-  00001C02  0760FF80  2264028A...00010024       34     650.0 MB   V GRD
-    211  00  YVQG65   0                        EOM
-  00001C02  0760FF80  22640271...00010024       34     625.0 MB   V GRD
-    211  00  YVQG62   0                        EOM
-  00001C02  0760FF80  22640258...00010024       34     600.0 MB   V GRD
-    211  00  YVQG60   0                        EOM
-  00001C02  0760FF80  2264023F...00010024       34     575.0 MB   V GRD
-    211  00  YVQG57   0                        EOM
-  00001C02  0760FF80  22640226...00010024       34     550.0 MB   V GRD
-    211  00  YVQG55   0                        EOM
-  00001C02  0760FF80  2264020D...00010024       34     525.0 MB   V GRD
-    211  00  YVQG52   0                        EOM
-  00001C02  0760FF80  226401F4...00010024       34     500.0 MB   V GRD         
-    211  00  YVQG50   0                        EOM
-  00001C02  0760FF80  226401C2...00010024       34     450.0 MB   V GRD
-    211  00  YVQG45   0                        EOM
-  00001C02  0760FF80  22640190...00010024       34     400.0 MB   V GRD         
-    211  00  YVQG40   0                        EOM
-  00001C02  0760FF80  2264015E...00010024       34     350.0 MB   V GRD
-    211  00  YVQG35   0                        EOM
-  00001C02  0760FF80  2264012C...00010024       34     300.0 MB   V GRD         
-    211  00  YVQG30   0                        EOM
-  00001C02  0760FF80  226400FA...00010024       34     250.0 MB   V GRD         
-    211  00  YVQG25   0                        EOM
-  00001C02  0760FF80  226400C8...00010024       34     200.0 MB   V GRD         
-    211  00  YVQG20   0                        EOM
-  00001C02  0760FF80  22640096...00010024       34     150.0 MB   V GRD         
-    211  00  YVQG15   0                        EOM
-  00001C02  0760FF80  22640064...00010024       34     100.0 MB   V GRD         
-    211  00  YVQG10   0                        EOM
-  00001C02  0760FF80  02660000...00010024       02           MSL  PRMSL         
-    211  00  YPQG89   0                        EOM
-  00001C02  0760FF80  346403E8...00010024       52    1000.0 MB   R H
-    211  00  YRQG99   0                        EOM
-  00001C02  0760FF80  346403CF...00010024       52     975.0 MB   R H
-    211  00  YRQG93   0                        EOM
-  00001C02  0760FF80  346403B6...00010024       52     950.0 MB   R H
-    211  00  YRQG95   0                        EOM
-  00001C02  0760FF80  3464039D...00010024       52     925.0 MB   R H
-    211  00  YRQG92   0                        EOM
-  00001C02  0760FF80  34640384...00010024       52     900.0 MB   R H
-    211  00  YRQG90   0                        EOM
-  00001C02  0760FF80  3464036B...00010024       52     875.0 MB   R H
-    211  00  YRQG91   0                        EOM
-  00001C02  0760FF80  34640352...00010024       52     850.0 MB   R H           
-    211  00  YRQG85   0                        EOM
-  00001C02  0760FF80  34640339...00010024       52     825.0 MB   R H
-    211  00  YRQG82   0                        EOM
-  00001C02  0760FF80  34640320...00010024       52     800.0 MB   R H
-    211  00  YRQG80   0                        EOM
-  00001C02  0760FF80  34640307...00010024       52     775.0 MB   R H
-    211  00  YRQG77   0                        EOM
-  00001C02  0760FF80  346402EE...00010024       52     750.0 MB   R H
-    211  00  YRQG75   0                        EOM
-  00001C02  0760FF80  346402D5...00010024       52     725.0 MB   R H
-    211  00  YRQG72   0                        EOM
-  00001C02  0760FF80  346402BC...00010024       52     700.0 MB   R H           
-    211  00  YRQG70   0                        EOM
-  00001C02  0760FF80  346402A3...00010024       52     675.0 MB   R H
-    211  00  YRQG67   0                        EOM
-  00001C02  0760FF80  3464028A...00010024       52     650.0 MB   R H
-    211  00  YRQG65   0                        EOM
-  00001C02  0760FF80  34640271...00010024       52     625.0 MB   R H
-    211  00  YRQG62   0                        EOM
-  00001C02  0760FF80  34640258...00010024       52     600.0 MB   R H
-    211  00  YRQG60   0                        EOM
-  00001C02  0760FF80  3464023F...00010024       52     575.0 MB   R H
-    211  00  YRQG57   0                        EOM
-  00001C02  0760FF80  34640226...00010024       52     550.0 MB   R H
-    211  00  YRQG55   0                        EOM
-  00001C02  0760FF80  3464020D...00010024       52     525.0 MB   R H
-    211  00  YRQG52   0                        EOM
-  00001C02  0760FF80  346401F4...00010024       52     500.0 MB   R H           
-    211  00  YRQG50   0                        EOM
-  00001C02  0760FF80  346401C2...00010024       52     450.0 MB   R H
-    211  00  YRQG45   0                        EOM
-  00001C02  0760FF80  34640190...00010024       52     400.0 MB   R H           
-    211  00  YRQG40   0                        EOM
-  00001C02  0760FF80  3464015E...00010024       52     350.0 MB   R H
-    211  00  YRQG35   0                        EOM
-  00001C02  0760FF80  3464012C...00010024       52     300.0 MB   R H           
-    211  00  YRQG30   0                        EOM
-  00001C02  0760FF80  346400FA...00010024       52     250.0 MB   R H
-    211  00  YRQG25   0                        EOM
-  00001C02  0760FF80  346400C8...00010024       52     200.0 MB   R H
-    211  00  YRQG20   0                        EOM
-  00001C02  0760FF80  34640096...00010024       52     150.0 MB   R H
-    211  00  YRQG15   0                        EOM
-  00001C02  0760FF80  34640064...00010024       52     100.0 MB   R H
-    211  00  YRQG10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010024       11    1000.0 MB   TMP
-    211  00  YTQG99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010024       11     975.0 MB   TMP
-    211  00  YTQG93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010024       11     950.0 MB   TMP
-    211  00  YTQG95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010024       11     925.0 MB   TMP
-    211  00  YTQG92   0                        EOM
-  00001C02  0760FF80  0B640384...00010024       11     900.0 MB   TMP
-    211  00  YTQG90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010024       11     875.0 MB   TMP
-    211  00  YTQG91   0                        EOM
-  00001C02  0760FF80  0B640352...00010024       11     850.0 MB   TMP           
-    211  00  YTQG85   0                        EOM
-  00001C02  0760FF80  0B640339...00010024       11     825.0 MB   TMP
-    211  00  YTQG82   0                        EOM
-  00001C02  0760FF80  0B640320...00010024       11     800.0 MB   TMP
-    211  00  YTQG80   0                        EOM
-  00001C02  0760FF80  0B640307...00010024       11     775.0 MB   TMP
-    211  00  YTQG77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010024       11     750.0 MB   TMP
-    211  00  YTQG75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010024       11     725.0 MB   TMP
-    211  00  YTQG72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010024       11     700.0 MB   TMP           
-    211  00  YTQG70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010024       11     675.0 MB   TMP
-    211  00  YTQG67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010024       11     650.0 MB   TMP
-    211  00  YTQG65   0                        EOM
-  00001C02  0760FF80  0B640271...00010024       11     625.0 MB   TMP
-    211  00  YTQG62   0                        EOM
-  00001C02  0760FF80  0B640258...00010024       11     600.0 MB   TMP
-    211  00  YTQG60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010024       11     575.0 MB   TMP
-    211  00  YTQG57   0                        EOM
-  00001C02  0760FF80  0B640226...00010024       11     550.0 MB   TMP
-    211  00  YTQG55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010024       11     525.0 MB   TMP
-    211  00  YTQG52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010024       11     500.0 MB   TMP           
-    211  00  YTQG50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010024       11     450.0 MB   TMP
-    211  00  YTQG45   0                        EOM
-  00001C02  0760FF80  0B640190...00010024       24     400.0 MB   TMP           
-    211  00  YTQG40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010024       11     350.0 MB   TMP
-    211  00  YTQG35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010024       11     300.0 MB   TMP           
-    211  00  YTQG30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010024       11     250.0 MB   TMP           
-    211  00  YTQG25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010024       11     200.0 MB   TMP           
-    211  00  YTQG20   0                        EOM
-  00001C02  0760FF80  0B640096...00010024       11     150.0 MB   TMP           
-    211  00  YTQG15   0                        EOM
-  00001C02  0760FF80  0B640064...00010024       11     100.0 MB   TMP           
-    211  00  YTQG10   0                        EOM
-  00001C02  0760FF80  28640352...00010024       40     850.0 MB  DZDT           
-    211  00  YOQG85   0                        EOM
-  00001C02  0760FF80  286402BC...00010024       40     700.0 MB  DZDT           
-    211  00  YOQG70   0                        EOM
-  00001C02  0760FF80  286401F4...00010024       40     500.0 MB  DZDT           
-    211  00  YOQG50   0                        EOM
-  00001C02  0760FF80  28640190...00010024       40     400.0 MB  DZDT           
-    211  00  YOQG40   0                        EOM
-  00001C02  0760FF80  2864012C...00010024       40     300.0 MB  DZDT           
-    211  00  YOQG30   0                        EOM
-  00001C02  0760FF80  286400FA...00010024       40     250.0 MB  DZDT           
-    211  00  YOQG25   0                        EOM
-  00001C02  0760FF80  286400C8...00010024       40     200.0 MB  DZDT           
-    211  00  YOQG20   0                        EOM
-  00001C02  0760FF80  28640096...00010012       40     150.0 MB  DZDT           
-    211  00  YOQG15   0                        EOM
-  00001C02  0760FF80  28640064...00010024       40     100.0 MB  DZDT           
-    211  00  YOQG10   0                        EOM
-  00001C02  0760FF80  01010000...00010024       01          SFC  PRES           
-    211  00  YPQG98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010024       52        44/100  R H           
-    211  00  YRQG00   0                        EOM
-  00001C02  0760FF80  36C80000...00010024       54          EATM  P WAT         
-    211  00  YFQG00   0                        EOM
-  00001C02  0760FF80  0B690002...00010024       11          2m/SFC TMP         
-    211  00  YTQG98   0                        EOM
-  00001C02  0760FF80  34741E00...00010024       52     BNDRY/SPD  R H           
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  0B070000...00010024       11            TRO TMP           
-    211  00  YTQG97   0                        EOM
-  00001C02  0760FF80  01070000...00010024       01            TRO PRES          
-    211  00  YPQG97   0                        EOM
-  00001C02  0760FF80  21741E00...00010024       33           SPD  U GRD         
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  22741E00...00010024       34           SPD  V GRD         
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  21070000...00010024       33            TRO U GRD         
-    211  00  YUQG97   0                        EOM
-  00001C02  0760FF80  22070000...00010024       34            TRO V GRD         
-    211  00  YVQG97   0                        EOM
-  00001C02  0760FF80  88070000...00010024      136            TRO VW SH         
-    211  00  YBQG97   0                        EOM
-  00001C02  0760FF80  3D010000...00010024       61            SFC A PCP         
-    211  00  YEQG98   0                        EOM
-  00001C02  0760FF80  83010000...00010024      131            SFC LFT X         
-    211  00  YXQG98   0                        EOM
-  00001C02  0760FF80  296402BC...00010024       41    700.0 MB    ABS V         
-    211  00  YCQG70   0                        EOM
-  00001C02  0760FF80  296401F4...00010024       41    500.0 MB    ABS V         
-    211  00  YCQG50   0                        EOM
-  00001C02  0760FF80  9D010000...00010024      157          SFC   CAPE
-    211  00  YWQG98   0                        EOM
-  00001C02  0760FF80  9C010000...00010024      156          SFC   CIN
-    211  00  YYQG98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010024      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQG86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010024      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQG86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010024       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010024       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010024       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010024       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  0B749678...00010024       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010024       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQG86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010024       52   60 SPDY  30 SPDY  R H
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010024       52   90 SPDY  60 SPDY  R H
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  3474785A...00010024       52  120 SPDY  90 SPDY  R H
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  34749678...00010024       52  150 SPDY 120 SPDY  R H
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  3474B496...00010024       52  180 SPDY 150 SPDY  R H
-    211  00  YRQG86   0                        EOM
-  00001C02  0760FF80  21741E00...00010024       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010024       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010024       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  2174785A...00010024       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  21749678...00010024       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  2174B496...00010024       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQG86   0                        EOM
-  00001C02  0760FF80  22741E00...00010024       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010024       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010024       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  2274785A...00010024       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  22749678...00010024       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  2274B496...00010024       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQG86   0                        EOM
-  00001C02  0760FF80  0B690002...00010024       11    2  HTGL     TMP
-    211  00  YTQG98   0                        EOM
-  00001C02  0760FF80  34690002...00010024       52    2  HTGL     R H
-    211  00  YRQG98   0                        EOM
-  00001C02  0760FF80  2169000A...00010024       33   10  HTGL     U GRD
-    211  00  YUQG98   0                        EOM
-  00001C02  0760FF80  2269000A...00010024       34   10  HTGL     V GRD
-    211  00  YVQG98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs042.211 b/parm/wmo/grib_awpgfs042.211
deleted file mode 100755
index a0259b82f2..0000000000
--- a/parm/wmo/grib_awpgfs042.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...0001002A       07    1000.0 MB   HGT           
-    211  00  YHQH99   0                        EOM
-  00001C02  0760FF80  076403CF...0001002A       07     975.0 MB   HGT
-    211  00  YHQH93   0                        EOM
-  00001C02  0760FF80  076403B6...0001002A       07     950.0 MB   HGT
-    211  00  YHQH95   0                        EOM
-  00001C02  0760FF80  0764039D...0001002A       07     925.0 MB   HGT
-    211  00  YHQH92   0                        EOM
-  00001C02  0760FF80  07640384...0001002A       07     900.0 MB   HGT
-    211  00  YHQH90   0                        EOM
-  00001C02  0760FF80  0764036B...0001002A       07     875.0 MB   HGT
-    211  00  YHQH91   0                        EOM
-  00001C02  0760FF80  07640352...0001002A       07     850.0 MB   HGT           
-    211  00  YHQH85   0                        EOM
-  00001C02  0760FF80  07640339...0001002A       07     825.0 MB   HGT
-    211  00  YHQH82   0                        EOM
-  00001C02  0760FF80  07640320...0001002A       07     800.0 MB   HGT
-    211  00  YHQH80   0                        EOM
-  00001C02  0760FF80  07640307...0001002A       07     775.0 MB   HGT
-    211  00  YHQH77   0                        EOM
-  00001C02  0760FF80  076402EE...0001002A       07     750.0 MB   HGT
-    211  00  YHQH75   0                        EOM
-  00001C02  0760FF80  076402D5...0001002A       07     725.0 MB   HGT
-    211  00  YHQH72   0                        EOM
-  00001C02  0760FF80  076402BC...0001002A       07     700.0 MB   HGT           
-    211  00  YHQH70   0                        EOM
-  00001C02  0760FF80  076402A3...0001002A       07     675.0 MB   HGT
-    211  00  YHQH67   0                        EOM
-  00001C02  0760FF80  0764028A...0001002A       07     650.0 MB   HGT
-    211  00  YHQH65   0                        EOM
-  00001C02  0760FF80  07640271...0001002A       07     625.0 MB   HGT
-    211  00  YHQH62   0                        EOM
-  00001C02  0760FF80  07640258...0001002A       07     600.0 MB   HGT
-    211  00  YHQH60   0                        EOM
-  00001C02  0760FF80  0764023F...0001002A       07     575.0 MB   HGT
-    211  00  YHQH57   0                        EOM
-  00001C02  0760FF80  07640226...0001002A       07     550.0 MB   HGT
-    211  00  YHQH55   0                        EOM
-  00001C02  0760FF80  0764020D...0001002A       07     525.0 MB   HGT
-    211  00  YHQH52   0                        EOM
-  00001C02  0760FF80  076401F4...0001002A       07     500.0 MB   HGT           
-    211  00  YHQH50   0                        EOM
-  00001C02  0760FF80  076401C2...0001002A       07     450.0 MB   HGT
-    211  00  YHQH45   0                        EOM
-  00001C02  0760FF80  07640190...0001002A       07     400.0 MB   HGT           
-    211  00  YHQH40   0                        EOM
-  00001C02  0760FF80  0764015E...0001002A       07     350.0 MB   HGT
-    211  00  YHQH35   0                        EOM
-  00001C02  0760FF80  0764012C...0001002A       07     300.0 MB   HGT           
-    211  00  YHQH30   0                        EOM
-  00001C02  0760FF80  076400FA...0001002A       07     250.0 MB   HGT           
-    211  00  YHQH25   0                        EOM
-  00001C02  0760FF80  076400C8...0001002A       07     200.0 MB   HGT           
-    211  00  YHQH20   0                        EOM
-  00001C02  0760FF80  07640096...0001002A       07     150.0 MB   HGT           
-    211  00  YHQH15   0                        EOM
-  00001C02  0760FF80  07640064...0001002A       07     100.0 MB   HGT           
-    211  00  YHQH10   0                        EOM
-  00001C02  0760FF80  216403E8...0001002A       33    1000.0 MB   U GRD
-    211  00  YUQH99   0                        EOM
-  00001C02  0760FF80  216403CF...0001002A       33     975.0 MB   U GRD
-    211  00  YUQH93   0                        EOM
-  00001C02  0760FF80  216403B6...0001002A       33     950.0 MB   U GRD
-    211  00  YUQH95   0                        EOM
-  00001C02  0760FF80  2164039D...0001002A       33     925.0 MB   U GRD
-    211  00  YUQH92   0                        EOM
-  00001C02  0760FF80  21640384...0001002A       33     900.0 MB   U GRD
-    211  00  YUQH90   0                        EOM
-  00001C02  0760FF80  2164036B...0001002A       33     875.0 MB   U GRD
-    211  00  YUQH91   0                        EOM
-  00001C02  0760FF80  21640352...0001002A       33     850.0 MB   U GRD         
-    211  00  YUQH85   0                        EOM
-  00001C02  0760FF80  21640339...0001002A       33     825.0 MB   U GRD
-    211  00  YUQH82   0                        EOM
-  00001C02  0760FF80  21640320...0001002A       33     800.0 MB   U GRD
-    211  00  YUQH80   0                        EOM
-  00001C02  0760FF80  21640307...0001002A       33     775.0 MB   U GRD
-    211  00  YUQH77   0                        EOM
-  00001C02  0760FF80  216402EE...0001002A       33     750.0 MB   U GRD
-    211  00  YUQH75   0                        EOM
-  00001C02  0760FF80  216402D5...0001002A       33     725.0 MB   U GRD
-    211  00  YUQH72   0                        EOM
-  00001C02  0760FF80  216402BC...0001002A       33     700.0 MB   U GRD         
-    211  00  YUQH70   0                        EOM
-  00001C02  0760FF80  216402A3...0001002A       33     675.0 MB   U GRD
-    211  00  YUQH67   0                        EOM
-  00001C02  0760FF80  2164028A...0001002A       33     650.0 MB   U GRD
-    211  00  YUQH65   0                        EOM
-  00001C02  0760FF80  21640271...0001002A       33     625.0 MB   U GRD
-    211  00  YUQH62   0                        EOM
-  00001C02  0760FF80  21640258...0001002A       33     600.0 MB   U GRD
-    211  00  YUQH60   0                        EOM
-  00001C02  0760FF80  2164023F...0001002A       33     575.0 MB   U GRD
-    211  00  YUQH57   0                        EOM
-  00001C02  0760FF80  21640226...0001002A       33     550.0 MB   U GRD
-    211  00  YUQH55   0                        EOM
-  00001C02  0760FF80  2164020D...0001002A       33     525.0 MB   U GRD
-    211  00  YUQH52   0                        EOM
-  00001C02  0760FF80  216401F4...0001002A       33     500.0 MB   U GRD         
-    211  00  YUQH50   0                        EOM
-  00001C02  0760FF80  216401C2...0001002A       33     450.0 MB   U GRD
-    211  00  YUQH45   0                        EOM
-  00001C02  0760FF80  21640190...0001002A       33     400.0 MB   U GRD         
-    211  00  YUQH40   0                        EOM
-  00001C02  0760FF80  2164015E...0001002A       33     350.0 MB   U GRD
-    211  00  YUQH35   0                        EOM
-  00001C02  0760FF80  2164012C...0001002A       33     300.0 MB   U GRD         
-    211  00  YUQH30   0                        EOM
-  00001C02  0760FF80  216400FA...0001002A       33     250.0 MB   U GRD         
-    211  00  YUQH25   0                        EOM
-  00001C02  0760FF80  216400C8...0001002A       33     200.0 MB   U GRD         
-    211  00  YUQH20   0                        EOM
-  00001C02  0760FF80  21640096...0001002A       33     150.0 MB   U GRD         
-    211  00  YUQH15   0                        EOM
-  00001C02  0760FF80  21640064...0001002A       33     100.0 MB   U GRD         
-    211  00  YUQH10   0                        EOM
-  00001C02  0760FF80  226403E8...0001002A       34    1000.0 MB   V GRD
-    211  00  YVQH99   0                        EOM
-  00001C02  0760FF80  226403CF...0001002A       34     975.0 MB   V GRD
-    211  00  YVQH93   0                        EOM
-  00001C02  0760FF80  226403B6...0001002A       34     950.0 MB   V GRD
-    211  00  YVQH95   0                        EOM
-  00001C02  0760FF80  2264039D...0001002A       34     925.0 MB   V GRD
-    211  00  YVQH92   0                        EOM
-  00001C02  0760FF80  22640384...0001002A       34     900.0 MB   V GRD
-    211  00  YVQH90   0                        EOM
-  00001C02  0760FF80  2264036B...0001002A       34     875.0 MB   V GRD
-    211  00  YVQH91   0                        EOM
-  00001C02  0760FF80  22640352...0001002A       34     850.0 MB   V GRD         
-    211  00  YVQH85   0                        EOM
-  00001C02  0760FF80  22640339...0001002A       34     825.0 MB   V GRD
-    211  00  YVQH82   0                        EOM
-  00001C02  0760FF80  22640320...0001002A       34     800.0 MB   V GRD
-    211  00  YVQH80   0                        EOM
-  00001C02  0760FF80  22640307...0001002A       34     775.0 MB   V GRD
-    211  00  YVQH77   0                        EOM
-  00001C02  0760FF80  226402EE...0001002A       34     750.0 MB   V GRD
-    211  00  YVQH75   0                        EOM
-  00001C02  0760FF80  226402D5...0001002A       34     725.0 MB   V GRD
-    211  00  YVQH72   0                        EOM
-  00001C02  0760FF80  226402BC...0001002A       34     700.0 MB   V GRD         
-    211  00  YVQH70   0                        EOM
-  00001C02  0760FF80  226402A3...0001002A       34     675.0 MB   V GRD
-    211  00  YVQH67   0                        EOM
-  00001C02  0760FF80  2264028A...0001002A       34     650.0 MB   V GRD
-    211  00  YVQH65   0                        EOM
-  00001C02  0760FF80  22640271...0001002A       34     625.0 MB   V GRD
-    211  00  YVQH62   0                        EOM
-  00001C02  0760FF80  22640258...0001002A       34     600.0 MB   V GRD
-    211  00  YVQH60   0                        EOM
-  00001C02  0760FF80  2264023F...0001002A       34     575.0 MB   V GRD
-    211  00  YVQH57   0                        EOM
-  00001C02  0760FF80  22640226...0001002A       34     550.0 MB   V GRD
-    211  00  YVQH55   0                        EOM
-  00001C02  0760FF80  2264020D...0001002A       34     525.0 MB   V GRD
-    211  00  YVQH52   0                        EOM
-  00001C02  0760FF80  226401F4...0001002A       34     500.0 MB   V GRD         
-    211  00  YVQH50   0                        EOM
-  00001C02  0760FF80  226401C2...0001002A       34     450.0 MB   V GRD
-    211  00  YVQH45   0                        EOM
-  00001C02  0760FF80  22640190...0001002A       34     400.0 MB   V GRD         
-    211  00  YVQH40   0                        EOM
-  00001C02  0760FF80  2264015E...0001002A       34     350.0 MB   V GRD
-    211  00  YVQH35   0                        EOM
-  00001C02  0760FF80  2264012C...0001002A       34     300.0 MB   V GRD         
-    211  00  YVQH30   0                        EOM
-  00001C02  0760FF80  226400FA...0001002A       34     250.0 MB   V GRD         
-    211  00  YVQH25   0                        EOM
-  00001C02  0760FF80  226400C8...0001002A       34     200.0 MB   V GRD         
-    211  00  YVQH20   0                        EOM
-  00001C02  0760FF80  22640096...0001002A       34     150.0 MB   V GRD         
-    211  00  YVQH15   0                        EOM
-  00001C02  0760FF80  22640064...0001002A       34     100.0 MB   V GRD         
-    211  00  YVQH10   0                        EOM
-  00001C02  0760FF80  02660000...0001002A       02           MSL  PRMSL         
-    211  00  YPQH89   0                        EOM
-  00001C02  0760FF80  346403E8...0001002A       52    1000.0 MB   R H
-    211  00  YRQH99   0                        EOM
-  00001C02  0760FF80  346403CF...0001002A       52     975.0 MB   R H
-    211  00  YRQH93   0                        EOM
-  00001C02  0760FF80  346403B6...0001002A       52     950.0 MB   R H
-    211  00  YRQH95   0                        EOM
-  00001C02  0760FF80  3464039D...0001002A       52     925.0 MB   R H
-    211  00  YRQH92   0                        EOM
-  00001C02  0760FF80  34640384...0001002A       52     900.0 MB   R H
-    211  00  YRQH90   0                        EOM
-  00001C02  0760FF80  3464036B...0001002A       52     875.0 MB   R H
-    211  00  YRQH91   0                        EOM
-  00001C02  0760FF80  34640352...0001002A       52     850.0 MB   R H           
-    211  00  YRQH85   0                        EOM
-  00001C02  0760FF80  34640339...0001002A       52     825.0 MB   R H
-    211  00  YRQH82   0                        EOM
-  00001C02  0760FF80  34640320...0001002A       52     800.0 MB   R H
-    211  00  YRQH80   0                        EOM
-  00001C02  0760FF80  34640307...0001002A       52     775.0 MB   R H
-    211  00  YRQH77   0                        EOM
-  00001C02  0760FF80  346402EE...0001002A       52     750.0 MB   R H
-    211  00  YRQH75   0                        EOM
-  00001C02  0760FF80  346402D5...0001002A       52     725.0 MB   R H
-    211  00  YRQH72   0                        EOM
-  00001C02  0760FF80  346402BC...0001002A       52     700.0 MB   R H           
-    211  00  YRQH70   0                        EOM
-  00001C02  0760FF80  346402A3...0001002A       52     675.0 MB   R H
-    211  00  YRQH67   0                        EOM
-  00001C02  0760FF80  3464028A...0001002A       52     650.0 MB   R H
-    211  00  YRQH65   0                        EOM
-  00001C02  0760FF80  34640271...0001002A       52     625.0 MB   R H
-    211  00  YRQH62   0                        EOM
-  00001C02  0760FF80  34640258...0001002A       52     600.0 MB   R H
-    211  00  YRQH60   0                        EOM
-  00001C02  0760FF80  3464023F...0001002A       52     575.0 MB   R H
-    211  00  YRQH57   0                        EOM
-  00001C02  0760FF80  34640226...0001002A       52     550.0 MB   R H
-    211  00  YRQH55   0                        EOM
-  00001C02  0760FF80  3464020D...0001002A       52     525.0 MB   R H
-    211  00  YRQH52   0                        EOM
-  00001C02  0760FF80  346401F4...0001002A       52     500.0 MB   R H           
-    211  00  YRQH50   0                        EOM
-  00001C02  0760FF80  346401C2...0001002A       52     450.0 MB   R H
-    211  00  YRQH45   0                        EOM
-  00001C02  0760FF80  34640190...0001002A       52     400.0 MB   R H           
-    211  00  YRQH40   0                        EOM
-  00001C02  0760FF80  3464015E...0001002A       52     350.0 MB   R H
-    211  00  YRQH35   0                        EOM
-  00001C02  0760FF80  3464012C...0001002A       52     300.0 MB   R H           
-    211  00  YRQH30   0                        EOM
-  00001C02  0760FF80  346400FA...0001002A       52     250.0 MB   R H
-    211  00  YRQH25   0                        EOM
-  00001C02  0760FF80  346400C8...0001002A       52     200.0 MB   R H
-    211  00  YRQH20   0                        EOM
-  00001C02  0760FF80  34640096...0001002A       52     150.0 MB   R H
-    211  00  YRQH15   0                        EOM
-  00001C02  0760FF80  34640064...0001002A       52     100.0 MB   R H
-    211  00  YRQH10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001002A       11    1000.0 MB   TMP
-    211  00  YTQH99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001002A       11     975.0 MB   TMP
-    211  00  YTQH93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001002A       11     950.0 MB   TMP
-    211  00  YTQH95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001002A       11     925.0 MB   TMP
-    211  00  YTQH92   0                        EOM
-  00001C02  0760FF80  0B640384...0001002A       11     900.0 MB   TMP
-    211  00  YTQH90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001002A       11     875.0 MB   TMP
-    211  00  YTQH91   0                        EOM
-  00001C02  0760FF80  0B640352...0001002A       11     850.0 MB   TMP           
-    211  00  YTQH85   0                        EOM
-  00001C02  0760FF80  0B640339...0001002A       11     825.0 MB   TMP
-    211  00  YTQH82   0                        EOM
-  00001C02  0760FF80  0B640320...0001002A       11     800.0 MB   TMP
-    211  00  YTQH80   0                        EOM
-  00001C02  0760FF80  0B640307...0001002A       11     775.0 MB   TMP
-    211  00  YTQH77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001002A       11     750.0 MB   TMP
-    211  00  YTQH75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001002A       11     725.0 MB   TMP
-    211  00  YTQH72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001002A       11     700.0 MB   TMP           
-    211  00  YTQH70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001002A       11     675.0 MB   TMP
-    211  00  YTQH67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001002A       11     650.0 MB   TMP
-    211  00  YTQH65   0                        EOM
-  00001C02  0760FF80  0B640271...0001002A       11     625.0 MB   TMP
-    211  00  YTQH62   0                        EOM
-  00001C02  0760FF80  0B640258...0001002A       11     600.0 MB   TMP
-    211  00  YTQH60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001002A       11     575.0 MB   TMP
-    211  00  YTQH57   0                        EOM
-  00001C02  0760FF80  0B640226...0001002A       11     550.0 MB   TMP
-    211  00  YTQH55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001002A       11     525.0 MB   TMP
-    211  00  YTQH52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001002A       11     500.0 MB   TMP           
-    211  00  YTQH50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001002A       11     450.0 MB   TMP
-    211  00  YTQH45   0                        EOM
-  00001C02  0760FF80  0B640190...0001002A       11     400.0 MB   TMP           
-    211  00  YTQH40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001002A       11     350.0 MB   TMP
-    211  00  YTQH35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001002A       11     300.0 MB   TMP           
-    211  00  YTQH30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001002A       11     250.0 MB   TMP           
-    211  00  YTQH25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001002A       11     200.0 MB   TMP           
-    211  00  YTQH20   0                        EOM
-  00001C02  0760FF80  0B640096...0001002A       11     150.0 MB   TMP           
-    211  00  YTQH15   0                        EOM
-  00001C02  0760FF80  0B640064...0001002A       11     100.0 MB   TMP           
-    211  00  YTQH10   0                        EOM
-  00001C02  0760FF80  28640352...0001002A       40     850.0 MB  DZDT           
-    211  00  YOQH85   0                        EOM
-  00001C02  0760FF80  286402BC...0001002A       40     700.0 MB  DZDT           
-    211  00  YOQH70   0                        EOM
-  00001C02  0760FF80  286401F4...0001002A       40     500.0 MB  DZDT           
-    211  00  YOQH50   0                        EOM
-  00001C02  0760FF80  28640190...0001002A       40     400.0 MB  DZDT           
-    211  00  YOQH40   0                        EOM
-  00001C02  0760FF80  2864012C...0001002A       40     300.0 MB  DZDT           
-    211  00  YOQH30   0                        EOM
-  00001C02  0760FF80  286400FA...0001002A       40     250.0 MB  DZDT           
-    211  00  YOQH25   0                        EOM
-  00001C02  0760FF80  286400C8...0001002A       40     200.0 MB  DZDT           
-    211  00  YOQH20   0                        EOM
-  00001C02  0760FF80  28640096...0001002A       40     150.0 MB  DZDT           
-    211  00  YOQH15   0                        EOM
-  00001C02  0760FF80  28640064...0001002A       40     100.0 MB  DZDT           
-    211  00  YOQH10   0                        EOM
-  00001C02  0760FF80  01010000...0001002A       01          SFC  PRES           
-    211  00  YPQH98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001002A       52        44/100  R H           
-    211  00  YRQH00   0                        EOM
-  00001C02  0760FF80  36C80000...0001002A       54          EATM  P WAT         
-    211  00  YFQH00   0                        EOM
-  00001C02  0760FF80  0B690002...0001002A       11          2m/SFC TMP         
-    211  00  YTQH98   0                        EOM
-  00001C02  0760FF80  34741E00...0001002A       52      BNDRY/SPD  R H          
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  0B070000...0001002A       11            TRO TMP           
-    211  00  YTQH97   0                        EOM
-  00001C02  0760FF80  01070000...0001002A       01            TRO PRES          
-    211  00  YPQH97   0                        EOM
-  00001C02  0760FF80  21741E00...0001002A       33           SPD  U GRD         
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  22741E00...0001002A       34           SPD  V GRD         
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  21070000...0001002A       33            TRO U GRD         
-    211  00  YUQH97   0                        EOM
-  00001C02  0760FF80  22070000...0001002A       34            TRO V GRD         
-    211  00  YVQH97   0                        EOM
-  00001C02  0760FF80  88070000...0001002A      136            TRO VW SH         
-    211  00  YBQH97   0                        EOM
-  00001C02  0760FF80  3D010000...0001002A       61            SFC A PCP         
-    211  00  YEQH98   0                        EOM
-  00001C02  0760FF80  83010000...0001002A      131            SFC LFT X         
-    211  00  YXQH98   0                        EOM
-  00001C02  0760FF80  296402BC...0001002A       41    700.0 MB    ABS V         
-    211  00  YCQH70   0                        EOM
-  00001C02  0760FF80  296401F4...0001002A       41    500.0 MB    ABS V         
-    211  00  YCQH50   0                        EOM
-  00001C02  0760FF80  9D010000...0001002A      157          SFC   CAPE
-    211  00  YWQH98   0                        EOM
-  00001C02  0760FF80  9C010000...0001002A      156          SFC   CIN
-    211  00  YYQH98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001002A      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQH86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001002A      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQH86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001002A       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001002A       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001002A       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001002A       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  0B749678...0001002A       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001002A       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQH86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001002A       52   60 SPDY  30 SPDY  R H
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001002A       52   90 SPDY  60 SPDY  R H
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  3474785A...0001002A       52  120 SPDY  90 SPDY  R H
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  34749678...0001002A       52  150 SPDY 120 SPDY  R H
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  3474B496...0001002A       52  180 SPDY 150 SPDY  R H
-    211  00  YRQH86   0                        EOM
-  00001C02  0760FF80  21741E00...0001002A       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001002A       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001002A       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  2174785A...0001002A       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  21749678...0001002A       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  2174B496...0001002A       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQH86   0                        EOM
-  00001C02  0760FF80  22741E00...0001002A       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001002A       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001002A       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  2274785A...0001002A       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  22749678...0001002A       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  2274B496...0001002A       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQH86   0                        EOM
-  00001C02  0760FF80  0B690002...0001002A       11    2  HTGL     TMP
-    211  00  YTQH98   0                        EOM
-  00001C02  0760FF80  34690002...0001002A       52    2  HTGL     R H
-    211  00  YRQH98   0                        EOM
-  00001C02  0760FF80  2169000A...0001002A       33   10  HTGL     U GRD
-    211  00  YUQH98   0                        EOM
-  00001C02  0760FF80  2269000A...0001002A       34   10  HTGL     V GRD
-    211  00  YVQH98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs048.211 b/parm/wmo/grib_awpgfs048.211
deleted file mode 100755
index 572a22d38f..0000000000
--- a/parm/wmo/grib_awpgfs048.211
+++ /dev/null
@@ -1,405 +0,0 @@
-  00001C02  0760FF80  076403E8...00010030       07    1000.0 MB   HGT           
-    211  00  YHQI99   0                        EOM
-  00001C02  0760FF80  076403CF...00010030       07     975.0 MB   HGT
-    211  00  YHQI93   0                        EOM
-  00001C02  0760FF80  076403B6...00010030       07     950.0 MB   HGT
-    211  00  YHQI95   0                        EOM
-  00001C02  0760FF80  0764039D...00010030       07     925.0 MB   HGT
-    211  00  YHQI92   0                        EOM
-  00001C02  0760FF80  07640384...00010030       07     900.0 MB   HGT
-    211  00  YHQI90   0                        EOM
-  00001C02  0760FF80  0764036B...00010030       07     875.0 MB   HGT
-    211  00  YHQI91   0                        EOM
-  00001C02  0760FF80  07640352...00010030       07     850.0 MB   HGT           
-    211  00  YHQI85   0                        EOM
-  00001C02  0760FF80  07640339...00010030       07     825.0 MB   HGT
-    211  00  YHQI82   0                        EOM
-  00001C02  0760FF80  07640320...00010030       07     800.0 MB   HGT
-    211  00  YHQI80   0                        EOM
-  00001C02  0760FF80  07640307...00010030       07     775.0 MB   HGT
-    211  00  YHQI77   0                        EOM
-  00001C02  0760FF80  076402EE...00010030       07     750.0 MB   HGT
-    211  00  YHQI75   0                        EOM
-  00001C02  0760FF80  076402D5...00010030       07     725.0 MB   HGT
-    211  00  YHQI72   0                        EOM
-  00001C02  0760FF80  076402BC...00010030       07     700.0 MB   HGT           
-    211  00  YHQI70   0                        EOM
-  00001C02  0760FF80  076402A3...00010030       07     675.0 MB   HGT
-    211  00  YHQI67   0                        EOM
-  00001C02  0760FF80  0764028A...00010030       07     650.0 MB   HGT
-    211  00  YHQI65   0                        EOM
-  00001C02  0760FF80  07640271...00010030       07     625.0 MB   HGT
-    211  00  YHQI62   0                        EOM
-  00001C02  0760FF80  07640258...00010030       07     600.0 MB   HGT
-    211  00  YHQI60   0                        EOM
-  00001C02  0760FF80  0764023F...00010030       07     575.0 MB   HGT
-    211  00  YHQI57   0                        EOM
-  00001C02  0760FF80  07640226...00010030       07     550.0 MB   HGT
-    211  00  YHQI55   0                        EOM
-  00001C02  0760FF80  0764020D...00010030       07     525.0 MB   HGT
-    211  00  YHQI52   0                        EOM
-  00001C02  0760FF80  076401F4...00010030       07     500.0 MB   HGT           
-    211  00  YHQI50   0                        EOM
-  00001C02  0760FF80  076401C2...00010030       07     450.0 MB   HGT
-    211  00  YHQI45   0                        EOM
-  00001C02  0760FF80  07640190...00010030       07     400.0 MB   HGT           
-    211  00  YHQI40   0                        EOM
-  00001C02  0760FF80  0764015E...00010030       07     350.0 MB   HGT
-    211  00  YHQI35   0                        EOM
-  00001C02  0760FF80  0764012C...00010030       07     300.0 MB   HGT           
-    211  00  YHQI30   0                        EOM
-  00001C02  0760FF80  076400FA...00010030       07     250.0 MB   HGT           
-    211  00  YHQI25   0                        EOM
-  00001C02  0760FF80  076400C8...00010030       07     200.0 MB   HGT           
-    211  00  YHQI20   0                        EOM
-  00001C02  0760FF80  07640096...00010030       07     150.0 MB   HGT           
-    211  00  YHQI15   0                        EOM 
-  00001C02  0760FF80  07640064...00010030       07     100.0 MB   HGT           
-    211  00  YHQI10   0                        EOM
-  00001C02  0760FF80  216403E8...00010030       33    1000.0 MB   U GRD
-    211  00  YUQI99   0                        EOM
-  00001C02  0760FF80  216403CF...00010030       33     975.0 MB   U GRD
-    211  00  YUQI93   0                        EOM
-  00001C02  0760FF80  216403B6...00010030       33     950.0 MB   U GRD
-    211  00  YUQI95   0                        EOM
-  00001C02  0760FF80  2164039D...00010030       33     925.0 MB   U GRD
-    211  00  YUQI92   0                        EOM
-  00001C02  0760FF80  21640384...00010030       33     900.0 MB   U GRD
-    211  00  YUQI90   0                        EOM
-  00001C02  0760FF80  2164036B...00010030       33     875.0 MB   U GRD
-    211  00  YUQI91   0                        EOM
-  00001C02  0760FF80  21640352...00010030       33     850.0 MB   U GRD         
-    211  00  YUQI85   0                        EOM
-  00001C02  0760FF80  21640339...00010030       33     825.0 MB   U GRD
-    211  00  YUQI82   0                        EOM
-  00001C02  0760FF80  21640320...00010030       33     800.0 MB   U GRD
-    211  00  YUQI80   0                        EOM
-  00001C02  0760FF80  21640307...00010030       33     775.0 MB   U GRD
-    211  00  YUQI77   0                        EOM
-  00001C02  0760FF80  216402EE...00010030       33     750.0 MB   U GRD
-    211  00  YUQI75   0                        EOM
-  00001C02  0760FF80  216402D5...00010030       33     725.0 MB   U GRD
-    211  00  YUQI72   0                        EOM
-  00001C02  0760FF80  216402BC...00010030       33     700.0 MB   U GRD         
-    211  00  YUQI70   0                        EOM
-  00001C02  0760FF80  216402A3...00010030       33     675.0 MB   U GRD
-    211  00  YUQI67   0                        EOM
-  00001C02  0760FF80  2164028A...00010030       33     650.0 MB   U GRD
-    211  00  YUQI65   0                        EOM
-  00001C02  0760FF80  21640271...00010030       33     625.0 MB   U GRD
-    211  00  YUQI62   0                        EOM
-  00001C02  0760FF80  21640258...00010030       33     600.0 MB   U GRD
-    211  00  YUQI60   0                        EOM
-  00001C02  0760FF80  2164023F...00010030       33     575.0 MB   U GRD
-    211  00  YUQI57   0                        EOM
-  00001C02  0760FF80  21640226...00010030       33     550.0 MB   U GRD
-    211  00  YUQI55   0                        EOM
-  00001C02  0760FF80  2164020D...00010030       33     525.0 MB   U GRD
-    211  00  YUQI52   0                        EOM
-  00001C02  0760FF80  216401F4...00010030       33     500.0 MB   U GRD         
-    211  00  YUQI50   0                        EOM
-  00001C02  0760FF80  216401C2...00010030       33     450.0 MB   U GRD
-    211  00  YUQI45   0                        EOM
-  00001C02  0760FF80  21640190...00010030       33     400.0 MB   U GRD         
-    211  00  YUQI40   0                        EOM
-  00001C02  0760FF80  2164015E...00010030       33     350.0 MB   U GRD
-    211  00  YUQI35   0                        EOM
-  00001C02  0760FF80  2164012C...00010030       33     300.0 MB   U GRD         
-    211  00  YUQI30   0                        EOM
-  00001C02  0760FF80  216400FA...00010030       33     250.0 MB   U GRD         
-    211  00  YUQI25   0                        EOM
-  00001C02  0760FF80  216400C8...00010030       33     200.0 MB   U GRD         
-    211  00  YUQI20   0                        EOM
-  00001C02  0760FF80  21640096...00010030       33     150.0 MB   U GRD         
-    211  00  YUQI15   0                        EOM
-  00001C02  0760FF80  21640064...00010030       33     100.0 MB   U GRD         
-    211  00  YUQI10   0                        EOM
-  00001C02  0760FF80  226403E8...00010030       34    1000.0 MB   V GRD
-    211  00  YVQI99   0                        EOM
-  00001C02  0760FF80  226403CF...00010030       34     975.0 MB   V GRD
-    211  00  YVQI93   0                        EOM
-  00001C02  0760FF80  226403B6...00010030       34     950.0 MB   V GRD
-    211  00  YVQI95   0                        EOM
-  00001C02  0760FF80  2264039D...00010030       34     925.0 MB   V GRD
-    211  00  YVQI92   0                        EOM
-  00001C02  0760FF80  22640384...00010030       34     900.0 MB   V GRD
-    211  00  YVQI90   0                        EOM
-  00001C02  0760FF80  2264036B...00010030       34     875.0 MB   V GRD
-    211  00  YVQI91   0                        EOM
-  00001C02  0760FF80  22640352...00010030       34     850.0 MB   V GRD         
-    211  00  YVQI85   0                        EOM
-  00001C02  0760FF80  22640339...00010030       34     825.0 MB   V GRD
-    211  00  YVQI82   0                        EOM
-  00001C02  0760FF80  22640320...00010030       34     800.0 MB   V GRD
-    211  00  YVQI80   0                        EOM
-  00001C02  0760FF80  22640307...00010030       34     775.0 MB   V GRD
-    211  00  YVQI77   0                        EOM
-  00001C02  0760FF80  226402EE...00010030       34     750.0 MB   V GRD
-    211  00  YVQI75   0                        EOM
-  00001C02  0760FF80  226402D5...00010030       34     725.0 MB   V GRD
-    211  00  YVQI72   0                        EOM
-  00001C02  0760FF80  226402BC...00010030       34     700.0 MB   V GRD         
-    211  00  YVQI70   0                        EOM
-  00001C02  0760FF80  226402A3...00010030       34     675.0 MB   V GRD
-    211  00  YVQI67   0                        EOM
-  00001C02  0760FF80  2264028A...00010030       34     650.0 MB   V GRD
-    211  00  YVQI65   0                        EOM
-  00001C02  0760FF80  22640271...00010030       34     625.0 MB   V GRD
-    211  00  YVQI62   0                        EOM
-  00001C02  0760FF80  22640258...00010030       34     600.0 MB   V GRD
-    211  00  YVQI60   0                        EOM
-  00001C02  0760FF80  2264023F...00010030       34     575.0 MB   V GRD
-    211  00  YVQI57   0                        EOM
-  00001C02  0760FF80  22640226...00010030       34     550.0 MB   V GRD
-    211  00  YVQI55   0                        EOM
-  00001C02  0760FF80  2264020D...00010030       34     525.0 MB   V GRD
-    211  00  YVQI52   0                        EOM
-  00001C02  0760FF80  226401F4...00010030       34     500.0 MB   V GRD         
-    211  00  YVQI50   0                        EOM
-  00001C02  0760FF80  226401C2...00010030       34     450.0 MB   V GRD
-    211  00  YVQI45   0                        EOM
-  00001C02  0760FF80  22640190...00010030       34     400.0 MB   V GRD         
-    211  00  YVQI40   0                        EOM
-  00001C02  0760FF80  2264015E...00010030       34     350.0 MB   V GRD
-    211  00  YVQI35   0                        EOM
-  00001C02  0760FF80  2264012C...00010030       34     300.0 MB   V GRD         
-    211  00  YVQI30   0                        EOM
-  00001C02  0760FF80  226400FA...00010030       34     250.0 MB   V GRD         
-    211  00  YVQI25   0                        EOM
-  00001C02  0760FF80  226400C8...00010030       34     200.0 MB   V GRD         
-    211  00  YVQI20   0                        EOM
-  00001C02  0760FF80  22640096...00010030       34     150.0 MB   V GRD         
-    211  00  YVQI15   0                        EOM
-  00001C02  0760FF80  22640064...00010030       34     100.0 MB   V GRD         
-    211  00  YVQI10   0                        EOM
-  00001C02  0760FF80  02660000...00010030       02           MSL  PRMSL         
-    211  00  YPQI89   0                        EOM
-  00001C02  0760FF80  346403E8...00010030       52    1000.0 MB   R H
-    211  00  YRQI99   0                        EOM
-  00001C02  0760FF80  346403CF...00010030       52     975.0 MB   R H
-    211  00  YRQI93   0                        EOM
-  00001C02  0760FF80  346403B6...00010030       52     950.0 MB   R H
-    211  00  YRQI95   0                        EOM
-  00001C02  0760FF80  3464039D...00010030       52     925.0 MB   R H
-    211  00  YRQI92   0                        EOM
-  00001C02  0760FF80  34640384...00010030       52     900.0 MB   R H
-    211  00  YRQI90   0                        EOM
-  00001C02  0760FF80  3464036B...00010030       52     875.0 MB   R H
-    211  00  YRQI91   0                        EOM
-  00001C02  0760FF80  34640352...00010030       52     850.0 MB   R H           
-    211  00  YRQI85   0                        EOM
-  00001C02  0760FF80  34640339...00010030       52     825.0 MB   R H
-    211  00  YRQI82   0                        EOM
-  00001C02  0760FF80  34640320...00010030       52     800.0 MB   R H
-    211  00  YRQI80   0                        EOM
-  00001C02  0760FF80  34640307...00010030       52     775.0 MB   R H
-    211  00  YRQI77   0                        EOM
-  00001C02  0760FF80  346402EE...00010030       52     750.0 MB   R H
-    211  00  YRQI75   0                        EOM
-  00001C02  0760FF80  346402D5...00010030       52     725.0 MB   R H
-    211  00  YRQI72   0                        EOM
-  00001C02  0760FF80  346402BC...00010030       52     700.0 MB   R H           
-    211  00  YRQI70   0                        EOM
-  00001C02  0760FF80  346402A3...00010030       52     675.0 MB   R H
-    211  00  YRQI67   0                        EOM
-  00001C02  0760FF80  3464028A...00010030       52     650.0 MB   R H
-    211  00  YRQI65   0                        EOM
-  00001C02  0760FF80  34640271...00010030       52     625.0 MB   R H
-    211  00  YRQI62   0                        EOM
-  00001C02  0760FF80  34640258...00010030       52     600.0 MB   R H
-    211  00  YRQI60   0                        EOM
-  00001C02  0760FF80  3464023F...00010030       52     575.0 MB   R H
-    211  00  YRQI57   0                        EOM
-  00001C02  0760FF80  34640226...00010030       52     550.0 MB   R H
-    211  00  YRQI55   0                        EOM
-  00001C02  0760FF80  3464020D...00010030       52     525.0 MB   R H
-    211  00  YRQI52   0                        EOM
-  00001C02  0760FF80  346401F4...00010030       52     500.0 MB   R H           
-    211  00  YRQI50   0                        EOM
-  00001C02  0760FF80  346401C2...00010030       52     450.0 MB   R H
-    211  00  YRQI45   0                        EOM
-  00001C02  0760FF80  34640190...00010030       52     400.0 MB   R H           
-    211  00  YRQI40   0                        EOM
-  00001C02  0760FF80  3464015E...00010030       52     350.0 MB   R H
-    211  00  YRQI35   0                        EOM
-  00001C02  0760FF80  3464012C...00010030       52     300.0 MB   R H           
-    211  00  YRQI30   0                        EOM
-  00001C02  0760FF80  346400FA...00010030       52     250.0 MB   R H
-    211  00  YRQI25   0                        EOM
-  00001C02  0760FF80  346400C8...00010030       52     200.0 MB   R H
-    211  00  YRQI20   0                        EOM
-  00001C02  0760FF80  34640096...00010030       52     150.0 MB   R H
-    211  00  YRQI15   0                        EOM
-  00001C02  0760FF80  34640064...00010030       52     100.0 MB   R H
-    211  00  YRQI10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010030       11    1000.0 MB   TMP
-    211  00  YTQI99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010030       11     975.0 MB   TMP
-    211  00  YTQI93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010030       11     950.0 MB   TMP
-    211  00  YTQI95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010030       11     925.0 MB   TMP
-    211  00  YTQI92   0                        EOM
-  00001C02  0760FF80  0B640384...00010030       11     900.0 MB   TMP
-    211  00  YTQI90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010030       11     875.0 MB   TMP
-    211  00  YTQI91   0                        EOM
-  00001C02  0760FF80  0B640352...00010030       11     850.0 MB   TMP           
-    211  00  YTQI85   0                        EOM
-  00001C02  0760FF80  0B640339...00010030       11     825.0 MB   TMP
-    211  00  YTQI82   0                        EOM
-  00001C02  0760FF80  0B640320...00010030       11     800.0 MB   TMP
-    211  00  YTQI80   0                        EOM
-  00001C02  0760FF80  0B640307...00010030       11     775.0 MB   TMP
-    211  00  YTQI77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010030       11     750.0 MB   TMP
-    211  00  YTQI75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010030       11     725.0 MB   TMP
-    211  00  YTQI72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010030       11     700.0 MB   TMP           
-    211  00  YTQI70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010030       11     675.0 MB   TMP
-    211  00  YTQI67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010030       11     650.0 MB   TMP
-    211  00  YTQI65   0                        EOM
-  00001C02  0760FF80  0B640271...00010030       11     625.0 MB   TMP
-    211  00  YTQI62   0                        EOM
-  00001C02  0760FF80  0B640258...00010030       11     600.0 MB   TMP
-    211  00  YTQI60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010030       11     575.0 MB   TMP
-    211  00  YTQI57   0                        EOM
-  00001C02  0760FF80  0B640226...00010030       11     550.0 MB   TMP
-    211  00  YTQI55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010030       11     525.0 MB   TMP
-    211  00  YTQI52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010030       11     500.0 MB   TMP           
-    211  00  YTQI50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010030       11     450.0 MB   TMP
-    211  00  YTQI45   0                        EOM
-  00001C02  0760FF80  0B640190...00010030       11     400.0 MB   TMP           
-    211  00  YTQI40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010030       11     350.0 MB   TMP
-    211  00  YTQI35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010030       11     300.0 MB   TMP           
-    211  00  YTQI30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010030       11     250.0 MB   TMP           
-    211  00  YTQI25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010030       11     200.0 MB   TMP           
-    211  00  YTQI20   0                        EOM
-  00001C02  0760FF80  0B640096...00010030       11     150.0 MB   TMP           
-    211  00  YTQI15   0                        EOM
-  00001C02  0760FF80  0B640064...00010030       11     100.0 MB   TMP           
-    211  00  YTQI10   0                        EOM
-  00001C02  0760FF80  28640352...00010030       40     850.0 MB  DZDT           
-    211  00  YOQI85   0                        EOM
-  00001C02  0760FF80  286402BC...00010030       40     700.0 MB  DZDT           
-    211  00  YOQI70   0                        EOM
-  00001C02  0760FF80  286401F4...00010030       40     500.0 MB  DZDT           
-    211  00  YOQI50   0                        EOM
-  00001C02  0760FF80  28640190...00010030       40     400.0 MB  DZDT           
-    211  00  YOQI40   0                        EOM
-  00001C02  0760FF80  2864012C...00010030       40     300.0 MB  DZDT           
-    211  00  YOQI30   0                        EOM
-  00001C02  0760FF80  286400FA...00010030       40     250.0 MB  DZDT           
-    211  00  YOQI25   0                        EOM
-  00001C02  0760FF80  286400C8...00010030       40     200.0 MB  DZDT           
-    211  00  YOQI20   0                        EOM
-  00001C02  0760FF80  28640096...00010030       40     150.0 MB  DZDT           
-    211  00  YOQI15   0                        EOM
-  00001C02  0760FF80  28640064...00010030       40     100.0 MB  DZDT           
-    211  00  YOQI10   0                        EOM
-  00001C02  0760FF80  01010000...00010030       01          SFC  PRES           
-    211  00  YPQI98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010030       52        44/100  R H           
-    211  00  YRQI00   0                        EOM
-  00001C02  0760FF80  36C80000...00010030       54          EATM  P WAT         
-    211  00  YFQI00   0                        EOM
-  00001C02  0760FF80  0B690002...00010030       11          2m/SFC TMP         
-    211  00  YTQI98   0                        EOM
-  00001C02  0760FF80  34741E00...00010030       52      BNDRY/SPD  R H         
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  0B070000...00010030       11            TRO TMP           
-    211  00  YTQI97   0                        EOM
-  00001C02  0760FF80  01070000...00010030       01            TRO PRES          
-    211  00  YPQI97   0                        EOM
-  00001C02  0760FF80  21741E00...00010030       33           SPD  U GRD         
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  22741E00...00010030       34           SPD  V GRD         
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  21070000...00010030       33            TRO U GRD         
-    211  00  YUQI97   0                        EOM
-  00001C02  0760FF80  22070000...00010030       34            TRO V GRD         
-    211  00  YVQI97   0                        EOM
-  00001C02  0760FF80  88070000...00010030      136            TRO VW SH         
-    211  00  YBQI97   0                        EOM
-  00001C02  0760FF80  3D010000...00010030       61            SFC A PCP         
-    211  00  YEQI98   0                        EOM
-  00001C02  0760FF80  83010000...00010030      131            SFC LFT X         
-    211  00  YXQI98   0                        EOM
-  00001C02  0760FF80  296402BC...00010030       41    700.0 MB    ABS V         
-    211  00  YCQI70   0                        EOM
-  00001C02  0760FF80  296401F4...00010030       41    500.0 MB    ABS V         
-    211  00  YCQI50   0                        EOM
-  00001C02  0760FF80  9D010000...00010030      157          SFC   CAPE
-    211  00  YWQI98   0                        EOM
-  00001C02  0760FF80  9C010000...00010030      156          SFC   CIN
-    211  00  YYQI98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010030      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQI86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010030      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQI86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010030       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010030       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010030       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010030       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  0B749678...00010030       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010030       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQI86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010030       52   60 SPDY  30 SPDY  R H
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010030       52   90 SPDY  60 SPDY  R H
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  3474785A...00010030       52  120 SPDY  90 SPDY  R H
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  34749678...00010030       52  150 SPDY 120 SPDY  R H
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  3474B496...00010030       52  180 SPDY 150 SPDY  R H
-    211  00  YRQI86   0                        EOM
-  00001C02  0760FF80  21741E00...00010030       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010030       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010030       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  2174785A...00010030       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  21749678...00010030       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  2174B496...00010030       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQI86   0                        EOM
-  00001C02  0760FF80  22741E00...00010030       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010030       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010030       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  2274785A...00010030       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  22749678...00010030       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  2274B496...00010030       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQI86   0                        EOM
-  00001C02  0760FF80  0B690002...00010030       11    2  HTGL     TMP
-    211  00  YTQI98   0                        EOM
-  00001C02  0760FF80  34690002...00010030       52    2  HTGL     R H
-    211  00  YRQI98   0                        EOM
-  00001C02  0760FF80  2169000A...00010030       33   10  HTGL     U GRD
-    211  00  YUQI98   0                        EOM
-  00001C02  0760FF80  2269000A...00010030       34   10  HTGL     V GRD
-    211  00  YVQI98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs054.211 b/parm/wmo/grib_awpgfs054.211
deleted file mode 100755
index dffb5189ac..0000000000
--- a/parm/wmo/grib_awpgfs054.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010036       07    1000.0 MB   HGT           
-    211  00  ZHQM99   0                        EOM
-  00001C02  0760FF80  076403CF...00010036       07     975.0 MB   HGT
-    211  00  ZHQM93   0                        EOM
-  00001C02  0760FF80  076403B6...00010036       07     950.0 MB   HGT
-    211  00  ZHQM95   0                        EOM
-  00001C02  0760FF80  0764039D...00010036       07     925.0 MB   HGT
-    211  00  ZHQM92   0                        EOM
-  00001C02  0760FF80  07640384...00010036       07     900.0 MB   HGT
-    211  00  ZHQM90   0                        EOM
-  00001C02  0760FF80  0764036B...00010036       07     875.0 MB   HGT
-    211  00  ZHQM91   0                        EOM
-  00001C02  0760FF80  07640352...00010036       07     850.0 MB   HGT           
-    211  00  ZHQM85   0                        EOM
-  00001C02  0760FF80  07640339...00010036       07     825.0 MB   HGT
-    211  00  ZHQM82   0                        EOM
-  00001C02  0760FF80  07640320...00010036       07     800.0 MB   HGT
-    211  00  ZHQM80   0                        EOM
-  00001C02  0760FF80  07640307...00010036       07     775.0 MB   HGT
-    211  00  ZHQM77   0                        EOM
-  00001C02  0760FF80  076402EE...00010036       07     750.0 MB   HGT
-    211  00  ZHQM75   0                        EOM
-  00001C02  0760FF80  076402D5...00010036       07     725.0 MB   HGT
-    211  00  ZHQM72   0                        EOM
-  00001C02  0760FF80  076402BC...00010036       07     700.0 MB   HGT           
-    211  00  ZHQM70   0                        EOM
-  00001C02  0760FF80  076402A3...00010036       07     675.0 MB   HGT
-    211  00  ZHQM67   0                        EOM
-  00001C02  0760FF80  0764028A...00010036       07     650.0 MB   HGT
-    211  00  ZHQM65   0                        EOM
-  00001C02  0760FF80  07640271...00010036       07     625.0 MB   HGT
-    211  00  ZHQM62   0                        EOM
-  00001C02  0760FF80  07640258...00010036       07     600.0 MB   HGT
-    211  00  ZHQM60   0                        EOM
-  00001C02  0760FF80  0764023F...00010036       07     575.0 MB   HGT
-    211  00  ZHQM57   0                        EOM
-  00001C02  0760FF80  07640226...00010036       07     550.0 MB   HGT
-    211  00  ZHQM55   0                        EOM
-  00001C02  0760FF80  0764020D...00010036       07     525.0 MB   HGT
-    211  00  ZHQM52   0                        EOM
-  00001C02  0760FF80  076401F4...00010036       07     500.0 MB   HGT           
-    211  00  ZHQM50   0                        EOM
-  00001C02  0760FF80  076401C2...00010036       07     450.0 MB   HGT
-    211  00  ZHQM45   0                        EOM
-  00001C02  0760FF80  07640190...00010036       07     400.0 MB   HGT           
-    211  00  ZHQM40   0                        EOM
-  00001C02  0760FF80  0764015E...00010036       07     350.0 MB   HGT
-    211  00  ZHQM35   0                        EOM
-  00001C02  0760FF80  0764012C...00010036       07     300.0 MB   HGT           
-    211  00  ZHQM30   0                        EOM
-  00001C02  0760FF80  076400FA...00010036       07     250.0 MB   HGT           
-    211  00  ZHQM25   0                        EOM
-  00001C02  0760FF80  076400C8...00010036       07     200.0 MB   HGT           
-    211  00  ZHQM20   0                        EOM
-  00001C02  0760FF80  07640096...00010036       07     150.0 MB   HGT           
-    211  00  ZHQM15   0                        EOM
-  00001C02  0760FF80  07640064...00010036       07     100.0 MB   HGT           
-    211  00  ZHQM10   0                        EOM
-  00001C02  0760FF80  216403E8...00010036       33    1000.0 MB   U GRD
-    211  00  ZUQM99   0                        EOM
-  00001C02  0760FF80  216403CF...00010036       33     975.0 MB   U GRD
-    211  00  ZUQM93   0                        EOM
-  00001C02  0760FF80  216403B6...00010036       33     950.0 MB   U GRD
-    211  00  ZUQM95   0                        EOM
-  00001C02  0760FF80  2164039D...00010036       33     925.0 MB   U GRD
-    211  00  ZUQM92   0                        EOM
-  00001C02  0760FF80  21640384...00010036       33     900.0 MB   U GRD
-    211  00  ZUQM90   0                        EOM
-  00001C02  0760FF80  2164036B...00010036       33     875.0 MB   U GRD
-    211  00  ZUQM91   0                        EOM
-  00001C02  0760FF80  21640352...00010036       33     850.0 MB   U GRD         
-    211  00  ZUQM85   0                        EOM
-  00001C02  0760FF80  21640339...00010036       33     825.0 MB   U GRD
-    211  00  ZUQM82   0                        EOM
-  00001C02  0760FF80  21640320...00010036       33     800.0 MB   U GRD
-    211  00  ZUQM80   0                        EOM
-  00001C02  0760FF80  21640307...00010036       33     775.0 MB   U GRD
-    211  00  ZUQM77   0                        EOM
-  00001C02  0760FF80  216402EE...00010036       33     750.0 MB   U GRD
-    211  00  ZUQM75   0                        EOM
-  00001C02  0760FF80  216402D5...00010036       33     725.0 MB   U GRD
-    211  00  ZUQM72   0                        EOM
-  00001C02  0760FF80  216402BC...00010036       33     700.0 MB   U GRD         
-    211  00  ZUQM70   0                        EOM
-  00001C02  0760FF80  216402A3...00010036       33     675.0 MB   U GRD
-    211  00  ZUQM67   0                        EOM
-  00001C02  0760FF80  2164028A...00010036       33     650.0 MB   U GRD
-    211  00  ZUQM65   0                        EOM
-  00001C02  0760FF80  21640271...00010036       33     625.0 MB   U GRD
-    211  00  ZUQM62   0                        EOM
-  00001C02  0760FF80  21640258...00010036       33     600.0 MB   U GRD
-    211  00  ZUQM60   0                        EOM
-  00001C02  0760FF80  2164023F...00010036       33     575.0 MB   U GRD
-    211  00  ZUQM57   0                        EOM
-  00001C02  0760FF80  21640226...00010036       33     550.0 MB   U GRD
-    211  00  ZUQM55   0                        EOM
-  00001C02  0760FF80  2164020D...00010036       33     525.0 MB   U GRD
-    211  00  ZUQM52   0                        EOM
-  00001C02  0760FF80  216401F4...00010036       33     500.0 MB   U GRD         
-    211  00  ZUQM50   0                        EOM
-  00001C02  0760FF80  216401C2...00010036       33     450.0 MB   U GRD
-    211  00  ZUQM45   0                        EOM
-  00001C02  0760FF80  21640190...00010036       33     400.0 MB   U GRD         
-    211  00  ZUQM40   0                        EOM
-  00001C02  0760FF80  2164015E...00010036       33     350.0 MB   U GRD
-    211  00  ZUQM35   0                        EOM
-  00001C02  0760FF80  2164012C...00010036       33     300.0 MB   U GRD         
-    211  00  ZUQM30   0                        EOM
-  00001C02  0760FF80  216400FA...00010036       33     250.0 MB   U GRD         
-    211  00  ZUQM25   0                        EOM
-  00001C02  0760FF80  216400C8...00010036       33     200.0 MB   U GRD         
-    211  00  ZUQM20   0                        EOM
-  00001C02  0760FF80  21640096...00010036       33     150.0 MB   U GRD         
-    211  00  ZUQM15   0                        EOM
-  00001C02  0760FF80  21640064...00010036       33     100.0 MB   U GRD         
-    211  00  ZUQM10   0                        EOM
-  00001C02  0760FF80  226403E8...00010036       34    1000.0 MB   V GRD
-    211  00  ZVQM99   0                        EOM
-  00001C02  0760FF80  226403CF...00010036       34     975.0 MB   V GRD
-    211  00  ZVQM93   0                        EOM
-  00001C02  0760FF80  226403B6...00010036       34     950.0 MB   V GRD
-    211  00  ZVQM95   0                        EOM
-  00001C02  0760FF80  2264039D...00010036       34     925.0 MB   V GRD
-    211  00  ZVQM92   0                        EOM
-  00001C02  0760FF80  22640384...00010036       34     900.0 MB   V GRD
-    211  00  ZVQM90   0                        EOM
-  00001C02  0760FF80  2264036B...00010036       34     875.0 MB   V GRD
-    211  00  ZVQM91   0                        EOM
-  00001C02  0760FF80  22640352...00010036       34     850.0 MB   V GRD         
-    211  00  ZVQM85   0                        EOM
-  00001C02  0760FF80  22640339...00010036       34     825.0 MB   V GRD
-    211  00  ZVQM82   0                        EOM
-  00001C02  0760FF80  22640320...00010036       34     800.0 MB   V GRD
-    211  00  ZVQM80   0                        EOM
-  00001C02  0760FF80  22640307...00010036       34     775.0 MB   V GRD
-    211  00  ZVQM77   0                        EOM
-  00001C02  0760FF80  226402EE...00010036       34     750.0 MB   V GRD
-    211  00  ZVQM75   0                        EOM
-  00001C02  0760FF80  226402D5...00010036       34     725.0 MB   V GRD
-    211  00  ZVQM72   0                        EOM
-  00001C02  0760FF80  226402BC...00010036       34     700.0 MB   V GRD         
-    211  00  ZVQM70   0                        EOM 
-  00001C02  0760FF80  226402A3...00010036       34     675.0 MB   V GRD
-    211  00  ZVQM67   0                        EOM
-  00001C02  0760FF80  2264028A...00010036       34     650.0 MB   V GRD
-    211  00  ZVQM65   0                        EOM
-  00001C02  0760FF80  22640271...00010036       34     625.0 MB   V GRD
-    211  00  ZVQM62   0                        EOM
-  00001C02  0760FF80  22640258...00010036       34     600.0 MB   V GRD
-    211  00  ZVQM60   0                        EOM
-  00001C02  0760FF80  2264023F...00010036       34     575.0 MB   V GRD
-    211  00  ZVQM57   0                        EOM
-  00001C02  0760FF80  22640226...00010036       34     550.0 MB   V GRD
-    211  00  ZVQM55   0                        EOM
-  00001C02  0760FF80  2264020D...00010036       34     525.0 MB   V GRD
-    211  00  ZVQM52   0                        EOM
-  00001C02  0760FF80  226401F4...00010036       34     500.0 MB   V GRD         
-    211  00  ZVQM50   0                        EOM
-  00001C02  0760FF80  226401C2...00010036       34     450.0 MB   V GRD
-    211  00  ZVQM45   0                        EOM
-  00001C02  0760FF80  22640190...00010036       34     400.0 MB   V GRD         
-    211  00  ZVQM40   0                        EOM
-  00001C02  0760FF80  2264015E...00010036       34     350.0 MB   V GRD
-    211  00  ZVQM35   0                        EOM
-  00001C02  0760FF80  2264012C...00010036       34     300.0 MB   V GRD         
-    211  00  ZVQM30   0                        EOM
-  00001C02  0760FF80  226400FA...00010036       34     250.0 MB   V GRD         
-    211  00  ZVQM25   0                        EOM
-  00001C02  0760FF80  226400C8...00010036       34     200.0 MB   V GRD         
-    211  00  ZVQM20   0                        EOM
-  00001C02  0760FF80  22640096...00010036       34     150.0 MB   V GRD         
-    211  00  ZVQM15   0                        EOM
-  00001C02  0760FF80  22640064...00010036       34     100.0 MB   V GRD         
-    211  00  ZVQM10   0                        EOM
-  00001C02  0760FF80  02660000...00010036       02           MSL  PRMSL         
-    211  00  ZPQM89   0                        EOM
-  00001C02  0760FF80  346403E8...00010036       52    1000.0 MB   R H
-    211  00  ZRQM99   0                        EOM
-  00001C02  0760FF80  346403CF...00010036       52     975.0 MB   R H
-    211  00  ZRQM93   0                        EOM
-  00001C02  0760FF80  346403B6...00010036       52     950.0 MB   R H
-    211  00  ZRQM95   0                        EOM
-  00001C02  0760FF80  3464039D...00010036       52     925.0 MB   R H
-    211  00  ZRQM92   0                        EOM
-  00001C02  0760FF80  34640384...00010036       52     900.0 MB   R H
-    211  00  ZRQM90   0                        EOM
-  00001C02  0760FF80  3464036B...00010036       52     875.0 MB   R H
-    211  00  ZRQM91   0                        EOM
-  00001C02  0760FF80  34640352...00010036       52     850.0 MB   R H           
-    211  00  ZRQM85   0                        EOM
-  00001C02  0760FF80  34640339...00010036       52     825.0 MB   R H
-    211  00  ZRQM82   0                        EOM
-  00001C02  0760FF80  34640320...00010036       52     800.0 MB   R H
-    211  00  ZRQM80   0                        EOM
-  00001C02  0760FF80  34640307...00010036       52     775.0 MB   R H
-    211  00  ZRQM77   0                        EOM
-  00001C02  0760FF80  346402EE...00010036       52     750.0 MB   R H
-    211  00  ZRQM75   0                        EOM
-  00001C02  0760FF80  346402D5...00010036       52     725.0 MB   R H
-    211  00  ZRQM72   0                        EOM
-  00001C02  0760FF80  346402BC...00010036      52      700.0 MB   R H           
-    211  00  ZRQM70   0                        EOM
-  00001C02  0760FF80  346402A3...00010036       52     675.0 MB   R H
-    211  00  ZRQM67   0                        EOM
-  00001C02  0760FF80  3464028A...00010036       52     650.0 MB   R H
-    211  00  ZRQM65   0                        EOM
-  00001C02  0760FF80  34640271...00010036       52     625.0 MB   R H
-    211  00  ZRQM62   0                        EOM
-  00001C02  0760FF80  34640258...00010036       52     600.0 MB   R H
-    211  00  ZRQM60   0                        EOM
-  00001C02  0760FF80  3464023F...00010036       52     575.0 MB   R H
-    211  00  ZRQM57   0                        EOM
-  00001C02  0760FF80  34640226...00010036       52     550.0 MB   R H
-    211  00  ZRQM55   0                        EOM
-  00001C02  0760FF80  3464020D...00010036       52     525.0 MB   R H
-    211  00  ZRQM52   0                        EOM
-  00001C02  0760FF80  346401F4...00010036       52     500.0 MB   R H           
-    211  00  ZRQM50   0                        EOM
-  00001C02  0760FF80  346401C2...00010036       52     450.0 MB   R H
-    211  00  ZRQM45   0                        EOM
-  00001C02  0760FF80  34640190...00010036       52     400.0 MB   R H           
-    211  00  ZRQM40   0                        EOM
-  00001C02  0760FF80  3464015E...00010036       52     350.0 MB   R H
-    211  00  ZRQM35   0                        EOM
-  00001C02  0760FF80  3464012C...00010036       52     300.0 MB   R H           
-    211  00  ZRQM30   0                        EOM
-  00001C02  0760FF80  346400FA...00010036       52     250.0 MB   R H
-    211  00  ZRQM25   0                        EOM
-  00001C02  0760FF80  346400C8...00010036       52     200.0 MB   R H
-    211  00  ZRQM20   0                        EOM
-  00001C02  0760FF80  34640096...00010036       52     150.0 MB   R H
-    211  00  ZRQM15   0                        EOM
-  00001C02  0760FF80  34640064...00010036       52     100.0 MB   R H
-    211  00  ZRQM10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010036       11    1000.0 MB   TMP
-    211  00  ZTQM99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010036       11     975.0 MB   TMP
-    211  00  ZTQM93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010036       11     950.0 MB   TMP
-    211  00  ZTQM95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010036       11     925.0 MB   TMP
-    211  00  ZTQM92   0                        EOM
-  00001C02  0760FF80  0B640384...00010036       11     900.0 MB   TMP
-    211  00  ZTQM90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010036       11     875.0 MB   TMP
-    211  00  ZTQM91   0                        EOM
-  00001C02  0760FF80  0B640352...00010036       11     850.0 MB   TMP           
-    211  00  ZTQM85   0                        EOM
-  00001C02  0760FF80  0B640339...00010036       11     825.0 MB   TMP
-    211  00  ZTQM82   0                        EOM
-  00001C02  0760FF80  0B640320...00010036       11     800.0 MB   TMP
-    211  00  ZTQM80   0                        EOM
-  00001C02  0760FF80  0B640307...00010036       11     775.0 MB   TMP
-    211  00  ZTQM77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010036       11     750.0 MB   TMP
-    211  00  ZTQM75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010036       11     725.0 MB   TMP
-    211  00  ZTQM72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010036       11     700.0 MB   TMP           
-    211  00  ZTQM70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010036       11     675.0 MB   TMP
-    211  00  ZTQM67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010036       11     650.0 MB   TMP
-    211  00  ZTQM65   0                        EOM
-  00001C02  0760FF80  0B640271...00010036       11     625.0 MB   TMP
-    211  00  ZTQM62   0                        EOM
-  00001C02  0760FF80  0B640258...00010036       11     600.0 MB   TMP
-    211  00  ZTQM60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010036       11     575.0 MB   TMP
-    211  00  ZTQM57   0                        EOM
-  00001C02  0760FF80  0B640226...00010036       11     550.0 MB   TMP
-    211  00  ZTQM55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010036       11     525.0 MB   TMP
-    211  00  ZTQM52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010036       11     500.0 MB   TMP           
-    211  00  ZTQM50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010036       11     450.0 MB   TMP
-    211  00  ZTQM45   0                        EOM
-  00001C02  0760FF80  0B640190...00010036       11     400.0 MB   TMP           
-    211  00  ZTQM40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010036       11     350.0 MB   TMP
-    211  00  ZTQM35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010036       11     300.0 MB   TMP           
-    211  00  ZTQM30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010036       11     250.0 MB   TMP           
-    211  00  ZTQM25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010036       11     200.0 MB   TMP           
-    211  00  ZTQM20   0                        EOM
-  00001C02  0760FF80  0B640096...00010036       11     150.0 MB   TMP           
-    211  00  ZTQM15   0                        EOM
-  00001C02  0760FF80  0B640064...00010036       11     100.0 MB   TMP           
-    211  00  ZTQM10   0                        EOM
-  00001C02  0760FF80  28640352...00010036       40     850.0 MB  DZDT           
-    211  00  ZOQM85   0                        EOM
-  00001C02  0760FF80  286402BC...00010036       40     700.0 MB  DZDT           
-    211  00  ZOQM70   0                        EOM
-  00001C02  0760FF80  286401F4...00010036       40     500.0 MB  DZDT           
-    211  00  ZOQM50   0                        EOM
-  00001C02  0760FF80  28640190...00010036       40     400.0 MB  DZDT           
-    211  00  ZOQM40   0                        EOM
-  00001C02  0760FF80  2864012C...00010036       40     300.0 MB  DZDT           
-    211  00  ZOQM30   0                        EOM
-  00001C02  0760FF80  286400FA...00010036       40     250.0 MB  DZDT           
-    211  00  ZOQM25   0                        EOM
-  00001C02  0760FF80  286400C8...00010036       40     200.0 MB  DZDT           
-    211  00  ZOQM20   0                        EOM
-  00001C02  0760FF80  28640096...00010036       40     150.0 MB  DZDT           
-    211  00  ZOQM15   0                        EOM
-  00001C02  0760FF80  28640064...00010036       40     100.0 MB  DZDT           
-    211  00  ZOQM10   0                        EOM
-  00001C02  0760FF80  01010000...00010036       01          SFC  PRES           
-    211  00  ZPQM98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010036       52        44/100  R H           
-    211  00  ZRQM00   0                        EOM
-  00001C02  0760FF80  36C80000...00010036       54          EATM  P WAT         
-    211  00  ZFQM00   0                        EOM
-  00001C02  0760FF80  0B690002...00010036       11          2m/SFC TMP          
-    211  00  ZTQM98   0                        EOM
-  00001C02  0760FF80  34741E00...00010036       52     BNDRY/SPD  R H           
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  0B070000...00010036       11            TRO TMP           
-    211  00  ZTQM97   0                        EOM
-  00001C02  0760FF80  01070000...00010036       01            TRO PRES          
-    211  00  ZPQM97   0                        EOM
-  00001C02  0760FF80  21741E00...00010036       33           SPD  U GRD         
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  22741E00...00010036       34           SPD  V GRD         
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  21070000...00010036       33            TRO U GRD         
-    211  00  ZUQM97   0                        EOM
-  00001C02  0760FF80  22070000...00010036       34            TRO V GRD         
-    211  00  ZVQM97   0                        EOM
-  00001C02  0760FF80  88070000...00010036      136            TRO VW SH         
-    211  00  ZBQM97   0                        EOM
-  00001C02  0760FF80  3D010000...00010036       61            SFC A PCP         
-    211  00  ZEQM98   0                        EOM
-  00001C02  0760FF80  83010000...00010036      131            SFC LFT X         
-    211  00  ZXQM98   0                        EOM
-  00001C02  0760FF80  29640352...00010036       41    850.0 MB    ABS V         
-    211  00  ZCQM85   0                        EOM
-  00001C02  0760FF80  296402BC...00010036       41    700.0 MB    ABS V         
-    211  00  ZCQM70   0                        EOM
-  00001C02  0760FF80  296401F4...00010036       41    500.0 MB    ABS V         
-    211  00  ZCQM50   0                        EOM
-  00001C02  0760FF80  296400FA...00010036       41    250.0 MB    ABS V         
-    211  00  ZCQM25   0                        EOM
-  00001C02  0760FF80  9D010000...00010036      157          SFC   CAPE
-    211  00  ZWQM98   0                        EOM
-  00001C02  0760FF80  9C010000...00010036      156          SFC   CIN
-    211  00  ZYQM98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010036      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQM86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010036      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQM86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010036       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010036       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010036       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010036       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  0B749678...00010036       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010036       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQM86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010036       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010036       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  3474785A...00010036       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  34749678...00010036       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  3474B496...00010036       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQM86   0                        EOM
-  00001C02  0760FF80  21741E00...00010036       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010036       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010036       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  2174785A...00010036       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  21749678...00010036       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  2174B496...00010036       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQM86   0                        EOM
-  00001C02  0760FF80  22741E00...00010036       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010036       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010036       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  2274785A...00010036       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  22749678...00010036       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  2274B496...00010036       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQM86   0                        EOM
-  00001C02  0760FF80  0B690002...00010036       11    2  HTGL     TMP
-    211  00  ZTQM98   0                        EOM
-  00001C02  0760FF80  34690002...00010036       52    2  HTGL     R H
-    211  00  ZRQM98   0                        EOM
-  00001C02  0760FF80  2169000A...00010036       33   10  HTGL     U GRD
-    211  00  ZUQM98   0                        EOM
-  00001C02  0760FF80  2269000A...00010036       34   10  HTGL     V GRD
-    211  00  ZVQM98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs060.211 b/parm/wmo/grib_awpgfs060.211
deleted file mode 100755
index 966be10a5a..0000000000
--- a/parm/wmo/grib_awpgfs060.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...0001003C       07    1000.0 MB   HGT           
-    211  00  YHQJ99   0                        EOM
-  00001C02  0760FF80  076403CF...0001003C       07     975.0 MB   HGT
-    211  00  YHQJ93   0                        EOM
-  00001C02  0760FF80  076403B6...0001003C       07     950.0 MB   HGT
-    211  00  YHQJ95   0                        EOM
-  00001C02  0760FF80  0764039D...0001003C       07     925.0 MB   HGT
-    211  00  YHQJ92   0                        EOM
-  00001C02  0760FF80  07640384...0001003C       07     900.0 MB   HGT
-    211  00  YHQJ90   0                        EOM
-  00001C02  0760FF80  0764036B...0001003C       07     875.0 MB   HGT
-    211  00  YHQJ91   0                        EOM
-  00001C02  0760FF80  07640352...0001003C       07     850.0 MB   HGT           
-    211  00  YHQJ85   0                        EOM
-  00001C02  0760FF80  07640339...0001003C       07     825.0 MB   HGT
-    211  00  YHQJ82   0                        EOM
-  00001C02  0760FF80  07640320...0001003C       07     800.0 MB   HGT
-    211  00  YHQJ80   0                        EOM
-  00001C02  0760FF80  07640307...0001003C       07     775.0 MB   HGT
-    211  00  YHQJ77   0                        EOM
-  00001C02  0760FF80  076402EE...0001003C       07     750.0 MB   HGT
-    211  00  YHQJ75   0                        EOM
-  00001C02  0760FF80  076402D5...0001003C       07     725.0 MB   HGT
-    211  00  YHQJ72   0                        EOM
-  00001C02  0760FF80  076402BC...0001003C       07     700.0 MB   HGT           
-    211  00  YHQJ70   0                        EOM
-  00001C02  0760FF80  076402A3...0001003C       07     675.0 MB   HGT
-    211  00  YHQJ67   0                        EOM
-  00001C02  0760FF80  0764028A...0001003C       07     650.0 MB   HGT
-    211  00  YHQJ65   0                        EOM
-  00001C02  0760FF80  07640271...0001003C       07     625.0 MB   HGT
-    211  00  YHQJ62   0                        EOM
-  00001C02  0760FF80  07640258...0001003C       07     600.0 MB   HGT
-    211  00  YHQJ60   0                        EOM
-  00001C02  0760FF80  0764023F...0001003C       07     575.0 MB   HGT
-    211  00  YHQJ57   0                        EOM
-  00001C02  0760FF80  07640226...0001003C       07     550.0 MB   HGT
-    211  00  YHQJ55   0                        EOM
-  00001C02  0760FF80  0764020D...0001003C       07     525.0 MB   HGT
-    211  00  YHQJ52   0                        EOM
-  00001C02  0760FF80  076401F4...0001003C       07     500.0 MB   HGT           
-    211  00  YHQJ50   0                        EOM
-  00001C02  0760FF80  076401C2...0001003C       07     450.0 MB   HGT
-    211  00  YHQJ45   0                        EOM
-  00001C02  0760FF80  07640190...0001003C       07     400.0 MB   HGT           
-    211  00  YHQJ40   0                        EOM
-  00001C02  0760FF80  0764015E...0001003C       07     350.0 MB   HGT
-    211  00  YHQJ35   0                        EOM
-  00001C02  0760FF80  0764012C...0001003C       07     300.0 MB   HGT           
-    211  00  YHQJ30   0                        EOM
-  00001C02  0760FF80  076400FA...0001003C       07     250.0 MB   HGT           
-    211  00  YHQJ25   0                        EOM
-  00001C02  0760FF80  076400C8...0001003C       07     200.0 MB   HGT           
-    211  00  YHQJ20   0                        EOM
-  00001C02  0760FF80  07640096...0001003C       07     150.0 MB   HGT           
-    211  00  YHQJ15   0                        EOM
-  00001C02  0760FF80  07640064...0001003C       07     100.0 MB   HGT           
-    211  00  YHQJ10   0                        EOM
-  00001C02  0760FF80  216403E8...0001003C       33    1000.0 MB   U GRD
-    211  00  YUQJ99   0                        EOM
-  00001C02  0760FF80  216403CF...0001003C       33     975.0 MB   U GRD
-    211  00  YUQJ93   0                        EOM
-  00001C02  0760FF80  216403B6...0001003C       33     950.0 MB   U GRD
-    211  00  YUQJ95   0                        EOM
-  00001C02  0760FF80  2164039D...0001003C       33     925.0 MB   U GRD
-    211  00  YUQJ92   0                        EOM
-  00001C02  0760FF80  21640384...0001003C       33     900.0 MB   U GRD
-    211  00  YUQJ90   0                        EOM
-  00001C02  0760FF80  2164036B...0001003C       33     875.0 MB   U GRD
-    211  00  YUQJ91   0                        EOM
-  00001C02  0760FF80  21640352...0001003C       33     850.0 MB   U GRD         
-    211  00  YUQJ85   0                        EOM
-  00001C02  0760FF80  21640339...0001003C       33     825.0 MB   U GRD
-    211  00  YUQJ82   0                        EOM
-  00001C02  0760FF80  21640320...0001003C       33     800.0 MB   U GRD
-    211  00  YUQJ80   0                        EOM
-  00001C02  0760FF80  21640307...0001003C       33     775.0 MB   U GRD
-    211  00  YUQJ77   0                        EOM
-  00001C02  0760FF80  216402EE...0001003C       33     750.0 MB   U GRD
-    211  00  YUQJ75   0                        EOM
-  00001C02  0760FF80  216402D5...0001003C       33     725.0 MB   U GRD
-    211  00  YUQJ72   0                        EOM
-  00001C02  0760FF80  216402BC...0001003C       33     700.0 MB   U GRD         
-    211  00  YUQJ70   0                        EOM
-  00001C02  0760FF80  216402A3...0001003C       33     675.0 MB   U GRD
-    211  00  YUQJ67   0                        EOM
-  00001C02  0760FF80  2164028A...0001003C       33     650.0 MB   U GRD
-    211  00  YUQJ65   0                        EOM
-  00001C02  0760FF80  21640271...0001003C       33     625.0 MB   U GRD
-    211  00  YUQJ62   0                        EOM
-  00001C02  0760FF80  21640258...0001003C       33     600.0 MB   U GRD
-    211  00  YUQJ60   0                        EOM
-  00001C02  0760FF80  2164023F...0001003C       33     575.0 MB   U GRD
-    211  00  YUQJ57   0                        EOM
-  00001C02  0760FF80  21640226...0001003C       33     550.0 MB   U GRD
-    211  00  YUQJ55   0                        EOM
-  00001C02  0760FF80  2164020D...0001003C       33     525.0 MB   U GRD
-    211  00  YUQJ52   0                        EOM
-  00001C02  0760FF80  216401F4...0001003C       33     500.0 MB   U GRD         
-    211  00  YUQJ50   0                        EOM
-  00001C02  0760FF80  216401C2...0001003C       33     450.0 MB   U GRD
-    211  00  YUQJ45   0                        EOM
-  00001C02  0760FF80  21640190...0001003C       33     400.0 MB   U GRD         
-    211  00  YUQJ40   0                        EOM
-  00001C02  0760FF80  2164015E...0001003C       33     350.0 MB   U GRD
-    211  00  YUQJ35   0                        EOM
-  00001C02  0760FF80  2164012C...0001003C       33     300.0 MB   U GRD         
-    211  00  YUQJ30   0                        EOM
-  00001C02  0760FF80  216400FA...0001003C       33     250.0 MB   U GRD         
-    211  00  YUQJ25   0                        EOM
-  00001C02  0760FF80  216400C8...0001003C       33     200.0 MB   U GRD         
-    211  00  YUQJ20   0                        EOM
-  00001C02  0760FF80  21640096...0001003C       33     150.0 MB   U GRD         
-    211  00  YUQJ15   0                        EOM
-  00001C02  0760FF80  21640064...0001003C       33     100.0 MB   U GRD         
-    211  00  YUQJ10   0                        EOM
-  00001C02  0760FF80  226403E8...0001003C       34    1000.0 MB   V GRD
-    211  00  YVQJ99   0                        EOM
-  00001C02  0760FF80  226403CF...0001003C       34     975.0 MB   V GRD
-    211  00  YVQJ93   0                        EOM
-  00001C02  0760FF80  226403B6...0001003C       34     950.0 MB   V GRD
-    211  00  YVQJ95   0                        EOM
-  00001C02  0760FF80  2264039D...0001003C       34     925.0 MB   V GRD
-    211  00  YVQJ92   0                        EOM
-  00001C02  0760FF80  22640384...0001003C       34     900.0 MB   V GRD
-    211  00  YVQJ90   0                        EOM
-  00001C02  0760FF80  2264036B...0001003C       34     875.0 MB   V GRD
-    211  00  YVQJ91   0                        EOM
-  00001C02  0760FF80  22640352...0001003C       34     850.0 MB   V GRD         
-    211  00  YVQJ85   0                        EOM
-  00001C02  0760FF80  22640339...0001003C       34     825.0 MB   V GRD
-    211  00  YVQJ82   0                        EOM
-  00001C02  0760FF80  22640320...0001003C       34     800.0 MB   V GRD
-    211  00  YVQJ80   0                        EOM
-  00001C02  0760FF80  22640307...0001003C       34     775.0 MB   V GRD
-    211  00  YVQJ77   0                        EOM
-  00001C02  0760FF80  226402EE...0001003C       34     750.0 MB   V GRD
-    211  00  YVQJ75   0                        EOM
-  00001C02  0760FF80  226402D5...0001003C       34     725.0 MB   V GRD
-    211  00  YVQJ72   0                        EOM
-  00001C02  0760FF80  226402BC...0001003C       34     700.0 MB   V GRD         
-    211  00  YVQJ70   0                        EOM
-  00001C02  0760FF80  226402A3...0001003C       34     675.0 MB   V GRD
-    211  00  YVQJ67   0                        EOM
-  00001C02  0760FF80  2264028A...0001003C       34     650.0 MB   V GRD
-    211  00  YVQJ65   0                        EOM
-  00001C02  0760FF80  22640271...0001003C       34     625.0 MB   V GRD
-    211  00  YVQJ62   0                        EOM
-  00001C02  0760FF80  22640258...0001003C       34     600.0 MB   V GRD
-    211  00  YVQJ60   0                        EOM
-  00001C02  0760FF80  2264023F...0001003C       34     575.0 MB   V GRD
-    211  00  YVQJ57   0                        EOM
-  00001C02  0760FF80  22640226...0001003C       34     550.0 MB   V GRD
-    211  00  YVQJ55   0                        EOM
-  00001C02  0760FF80  2264020D...0001003C       34     525.0 MB   V GRD
-    211  00  YVQJ52   0                        EOM
-  00001C02  0760FF80  226401F4...0001003C       34     500.0 MB   V GRD         
-    211  00  YVQJ50   0                        EOM
-  00001C02  0760FF80  226401C2...0001003C       34     450.0 MB   V GRD
-    211  00  YVQJ45   0                        EOM
-  00001C02  0760FF80  22640190...0001003C       34     400.0 MB   V GRD         
-    211  00  YVQJ40   0                        EOM
-  00001C02  0760FF80  2264015E...0001003C       34     350.0 MB   V GRD
-    211  00  YVQJ35   0                        EOM
-  00001C02  0760FF80  2264012C...0001003C       34     300.0 MB   V GRD         
-    211  00  YVQJ30   0                        EOM
-  00001C02  0760FF80  226400FA...0001003C       34     250.0 MB   V GRD         
-    211  00  YVQJ25   0                        EOM
-  00001C02  0760FF80  226400C8...0001003C       34     200.0 MB   V GRD         
-    211  00  YVQJ20   0                        EOM
-  00001C02  0760FF80  22640096...0001003C       34     150.0 MB   V GRD         
-    211  00  YVQJ15   0                        EOM
-  00001C02  0760FF80  22640064...0001003C       34     100.0 MB   V GRD         
-    211  00  YVQJ10   0                        EOM
-  00001C02  0760FF80  02660000...0001003C       02           MSL  PRMSL         
-    211  00  YPQJ89   0                        EOM
-  00001C02  0760FF80  346403E8...0001003C       52    1000.0 MB   R H
-    211  00  YRQJ99   0                        EOM
-  00001C02  0760FF80  346403CF...0001003C       52     975.0 MB   R H
-    211  00  YRQJ93   0                        EOM
-  00001C02  0760FF80  346403B6...0001003C       52     950.0 MB   R H
-    211  00  YRQJ95   0                        EOM
-  00001C02  0760FF80  3464039D...0001003C       52     925.0 MB   R H
-    211  00  YRQJ92   0                        EOM
-  00001C02  0760FF80  34640384...0001003C       52     900.0 MB   R H
-    211  00  YRQJ90   0                        EOM
-  00001C02  0760FF80  3464036B...0001003C       52     875.0 MB   R H
-    211  00  YRQJ91   0                        EOM
-  00001C02  0760FF80  34640352...0001003C       52     850.0 MB   R H           
-    211  00  YRQJ85   0                        EOM
-  00001C02  0760FF80  34640339...0001003C       52     825.0 MB   R H
-    211  00  YRQJ82   0                        EOM
-  00001C02  0760FF80  34640320...0001003C       52     800.0 MB   R H
-    211  00  YRQJ80   0                        EOM
-  00001C02  0760FF80  34640307...0001003C       52     775.0 MB   R H
-    211  00  YRQJ77   0                        EOM
-  00001C02  0760FF80  346402EE...0001003C       52     750.0 MB   R H
-    211  00  YRQJ75   0                        EOM
-  00001C02  0760FF80  346402D5...0001003C       52     725.0 MB   R H
-    211  00  YRQJ72   0                        EOM
-  00001C02  0760FF80  346402BC...0001003C       52     700.0 MB   R H           
-    211  00  YRQJ70   0                        EOM
-  00001C02  0760FF80  346402A3...0001003C       52     675.0 MB   R H
-    211  00  YRQJ67   0                        EOM
-  00001C02  0760FF80  3464028A...0001003C       52     650.0 MB   R H
-    211  00  YRQJ65   0                        EOM
-  00001C02  0760FF80  34640271...0001003C       52     625.0 MB   R H
-    211  00  YRQJ62   0                        EOM
-  00001C02  0760FF80  34640258...0001003C       52     600.0 MB   R H
-    211  00  YRQJ60   0                        EOM
-  00001C02  0760FF80  3464023F...0001003C       52     575.0 MB   R H
-    211  00  YRQJ57   0                        EOM
-  00001C02  0760FF80  34640226...0001003C       52     550.0 MB   R H
-    211  00  YRQJ55   0                        EOM
-  00001C02  0760FF80  3464020D...0001003C       52     525.0 MB   R H
-    211  00  YRQJ52   0                        EOM
-  00001C02  0760FF80  346401F4...0001003C       52     500.0 MB   R H           
-    211  00  YRQJ50   0                        EOM
-  00001C02  0760FF80  346401C2...0001003C       52     450.0 MB   R H
-    211  00  YRQJ45   0                        EOM
-  00001C02  0760FF80  34640190...0001003C       52     400.0 MB   R H           
-    211  00  YRQJ40   0                        EOM
-  00001C02  0760FF80  3464015E...0001003C       52     350.0 MB   R H
-    211  00  YRQJ35   0                        EOM
-  00001C02  0760FF80  3464012C...0001003C       52     300.0 MB   R H           
-    211  00  YRQJ30   0                        EOM
-  00001C02  0760FF80  346400FA...0001003C       52     250.0 MB   R H
-    211  00  YRQJ25   0                        EOM
-  00001C02  0760FF80  346400C8...0001003C       52     200.0 MB   R H
-    211  00  YRQJ20   0                        EOM
-  00001C02  0760FF80  34640096...0001003C       52     150.0 MB   R H
-    211  00  YRQJ15   0                        EOM
-  00001C02  0760FF80  34640064...0001003C       52     100.0 MB   R H
-    211  00  YRQJ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001003C       11    1000.0 MB   TMP
-    211  00  YTQJ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001003C       11     975.0 MB   TMP
-    211  00  YTQJ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001003C       11     950.0 MB   TMP
-    211  00  YTQJ95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001003C       11     925.0 MB   TMP
-    211  00  YTQJ92   0                        EOM
-  00001C02  0760FF80  0B640384...0001003C       11     900.0 MB   TMP
-    211  00  YTQJ90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001003C       11     875.0 MB   TMP
-    211  00  YTQJ91   0                        EOM
-  00001C02  0760FF80  0B640352...0001003C       11     850.0 MB   TMP           
-    211  00  YTQJ85   0                        EOM
-  00001C02  0760FF80  0B640339...0001003C       11     825.0 MB   TMP
-    211  00  YTQJ82   0                        EOM
-  00001C02  0760FF80  0B640320...0001003C       11     800.0 MB   TMP
-    211  00  YTQJ80   0                        EOM
-  00001C02  0760FF80  0B640307...0001003C       11     775.0 MB   TMP
-    211  00  YTQJ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001003C       11     750.0 MB   TMP
-    211  00  YTQJ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001003C       11     725.0 MB   TMP
-    211  00  YTQJ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001003C       11     700.0 MB   TMP           
-    211  00  YTQJ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001003C       11     675.0 MB   TMP
-    211  00  YTQJ67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001003C       11     650.0 MB   TMP
-    211  00  YTQJ65   0                        EOM
-  00001C02  0760FF80  0B640271...0001003C       11     625.0 MB   TMP
-    211  00  YTQJ62   0                        EOM
-  00001C02  0760FF80  0B640258...0001003C       11     600.0 MB   TMP
-    211  00  YTQJ60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001003C       11     575.0 MB   TMP
-    211  00  YTQJ57   0                        EOM
-  00001C02  0760FF80  0B640226...0001003C       11     550.0 MB   TMP
-    211  00  YTQJ55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001003C       11     525.0 MB   TMP
-    211  00  YTQJ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001003C       11     500.0 MB   TMP           
-    211  00  YTQJ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001003C       11     450.0 MB   TMP
-    211  00  YTQJ45   0                        EOM
-  00001C02  0760FF80  0B640190...0001003C       11     400.0 MB   TMP           
-    211  00  YTQJ40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001003C       11     350.0 MB   TMP
-    211  00  YTQJ35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001003C       11     300.0 MB   TMP           
-    211  00  YTQJ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001003C       11     250.0 MB   TMP           
-    211  00  YTQJ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001003C       11     200.0 MB   TMP           
-    211  00  YTQJ20   0                        EOM
-  00001C02  0760FF80  0B640096...0001003C       11     150.0 MB   TMP           
-    211  00  YTQJ15   0                        EOM
-  00001C02  0760FF80  0B640064...0001003C       11     100.0 MB   TMP           
-    211  00  YTQJ10   0                        EOM
-  00001C02  0760FF80  28640352...0001003C       40     850.0 MB  DZDT           
-    211  00  YOQJ85   0                        EOM
-  00001C02  0760FF80  286402BC...0001003C       40     700.0 MB  DZDT           
-    211  00  YOQJ70   0                        EOM
-  00001C02  0760FF80  286401F4...0001003C       40     500.0 MB  DZDT           
-    211  00  YOQJ50   0                        EOM
-  00001C02  0760FF80  28640190...0001003C       40     400.0 MB  DZDT           
-    211  00  YOQJ40   0                        EOM
-  00001C02  0760FF80  2864012C...0001003C       40     300.0 MB  DZDT           
-    211  00  YOQJ30   0                        EOM
-  00001C02  0760FF80  286400FA...0001003C       40     250.0 MB  DZDT           
-    211  00  YOQJ25   0                        EOM
-  00001C02  0760FF80  286400C8...0001003C       40     200.0 MB  DZDT           
-    211  00  YOQJ20   0                        EOM
-  00001C02  0760FF80  28640096...0001003C       40     150.0 MB  DZDT           
-    211  00  YOQJ15   0                        EOM
-  00001C02  0760FF80  28640064...0001003C       40     100.0 MB  DZDT           
-    211  00  YOQJ10   0                        EOM
-  00001C02  0760FF80  01010000...0001003C       01          SFC  PRES           
-    211  00  YPQJ98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001003C       52        44/100  R H           
-    211  00  YRQJ00   0                        EOM
-  00001C02  0760FF80  36C80000...0001003C       54          EATM  P WAT         
-    211  00  YFQJ00   0                        EOM
-  00001C02  0760FF80  0B690002...0001003C       11          2m/SFC TMP         
-    211  00  YTQJ98   0                        EOM
-  00001C02  0760FF80  34741E00...0001003C       52      BNDRY/SPD  R H          
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  0B070000...0001003C       11            TRO TMP           
-    211  00  YTQJ97   0                        EOM
-  00001C02  0760FF80  01070000...0001003C       01            TRO PRES          
-    211  00  YPQJ97   0                        EOM
-  00001C02  0760FF80  21741E00...0001003C       33           SPD  U GRD         
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  22741E00...0001003C       34           SPD  V GRD         
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  21070000...0001003C       33            TRO U GRD         
-    211  00  YUQJ97   0                        EOM
-  00001C02  0760FF80  22070000...0001003C       34            TRO V GRD         
-    211  00  YVQJ97   0                        EOM
-  00001C02  0760FF80  88070000...0001003C      136            TRO VW SH         
-    211  00  YBQJ97   0                        EOM
-  00001C02  0760FF80  3D010000...0001003C       61            SFC A PCP         
-    211  00  YEQJ98   0                        EOM
-  00001C02  0760FF80  83010000...0001003C      131            SFC LFT X         
-    211  00  YXQJ98   0                        EOM
-  00001C02  0760FF80  29640352...0001003C       41    850.0 MB    ABS V         
-    211  00  YCQJ85   0                        EOM
-  00001C02  0760FF80  296402BC...0001003C       41    700.0 MB    ABS V         
-    211  00  YCQJ70   0                        EOM
-  00001C02  0760FF80  296401F4...0001003C       41    500.0 MB    ABS V         
-    211  00  YCQJ50   0                        EOM
-  00001C02  0760FF80  296400FA...0001003C       41    250.0 MB    ABS V         
-    211  00  YCQJ25   0                        EOM
-  00001C02  0760FF80  9D010000...0001003C      157          SFC   CAPE
-    211  00  YWQJ98   0                        EOM
-  00001C02  0760FF80  9C010000...0001003C      156          SFC   CIN
-    211  00  YYQJ98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001003C      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQJ86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001003C      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQJ86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001003C       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001003C       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001003C       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001003C       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  0B749678...0001003C       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001003C       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQJ86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001003C       52   60 SPDY  30 SPDY  R H
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001003C       52   90 SPDY  60 SPDY  R H
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  3474785A...0001003C       52  120 SPDY  90 SPDY  R H
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  34749678...0001003C       52  150 SPDY 120 SPDY  R H
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  3474B496...0001003C       52  180 SPDY 150 SPDY  R H
-    211  00  YRQJ86   0                        EOM
-  00001C02  0760FF80  21741E00...0001003C       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001003C       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001003C       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  2174785A...0001003C       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  21749678...0001003C       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  2174B496...0001003C       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQJ86   0                        EOM
-  00001C02  0760FF80  22741E00...0001003C       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001003C       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001003C       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  2274785A...0001003C       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  22749678...0001003C       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  2274B496...0001003C       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQJ86   0                        EOM
-  00001C02  0760FF80  0B690002...0001003C       11    2  HTGL     TMP
-    211  00  YTQJ98   0                        EOM
-  00001C02  0760FF80  34690002...0001003C       52    2  HTGL     R H
-    211  00  YRQJ98   0                        EOM
-  00001C02  0760FF80  2169000A...0001003C       33   10  HTGL     U GRD
-    211  00  YUQJ98   0                        EOM
-  00001C02  0760FF80  2269000A...0001003C       34   10  HTGL     V GRD
-    211  00  YVQJ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs066.211 b/parm/wmo/grib_awpgfs066.211
deleted file mode 100755
index a3aad32e2f..0000000000
--- a/parm/wmo/grib_awpgfs066.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010042       07    1000.0 MB   HGT           
-    211  00  ZHQN99   0                        EOM
-  00001C02  0760FF80  076403CF...00010042       07     975.0 MB   HGT
-    211  00  ZHQN93   0                        EOM
-  00001C02  0760FF80  076403B6...00010042       07     950.0 MB   HGT
-    211  00  ZHQN95   0                        EOM
-  00001C02  0760FF80  0764039D...00010042       07     925.0 MB   HGT
-    211  00  ZHQN92   0                        EOM
-  00001C02  0760FF80  07640384...00010042       07     900.0 MB   HGT
-    211  00  ZHQN90   0                        EOM
-  00001C02  0760FF80  0764036B...00010042       07     875.0 MB   HGT
-    211  00  ZHQN91   0                        EOM
-  00001C02  0760FF80  07640352...00010042       07     850.0 MB   HGT           
-    211  00  ZHQN85   0                        EOM
-  00001C02  0760FF80  07640339...00010042       07     825.0 MB   HGT
-    211  00  ZHQN82   0                        EOM
-  00001C02  0760FF80  07640320...00010042       07     800.0 MB   HGT
-    211  00  ZHQN80   0                        EOM
-  00001C02  0760FF80  07640307...00010042       07     775.0 MB   HGT
-    211  00  ZHQN77   0                        EOM
-  00001C02  0760FF80  076402EE...00010042       07     750.0 MB   HGT
-    211  00  ZHQN75   0                        EOM
-  00001C02  0760FF80  076402D5...00010042       07     725.0 MB   HGT
-    211  00  ZHQN72   0                        EOM
-  00001C02  0760FF80  076402BC...00010042       07     700.0 MB   HGT           
-    211  00  ZHQN70   0                        EOM
-  00001C02  0760FF80  076402A3...00010042       07     675.0 MB   HGT
-    211  00  ZHQN67   0                        EOM
-  00001C02  0760FF80  0764028A...00010042       07     650.0 MB   HGT
-    211  00  ZHQN65   0                        EOM
-  00001C02  0760FF80  07640271...00010042       07     625.0 MB   HGT
-    211  00  ZHQN62   0                        EOM
-  00001C02  0760FF80  07640258...00010042       07     600.0 MB   HGT
-    211  00  ZHQN60   0                        EOM
-  00001C02  0760FF80  0764023F...00010042       07     575.0 MB   HGT
-    211  00  ZHQN57   0                        EOM
-  00001C02  0760FF80  07640226...00010042       07     550.0 MB   HGT
-    211  00  ZHQN55   0                        EOM
-  00001C02  0760FF80  0764020D...00010042       07     525.0 MB   HGT
-    211  00  ZHQN52   0                        EOM
-  00001C02  0760FF80  076401F4...00010042       07     500.0 MB   HGT           
-    211  00  ZHQN50   0                        EOM
-  00001C02  0760FF80  076401C2...00010042       07     450.0 MB   HGT
-    211  00  ZHQN45   0                        EOM
-  00001C02  0760FF80  07640190...00010042       07     400.0 MB   HGT           
-    211  00  ZHQN40   0                        EOM
-  00001C02  0760FF80  0764015E...00010042       07     350.0 MB   HGT
-    211  00  ZHQN35   0                        EOM
-  00001C02  0760FF80  0764012C...00010042       07     300.0 MB   HGT           
-    211  00  ZHQN30   0                        EOM
-  00001C02  0760FF80  076400FA...00010042       07     250.0 MB   HGT           
-    211  00  ZHQN25   0                        EOM
-  00001C02  0760FF80  076400C8...00010048       07     200.0 MB   HGT           
-    211  00  ZHQN20   0                        EOM
-  00001C02  0760FF80  07640096...00010042       07     150.0 MB   HGT           
-    211  00  ZHQN15   0                        EOM
-  00001C02  0760FF80  07640064...00010042       07     100.0 MB   HGT           
-    211  00  ZHQN10   0                        EOM
-  00001C02  0760FF80  216403E8...00010042       33    1000.0 MB   U GRD
-    211  00  ZUQN99   0                        EOM
-  00001C02  0760FF80  216403CF...00010042       33     975.0 MB   U GRD
-    211  00  ZUQN93   0                        EOM
-  00001C02  0760FF80  216403B6...00010042       33     950.0 MB   U GRD
-    211  00  ZUQN95   0                        EOM
-  00001C02  0760FF80  2164039D...00010042       33     925.0 MB   U GRD
-    211  00  ZUQN92   0                        EOM
-  00001C02  0760FF80  21640384...00010042       33     900.0 MB   U GRD
-    211  00  ZUQN90   0                        EOM
-  00001C02  0760FF80  2164036B...00010042       33     875.0 MB   U GRD
-    211  00  ZUQN91   0                        EOM
-  00001C02  0760FF80  21640352...00010042       33     850.0 MB   U GRD         
-    211  00  ZUQN85   0                        EOM
-  00001C02  0760FF80  21640339...00010042       33     825.0 MB   U GRD
-    211  00  ZUQN82   0                        EOM
-  00001C02  0760FF80  21640320...00010042       33     800.0 MB   U GRD
-    211  00  ZUQN80   0                        EOM
-  00001C02  0760FF80  21640307...00010042       33     775.0 MB   U GRD
-    211  00  ZUQN77   0                        EOM
-  00001C02  0760FF80  216402EE...00010042       33     750.0 MB   U GRD
-    211  00  ZUQN75   0                        EOM
-  00001C02  0760FF80  216402D5...00010042       33     725.0 MB   U GRD
-    211  00  ZUQN72   0                        EOM
-  00001C02  0760FF80  216402BC...00010042       33     700.0 MB   U GRD         
-    211  00  ZUQN70   0                        EOM
-  00001C02  0760FF80  216402A3...00010042       33     675.0 MB   U GRD
-    211  00  ZUQN67   0                        EOM
-  00001C02  0760FF80  2164028A...00010042       33     650.0 MB   U GRD
-    211  00  ZUQN65   0                        EOM
-  00001C02  0760FF80  21640271...00010042       33     625.0 MB   U GRD
-    211  00  ZUQN62   0                        EOM
-  00001C02  0760FF80  21640258...00010042       33     600.0 MB   U GRD
-    211  00  ZUQN60   0                        EOM
-  00001C02  0760FF80  2164023F...00010042       33     575.0 MB   U GRD
-    211  00  ZUQN57   0                        EOM
-  00001C02  0760FF80  21640226...00010042       33     550.0 MB   U GRD
-    211  00  ZUQN55   0                        EOM
-  00001C02  0760FF80  2164020D...00010042       33     525.0 MB   U GRD
-    211  00  ZUQN52   0                        EOM
-  00001C02  0760FF80  216401F4...00010042       33     500.0 MB   U GRD         
-    211  00  ZUQN50   0                        EOM
-  00001C02  0760FF80  216401C2...00010042       33     450.0 MB   U GRD
-    211  00  ZUQN45   0                        EOM
-  00001C02  0760FF80  21640190...00010042       33     400.0 MB   U GRD         
-    211  00  ZUQN40   0                        EOM
-  00001C02  0760FF80  2164015E...00010042       33     350.0 MB   U GRD
-    211  00  ZUQN35   0                        EOM
-  00001C02  0760FF80  2164012C...00010042       33     300.0 MB   U GRD         
-    211  00  ZUQN30   0                        EOM
-  00001C02  0760FF80  216400FA...00010042       33     250.0 MB   U GRD         
-    211  00  ZUQN25   0                        EOM
-  00001C02  0760FF80  216400C8...00010042       33     200.0 MB   U GRD         
-    211  00  ZUQN20   0                        EOM
-  00001C02  0760FF80  21640096...00010042       33     150.0 MB   U GRD         
-    211  00  ZUQN15   0                        EOM
-  00001C02  0760FF80  21640064...00010042       33     100.0 MB   U GRD         
-    211  00  ZUQN10   0                        EOM
-  00001C02  0760FF80  226403E8...00010042       34    1000.0 MB   V GRD
-    211  00  ZVQN99   0                        EOM
-  00001C02  0760FF80  226403CF...00010042       34     975.0 MB   V GRD
-    211  00  ZVQN93   0                        EOM
-  00001C02  0760FF80  226403B6...00010042       34     950.0 MB   V GRD
-    211  00  ZVQN95   0                        EOM
-  00001C02  0760FF80  2264039D...00010042       34     925.0 MB   V GRD
-    211  00  ZVQN92   0                        EOM
-  00001C02  0760FF80  22640384...00010042       34     900.0 MB   V GRD
-    211  00  ZVQN90   0                        EOM
-  00001C02  0760FF80  2264036B...00010042       34     875.0 MB   V GRD
-    211  00  ZVQN91   0                        EOM
-  00001C02  0760FF80  22640352...00010042       34     850.0 MB   V GRD         
-    211  00  ZVQN85   0                        EOM
-  00001C02  0760FF80  22640339...00010042       34     825.0 MB   V GRD
-    211  00  ZVQN82   0                        EOM
-  00001C02  0760FF80  22640320...00010042       34     800.0 MB   V GRD
-    211  00  ZVQN80   0                        EOM
-  00001C02  0760FF80  22640307...00010042       34     775.0 MB   V GRD
-    211  00  ZVQN77   0                        EOM
-  00001C02  0760FF80  226402EE...00010042       34     750.0 MB   V GRD
-    211  00  ZVQN75   0                        EOM
-  00001C02  0760FF80  226402D5...00010042       34     725.0 MB   V GRD
-    211  00  ZVQN72   0                        EOM
-  00001C02  0760FF80  226402BC...00010042       34     700.0 MB   V GRD         
-    211  00  ZVQN70   0                        EOM
-  00001C02  0760FF80  226402A3...00010042       34     675.0 MB   V GRD
-    211  00  ZVQN67   0                        EOM
-  00001C02  0760FF80  2264028A...00010042       34     650.0 MB   V GRD
-    211  00  ZVQN65   0                        EOM
-  00001C02  0760FF80  22640271...00010042       34     625.0 MB   V GRD
-    211  00  ZVQN62   0                        EOM
-  00001C02  0760FF80  22640258...00010042       34     600.0 MB   V GRD
-    211  00  ZVQN60   0                        EOM
-  00001C02  0760FF80  2264023F...00010042       34     575.0 MB   V GRD
-    211  00  ZVQN57   0                        EOM
-  00001C02  0760FF80  22640226...00010042       34     550.0 MB   V GRD
-    211  00  ZVQN55   0                        EOM
-  00001C02  0760FF80  2264020D...00010042       34     525.0 MB   V GRD
-    211  00  ZVQN52   0                        EOM
-  00001C02  0760FF80  226401F4...00010042       34     500.0 MB   V GRD         
-    211  00  ZVQN50   0                        EOM
-  00001C02  0760FF80  226401C2...00010042       34     450.0 MB   V GRD
-    211  00  ZVQN45   0                        EOM
-  00001C02  0760FF80  22640190...00010042       34     400.0 MB   V GRD         
-    211  00  ZVQN40   0                        EOM
-  00001C02  0760FF80  2264015E...00010042       34     350.0 MB   V GRD
-    211  00  ZVQN35   0                        EOM
-  00001C02  0760FF80  2264012C...00010042       34     300.0 MB   V GRD         
-    211  00  ZVQN30   0                        EOM
-  00001C02  0760FF80  226400FA...00010042       34     250.0 MB   V GRD         
-    211  00  ZVQN25   0                        EOM
-  00001C02  0760FF80  226400C8...00010042       34     200.0 MB   V GRD         
-    211  00  ZVQN20   0                        EOM
-  00001C02  0760FF80  22640096...00010042       34     150.0 MB   V GRD         
-    211  00  ZVQN15   0                        EOM
-  00001C02  0760FF80  22640064...00010042       34     100.0 MB   V GRD         
-    211  00  ZVQN10   0                        EOM
-  00001C02  0760FF80  02660000...00010042       02           MSL  PRMSL         
-    211  00  ZPQN89   0                        EOM
-  00001C02  0760FF80  346403E8...00010042       52    1000.0 MB   R H
-    211  00  ZRQN99   0                        EOM
-  00001C02  0760FF80  346403CF...00010042       52     975.0 MB   R H
-    211  00  ZRQN93   0                        EOM
-  00001C02  0760FF80  346403B6...00010042       52     950.0 MB   R H
-    211  00  ZRQN95   0                        EOM
-  00001C02  0760FF80  3464039D...00010042       52     925.0 MB   R H
-    211  00  ZRQN92   0                        EOM
-  00001C02  0760FF80  34640384...00010042       52     900.0 MB   R H
-    211  00  ZRQN90   0                        EOM
-  00001C02  0760FF80  3464036B...00010042       52     875.0 MB   R H
-    211  00  ZRQN91   0                        EOM
-  00001C02  0760FF80  34640352...00010042       52     850.0 MB   R H           
-    211  00  ZRQN85   0                        EOM
-  00001C02  0760FF80  34640339...00010042       52     825.0 MB   R H
-    211  00  ZRQN82   0                        EOM
-  00001C02  0760FF80  34640320...00010042       52     800.0 MB   R H
-    211  00  ZRQN80   0                        EOM
-  00001C02  0760FF80  34640307...00010042       52     775.0 MB   R H
-    211  00  ZRQN77   0                        EOM
-  00001C02  0760FF80  346402EE...00010042       52     750.0 MB   R H
-    211  00  ZRQN75   0                        EOM
-  00001C02  0760FF80  346402D5...00010042       52     725.0 MB   R H
-    211  00  ZRQN72   0                        EOM
-  00001C02  0760FF80  346402BC...00010042       52     700.0 MB   R H           
-    211  00  ZRQN70   0                        EOM
-  00001C02  0760FF80  346402A3...00010042       52     675.0 MB   R H
-    211  00  ZRQN67   0                        EOM
-  00001C02  0760FF80  3464028A...00010042       52     650.0 MB   R H
-    211  00  ZRQN65   0                        EOM
-  00001C02  0760FF80  34640271...00010042       52     625.0 MB   R H
-    211  00  ZRQN62   0                        EOM
-  00001C02  0760FF80  34640258...00010042       52     600.0 MB   R H
-    211  00  ZRQN60   0                        EOM
-  00001C02  0760FF80  3464023F...00010042       52     575.0 MB   R H
-    211  00  ZRQN57   0                        EOM
-  00001C02  0760FF80  34640226...00010042       52     550.0 MB   R H
-    211  00  ZRQN55   0                        EOM
-  00001C02  0760FF80  3464020D...00010042       52     525.0 MB   R H
-    211  00  ZRQN52   0                        EOM
-  00001C02  0760FF80  346401F4...00010042       52     500.0 MB   R H           
-    211  00  ZRQN50   0                        EOM
-  00001C02  0760FF80  346401C2...00010042       52     450.0 MB   R H
-    211  00  ZRQN45   0                        EOM
-  00001C02  0760FF80  34640190...00010042       52     400.0 MB   R H           
-    211  00  ZRQN40   0                        EOM
-  00001C02  0760FF80  3464015E...00010042       52     350.0 MB   R H
-    211  00  ZRQN35   0                        EOM
-  00001C02  0760FF80  3464012C...00010042       52     300.0 MB   R H           
-    211  00  ZRQN30   0                        EOM
-  00001C02  0760FF80  346400FA...00010042       52     250.0 MB   R H
-    211  00  ZRQN25   0                        EOM
-  00001C02  0760FF80  346400C8...00010042       52     200.0 MB   R H
-    211  00  ZRQN20   0                        EOM
-  00001C02  0760FF80  34640096...00010042       52     150.0 MB   R H
-    211  00  ZRQN15   0                        EOM
-  00001C02  0760FF80  34640064...00010042       52     100.0 MB   R H
-    211  00  ZRQN10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010042       11    1000.0 MB   TMP
-    211  00  ZTQN99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010042       11     975.0 MB   TMP
-    211  00  ZTQN93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010042       11     950.0 MB   TMP
-    211  00  ZTQN95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010042       11     925.0 MB   TMP
-    211  00  ZTQN92   0                        EOM
-  00001C02  0760FF80  0B640384...00010042       11     900.0 MB   TMP
-    211  00  ZTQN90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010042       11     875.0 MB   TMP
-    211  00  ZTQN91   0                        EOM
-  00001C02  0760FF80  0B640352...00010042       11     850.0 MB   TMP           
-    211  00  ZTQN85   0                        EOM
-  00001C02  0760FF80  0B640339...00010042       11     825.0 MB   TMP
-    211  00  ZTQN82   0                        EOM
-  00001C02  0760FF80  0B640320...00010042       11     800.0 MB   TMP
-    211  00  ZTQN80   0                        EOM
-  00001C02  0760FF80  0B640307...00010042       11     775.0 MB   TMP
-    211  00  ZTQN77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010042       11     750.0 MB   TMP
-    211  00  ZTQN75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010042       11     725.0 MB   TMP
-    211  00  ZTQN72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010042       11     700.0 MB   TMP           
-    211  00  ZTQN70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010042       11     675.0 MB   TMP
-    211  00  ZTQN67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010042       11     650.0 MB   TMP
-    211  00  ZTQN65   0                        EOM
-  00001C02  0760FF80  0B640271...00010042       11     625.0 MB   TMP
-    211  00  ZTQN62   0                        EOM
-  00001C02  0760FF80  0B640258...00010042       11     600.0 MB   TMP
-    211  00  ZTQN60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010042       11     575.0 MB   TMP
-    211  00  ZTQN57   0                        EOM
-  00001C02  0760FF80  0B640226...00010042       11     550.0 MB   TMP
-    211  00  ZTQN55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010042       11     525.0 MB   TMP
-    211  00  ZTQN52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010042       11     500.0 MB   TMP           
-    211  00  ZTQN50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010042       11     450.0 MB   TMP
-    211  00  ZTQN45   0                        EOM
-  00001C02  0760FF80  0B640190...00010042       11     400.0 MB   TMP           
-    211  00  ZTQN40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010042       11     350.0 MB   TMP
-    211  00  ZTQN35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010042       11     300.0 MB   TMP           
-    211  00  ZTQN30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010042       11     250.0 MB   TMP           
-    211  00  ZTQN25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010042       11     200.0 MB   TMP           
-    211  00  ZTQN20   0                        EOM
-  00001C02  0760FF80  0B640096...00010042       11     150.0 MB   TMP           
-    211  00  ZTQN15   0                        EOM
-  00001C02  0760FF80  0B640064...00010042       11     100.0 MB   TMP           
-    211  00  ZTQN10   0                        EOM
-  00001C02  0760FF80  28640352...00010042       40     850.0 MB  DZDT           
-    211  00  ZOQN85   0                        EOM
-  00001C02  0760FF80  286402BC...00010042       40     700.0 MB  DZDT           
-    211  00  ZOQN70   0                        EOM
-  00001C02  0760FF80  286401F4...00010042       40     500.0 MB  DZDT           
-    211  00  ZOQN50   0                        EOM
-  00001C02  0760FF80  28640190...00010042       40     400.0 MB  DZDT           
-    211  00  ZOQN40   0                        EOM
-  00001C02  0760FF80  2864012C...00010042       40     300.0 MB  DZDT           
-    211  00  ZOQN30   0                        EOM
-  00001C02  0760FF80  286400FA...00010042       40     250.0 MB  DZDT           
-    211  00  ZOQN25   0                        EOM
-  00001C02  0760FF80  286400C8...00010042       40     200.0 MB  DZDT           
-    211  00  ZOQN20   0                        EOM
-  00001C02  0760FF80  28640096...00010042       40     150.0 MB  DZDT           
-    211  00  ZOQN15   0                        EOM
-  00001C02  0760FF80  28640064...00010042       40     100.0 MB  DZDT           
-    211  00  ZOQN10   0                        EOM
-  00001C02  0760FF80  01010000...00010042       01          SFC  PRES           
-    211  00  ZPQN98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010042       52        44/100  R H           
-    211  00  ZRQN00   0                        EOM
-  00001C02  0760FF80  36C80000...00010042       54          EATM  P WAT         
-    211  00  ZFQN00   0                        EOM
-  00001C02  0760FF80  0B690002...00010042       11          2m/SFC TMP         
-    211  00  ZTQN98   0                        EOM
-  00001C02  0760FF80  34741E00...00010042       52      BNDRY/SPD  R H          
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  0B070000...00010042       11            TRO TMP           
-    211  00  ZTQN97   0                        EOM
-  00001C02  0760FF80  01070000...00010042       01            TRO PRES          
-    211  00  ZPQN97   0                        EOM
-  00001C02  0760FF80  21741E00...00010042       33           SPD  U GRD         
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  22741E00...00010042       34           SPD  V GRD         
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  21070000...00010042       33            TRO U GRD         
-    211  00  ZUQN97   0                        EOM
-  00001C02  0760FF80  22070000...00010042       34            TRO V GRD         
-    211  00  ZVQN97   0                        EOM
-  00001C02  0760FF80  88070000...00010042      136            TRO VW SH         
-    211  00  ZBQN97   0                        EOM
-  00001C02  0760FF80  3D010000...00010042       61            SFC A PCP         
-    211  00  ZEQN98   0                        EOM
-  00001C02  0760FF80  83010000...00010042      131            SFC LFT X         
-    211  00  ZXQN98   0                        EOM
-  00001C02  0760FF80  29640352...00010042       41    850.0 MB    ABS V         
-    211  00  ZCQN85   0                        EOM
-  00001C02  0760FF80  296402BC...00010042       41    700.0 MB    ABS V         
-    211  00  ZCQN70   0                        EOM
-  00001C02  0760FF80  296401F4...00010042       41    500.0 MB    ABS V         
-    211  00  ZCQN50   0                        EOM
-  00001C02  0760FF80  296400FA...00010042       41    250.0 MB    ABS V         
-    211  00  ZCQN25   0                        EOM
-  00001C02  0760FF80  9D010000...00010042      157          SFC   CAPE
-    211  00  ZWQN98   0                        EOM
-  00001C02  0760FF80  9C010000...00010042      156          SFC   CIN
-    211  00  ZYQN98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010042      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQN86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010042      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQN86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010042       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010042       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010042       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010042       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  0B749678...00010042       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010042       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQN86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010042       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010042       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  3474785A...00010042       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  34749678...00010042       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  3474B496...00010042       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQN86   0                        EOM
-  00001C02  0760FF80  21741E00...00010042       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010042       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010042       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  2174785A...00010042       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  21749678...00010042       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  2174B496...00010042       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQN86   0                        EOM
-  00001C02  0760FF80  22741E00...00010042       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010042       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010042       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  2274785A...00010042       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  22749678...00010042       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  2274B496...00010042       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQN86   0                        EOM
-  00001C02  0760FF80  0B690002...00010042       11    2  HTGL     TMP
-    211  00  ZTQN98   0                        EOM
-  00001C02  0760FF80  34690002...00010042       52    2  HTGL     R H
-    211  00  ZRQN98   0                        EOM
-  00001C02  0760FF80  2169000A...00010042       33   10  HTGL     U GRD
-    211  00  ZUQN98   0                        EOM
-  00001C02  0760FF80  2269000A...00010042       34   10  HTGL     V GRD
-    211  00  ZVQN98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs072.211 b/parm/wmo/grib_awpgfs072.211
deleted file mode 100755
index 210fcad867..0000000000
--- a/parm/wmo/grib_awpgfs072.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010048       07    1000.0 MB   HGT           
-    211  00  YHQK99   0                        EOM
-  00001C02  0760FF80  076403CF...00010048       07     975.0 MB   HGT
-    211  00  YHQK93   0                        EOM
-  00001C02  0760FF80  076403B6...00010048       07     950.0 MB   HGT
-    211  00  YHQK95   0                        EOM
-  00001C02  0760FF80  0764039D...00010048       07     925.0 MB   HGT
-    211  00  YHQK92   0                        EOM
-  00001C02  0760FF80  07640384...00010048       07     900.0 MB   HGT
-    211  00  YHQK90   0                        EOM
-  00001C02  0760FF80  0764036B...00010048       07     875.0 MB   HGT
-    211  00  YHQK91   0                        EOM
-  00001C02  0760FF80  07640352...00010048       07     850.0 MB   HGT           
-    211  00  YHQK85   0                        EOM
-  00001C02  0760FF80  07640339...00010048       07     825.0 MB   HGT
-    211  00  YHQK82   0                        EOM
-  00001C02  0760FF80  07640320...00010048       07     800.0 MB   HGT
-    211  00  YHQK80   0                        EOM
-  00001C02  0760FF80  07640307...00010048       07     775.0 MB   HGT
-    211  00  YHQK77   0                        EOM
-  00001C02  0760FF80  076402EE...00010048       07     750.0 MB   HGT
-    211  00  YHQK75   0                        EOM
-  00001C02  0760FF80  076402D5...00010048       07     725.0 MB   HGT
-    211  00  YHQK72   0                        EOM
-  00001C02  0760FF80  076402BC...00010048       07     700.0 MB   HGT           
-    211  00  YHQK70   0                        EOM
-  00001C02  0760FF80  076402A3...00010048       07     675.0 MB   HGT
-    211  00  YHQK67   0                        EOM
-  00001C02  0760FF80  0764028A...00010048       07     650.0 MB   HGT
-    211  00  YHQK65   0                        EOM
-  00001C02  0760FF80  07640271...00010048       07     625.0 MB   HGT
-    211  00  YHQK62   0                        EOM
-  00001C02  0760FF80  07640258...00010048       07     600.0 MB   HGT
-    211  00  YHQK60   0                        EOM
-  00001C02  0760FF80  0764023F...00010048       07     575.0 MB   HGT
-    211  00  YHQK57   0                        EOM
-  00001C02  0760FF80  07640226...00010048       07     550.0 MB   HGT
-    211  00  YHQK55   0                        EOM
-  00001C02  0760FF80  0764020D...00010048       07     525.0 MB   HGT
-    211  00  YHQK52   0                        EOM
-  00001C02  0760FF80  076401F4...00010048       07     500.0 MB   HGT           
-    211  00  YHQK50   0                        EOM
-  00001C02  0760FF80  076401C2...00010048       07     450.0 MB   HGT
-    211  00  YHQK45   0                        EOM
-  00001C02  0760FF80  07640190...00010048       07     400.0 MB   HGT           
-    211  00  YHQK40   0                        EOM
-  00001C02  0760FF80  0764015E...00010048       07     350.0 MB   HGT
-    211  00  YHQK35   0                        EOM
-  00001C02  0760FF80  0764012C...00010048       07     300.0 MB   HGT           
-    211  00  YHQK30   0                        EOM
-  00001C02  0760FF80  076400FA...00010048       07     250.0 MB   HGT           
-    211  00  YHQK25   0                        EOM
-  00001C02  0760FF80  076400C8...00010048       07     200.0 MB   HGT           
-    211  00  YHQK20   0                        EOM
-  00001C02  0760FF80  07640096...00010048       07     150.0 MB   HGT           
-    211  00  YHQK15   0                        EOM
-  00001C02  0760FF80  07640064...00010048       07     100.0 MB   HGT           
-    211  00  YHQK10   0                        EOM
-  00001C02  0760FF80  216403E8...00010048       33    1000.0 MB   U GRD
-    211  00  YUQK99   0                        EOM
-  00001C02  0760FF80  216403CF...00010048       33     975.0 MB   U GRD
-    211  00  YUQK93   0                        EOM
-  00001C02  0760FF80  216403B6...00010048       33     950.0 MB   U GRD
-    211  00  YUQK95   0                        EOM
-  00001C02  0760FF80  2164039D...00010048       33     925.0 MB   U GRD
-    211  00  YUQK92   0                        EOM
-  00001C02  0760FF80  21640384...00010048       33     900.0 MB   U GRD
-    211  00  YUQK90   0                        EOM
-  00001C02  0760FF80  2164036B...00010048       33     875.0 MB   U GRD
-    211  00  YUQK91   0                        EOM
-  00001C02  0760FF80  21640352...00010048       33     850.0 MB   U GRD         
-    211  00  YUQK85   0                        EOM
-  00001C02  0760FF80  21640339...00010048       33     825.0 MB   U GRD
-    211  00  YUQK82   0                        EOM
-  00001C02  0760FF80  21640320...00010048       33     800.0 MB   U GRD
-    211  00  YUQK80   0                        EOM
-  00001C02  0760FF80  21640307...00010048       33     775.0 MB   U GRD
-    211  00  YUQK77   0                        EOM
-  00001C02  0760FF80  216402EE...00010048       33     750.0 MB   U GRD
-    211  00  YUQK75   0                        EOM
-  00001C02  0760FF80  216402D5...00010048       33     725.0 MB   U GRD
-    211  00  YUQK72   0                        EOM
-  00001C02  0760FF80  216402BC...00010048       33     700.0 MB   U GRD         
-    211  00  YUQK70   0                        EOM
-  00001C02  0760FF80  216402A3...00010048       33     675.0 MB   U GRD
-    211  00  YUQK67   0                        EOM
-  00001C02  0760FF80  2164028A...00010048       33     650.0 MB   U GRD
-    211  00  YUQK65   0                        EOM
-  00001C02  0760FF80  21640271...00010048       33     625.0 MB   U GRD
-    211  00  YUQK62   0                        EOM
-  00001C02  0760FF80  21640258...00010048       33     600.0 MB   U GRD
-    211  00  YUQK60   0                        EOM
-  00001C02  0760FF80  2164023F...00010048       33     575.0 MB   U GRD
-    211  00  YUQK57   0                        EOM
-  00001C02  0760FF80  21640226...00010048       33     550.0 MB   U GRD
-    211  00  YUQK55   0                        EOM
-  00001C02  0760FF80  2164020D...00010048       33     525.0 MB   U GRD
-    211  00  YUQK52   0                        EOM
-  00001C02  0760FF80  216401F4...00010048       33     500.0 MB   U GRD         
-    211  00  YUQK50   0                        EOM
-  00001C02  0760FF80  216401C2...00010048       33     450.0 MB   U GRD
-    211  00  YUQK45   0                        EOM
-  00001C02  0760FF80  21640190...00010048       33     400.0 MB   U GRD         
-    211  00  YUQK40   0                        EOM
-  00001C02  0760FF80  2164015E...00010048       33     350.0 MB   U GRD
-    211  00  YUQK35   0                        EOM
-  00001C02  0760FF80  2164012C...00010048       33     300.0 MB   U GRD         
-    211  00  YUQK30   0                        EOM
-  00001C02  0760FF80  216400FA...00010048       33     250.0 MB   U GRD         
-    211  00  YUQK25   0                        EOM
-  00001C02  0760FF80  216400C8...00010048       33     200.0 MB   U GRD         
-    211  00  YUQK20   0                        EOM
-  00001C02  0760FF80  21640096...00010048       33     150.0 MB   U GRD         
-    211  00  YUQK15   0                        EOM
-  00001C02  0760FF80  21640064...00010048       33     100.0 MB   U GRD         
-    211  00  YUQK10   0                        EOM
-  00001C02  0760FF80  226403E8...00010048       34    1000.0 MB   V GRD
-    211  00  YVQK99   0                        EOM
-  00001C02  0760FF80  226403CF...00010048       34     975.0 MB   V GRD
-    211  00  YVQK93   0                        EOM
-  00001C02  0760FF80  226403B6...00010048       34     950.0 MB   V GRD
-    211  00  YVQK95   0                        EOM
-  00001C02  0760FF80  2264039D...00010048       34     925.0 MB   V GRD
-    211  00  YVQK92   0                        EOM
-  00001C02  0760FF80  22640384...00010048       34     900.0 MB   V GRD
-    211  00  YVQK90   0                        EOM
-  00001C02  0760FF80  2264036B...00010048       34     875.0 MB   V GRD
-    211  00  YVQK91   0                        EOM
-  00001C02  0760FF80  22640352...00010048       34     850.0 MB   V GRD         
-    211  00  YVQK85   0                        EOM
-  00001C02  0760FF80  22640339...00010048       34     825.0 MB   V GRD
-    211  00  YVQK82   0                        EOM
-  00001C02  0760FF80  22640320...00010048       34     800.0 MB   V GRD
-    211  00  YVQK80   0                        EOM
-  00001C02  0760FF80  22640307...00010048       34     775.0 MB   V GRD
-    211  00  YVQK77   0                        EOM
-  00001C02  0760FF80  226402EE...00010048       34     750.0 MB   V GRD
-    211  00  YVQK75   0                        EOM
-  00001C02  0760FF80  226402D5...00010048       34     725.0 MB   V GRD
-    211  00  YVQK72   0                        EOM
-  00001C02  0760FF80  226402BC...00010048       34     700.0 MB   V GRD         
-    211  00  YVQK70   0                        EOM
-  00001C02  0760FF80  226402A3...00010048       34     675.0 MB   V GRD
-    211  00  YVQK67   0                        EOM
-  00001C02  0760FF80  2264028A...00010048       34     650.0 MB   V GRD
-    211  00  YVQK65   0                        EOM
-  00001C02  0760FF80  22640271...00010048       34     625.0 MB   V GRD
-    211  00  YVQK62   0                        EOM
-  00001C02  0760FF80  22640258...00010048       34     600.0 MB   V GRD
-    211  00  YVQK60   0                        EOM
-  00001C02  0760FF80  2264023F...00010048       34     575.0 MB   V GRD
-    211  00  YVQK57   0                        EOM
-  00001C02  0760FF80  22640226...00010048       34     550.0 MB   V GRD
-    211  00  YVQK55   0                        EOM
-  00001C02  0760FF80  2264020D...00010048       34     525.0 MB   V GRD
-    211  00  YVQK52   0                        EOM
-  00001C02  0760FF80  226401F4...00010048       34     500.0 MB   V GRD         
-    211  00  YVQK50   0                        EOM
-  00001C02  0760FF80  226401C2...00010048       34     450.0 MB   V GRD
-    211  00  YVQK45   0                        EOM
-  00001C02  0760FF80  22640190...00010048       34     400.0 MB   V GRD         
-    211  00  YVQK40   0                        EOM
-  00001C02  0760FF80  2264015E...00010048       34     350.0 MB   V GRD
-    211  00  YVQK35   0                        EOM
-  00001C02  0760FF80  2264012C...00010048       34     300.0 MB   V GRD         
-    211  00  YVQK30   0                        EOM
-  00001C02  0760FF80  226400FA...00010048       34     250.0 MB   V GRD         
-    211  00  YVQK25   0                        EOM
-  00001C02  0760FF80  226400C8...00010048       34     200.0 MB   V GRD         
-    211  00  YVQK20   0                        EOM
-  00001C02  0760FF80  22640096...00010048       34     150.0 MB   V GRD         
-    211  00  YVQK15   0                        EOM
-  00001C02  0760FF80  22640064...00010048       34     100.0 MB   V GRD         
-    211  00  YVQK10   0                        EOM
-  00001C02  0760FF80  02660000...00010048       02           MSL  PRMSL         
-    211  00  YPQK89   0                        EOM
-  00001C02  0760FF80  346403E8...00010048       52    1000.0 MB   R H
-    211  00  YRQK99   0                        EOM
-  00001C02  0760FF80  346403CF...00010048       52     975.0 MB   R H
-    211  00  YRQK93   0                        EOM
-  00001C02  0760FF80  346403B6...00010048       52     950.0 MB   R H
-    211  00  YRQK95   0                        EOM
-  00001C02  0760FF80  3464039D...00010048       52     925.0 MB   R H
-    211  00  YRQK92   0                        EOM
-  00001C02  0760FF80  34640384...00010048       52     900.0 MB   R H
-    211  00  YRQK90   0                        EOM
-  00001C02  0760FF80  3464036B...00010048       52     875.0 MB   R H
-    211  00  YRQK91   0                        EOM
-  00001C02  0760FF80  34640352...00010048       52     850.0 MB   R H           
-    211  00  YRQK85   0                        EOM
-  00001C02  0760FF80  34640339...00010048       52     825.0 MB   R H
-    211  00  YRQK82   0                        EOM
-  00001C02  0760FF80  34640320...00010048       52     800.0 MB   R H
-    211  00  YRQK80   0                        EOM
-  00001C02  0760FF80  34640307...00010048       52     775.0 MB   R H
-    211  00  YRQK77   0                        EOM
-  00001C02  0760FF80  346402EE...00010048       52     750.0 MB   R H
-    211  00  YRQK75   0                        EOM
-  00001C02  0760FF80  346402D5...00010048       52     725.0 MB   R H
-    211  00  YRQK72   0                        EOM
-  00001C02  0760FF80  346402BC...00010048       52     700.0 MB   R H           
-    211  00  YRQK70   0                        EOM
-  00001C02  0760FF80  346402A3...00010048       52     675.0 MB   R H
-    211  00  YRQK67   0                        EOM
-  00001C02  0760FF80  3464028A...00010048       52     650.0 MB   R H
-    211  00  YRQK65   0                        EOM
-  00001C02  0760FF80  34640271...00010048       52     625.0 MB   R H
-    211  00  YRQK62   0                        EOM
-  00001C02  0760FF80  34640258...00010048       52     600.0 MB   R H
-    211  00  YRQK60   0                        EOM
-  00001C02  0760FF80  3464023F...00010048       52     575.0 MB   R H
-    211  00  YRQK57   0                        EOM
-  00001C02  0760FF80  34640226...00010048       52     550.0 MB   R H
-    211  00  YRQK55   0                        EOM
-  00001C02  0760FF80  3464020D...00010048       52     525.0 MB   R H
-    211  00  YRQK52   0                        EOM
-  00001C02  0760FF80  346401F4...00010048       52     500.0 MB   R H           
-    211  00  YRQK50   0                        EOM
-  00001C02  0760FF80  346401C2...00010048       52     450.0 MB   R H
-    211  00  YRQK45   0                        EOM
-  00001C02  0760FF80  34640190...00010048       52     400.0 MB   R H           
-    211  00  YRQK40   0                        EOM
-  00001C02  0760FF80  3464015E...00010048       52     350.0 MB   R H
-    211  00  YRQK35   0                        EOM
-  00001C02  0760FF80  3464012C...00010048       52     300.0 MB   R H           
-    211  00  YRQK30   0                        EOM
-  00001C02  0760FF80  346400FA...00010048       52     250.0 MB   R H
-    211  00  YRQK25   0                        EOM
-  00001C02  0760FF80  346400C8...00010048       52     200.0 MB   R H
-    211  00  YRQK20   0                        EOM
-  00001C02  0760FF80  34640096...00010048       52     150.0 MB   R H
-    211  00  YRQK15   0                        EOM
-  00001C02  0760FF80  34640064...00010048       52     100.0 MB   R H
-    211  00  YRQK10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010048       11    1000.0 MB   TMP
-    211  00  YTQK99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010048       11     975.0 MB   TMP
-    211  00  YTQK93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010048       11     950.0 MB   TMP
-    211  00  YTQK95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010048       11     925.0 MB   TMP
-    211  00  YTQK92   0                        EOM
-  00001C02  0760FF80  0B640384...00010048       11     900.0 MB   TMP
-    211  00  YTQK90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010048       11     875.0 MB   TMP
-    211  00  YTQK91   0                        EOM
-  00001C02  0760FF80  0B640352...00010048       11     850.0 MB   TMP           
-    211  00  YTQK85   0                        EOM
-  00001C02  0760FF80  0B640339...00010048       11     825.0 MB   TMP
-    211  00  YTQK82   0                        EOM
-  00001C02  0760FF80  0B640320...00010048       11     800.0 MB   TMP
-    211  00  YTQK80   0                        EOM
-  00001C02  0760FF80  0B640307...00010048       11     775.0 MB   TMP
-    211  00  YTQK77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010048       11     750.0 MB   TMP
-    211  00  YTQK75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010048       11     725.0 MB   TMP
-    211  00  YTQK72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010048       11     700.0 MB   TMP           
-    211  00  YTQK70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010048       11     675.0 MB   TMP
-    211  00  YTQK67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010048       11     650.0 MB   TMP
-    211  00  YTQK65   0                        EOM
-  00001C02  0760FF80  0B640271...00010048       11     625.0 MB   TMP
-    211  00  YTQK62   0                        EOM
-  00001C02  0760FF80  0B640258...00010048       11     600.0 MB   TMP
-    211  00  YTQK60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010048       11     575.0 MB   TMP
-    211  00  YTQK57   0                        EOM
-  00001C02  0760FF80  0B640226...00010048       11     550.0 MB   TMP
-    211  00  YTQK55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010048       11     525.0 MB   TMP
-    211  00  YTQK52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010048       11     500.0 MB   TMP           
-    211  00  YTQK50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010048       11     450.0 MB   TMP
-    211  00  YTQK45   0                        EOM
-  00001C02  0760FF80  0B640190...00010048       11     400.0 MB   TMP           
-    211  00  YTQK40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010048       11     350.0 MB   TMP
-    211  00  YTQK35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010048       11     300.0 MB   TMP           
-    211  00  YTQK30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010048       11     250.0 MB   TMP           
-    211  00  YTQK25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010048       11     200.0 MB   TMP           
-    211  00  YTQK20   0                        EOM
-  00001C02  0760FF80  0B640096...00010048       11     150.0 MB   TMP           
-    211  00  YTQK15   0                        EOM
-  00001C02  0760FF80  0B640064...00010048       11     100.0 MB   TMP           
-    211  00  YTQK10   0                        EOM
-  00001C02  0760FF80  28640352...00010048       40     850.0 MB  DZDT           
-    211  00  YOQK85   0                        EOM
-  00001C02  0760FF80  286402BC...00010048       40     700.0 MB  DZDT           
-    211  00  YOQK70   0                        EOM
-  00001C02  0760FF80  286401F4...00010048       40     500.0 MB  DZDT           
-    211  00  YOQK50   0                        EOM
-  00001C02  0760FF80  28640190...00010048       40     400.0 MB  DZDT           
-    211  00  YOQK40   0                        EOM
-  00001C02  0760FF80  2864012C...00010048       40     300.0 MB  DZDT           
-    211  00  YOQK30   0                        EOM
-  00001C02  0760FF80  286400FA...00010048       40     250.0 MB  DZDT           
-    211  00  YOQK25   0                        EOM
-  00001C02  0760FF80  286400C8...00010048       40     200.0 MB  DZDT           
-    211  00  YOQK20   0                        EOM
-  00001C02  0760FF80  28640096...00010048       40     150.0 MB  DZDT           
-    211  00  YOQK15   0                        EOM
-  00001C02  0760FF80  28640064...00010048       40     100.0 MB  DZDT           
-    211  00  YOQK10   0                        EOM
-  00001C02  0760FF80  01010000...00010048       01          SFC  PRES           
-    211  00  YPQK98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010048       52        44/100  R H           
-    211  00  YRQK00   0                        EOM
-  00001C02  0760FF80  36C80000...00010048       54          EATM  P WAT         
-    211  00  YFQK00   0                        EOM
-  00001C02  0760FF80  0B690002...00010048       11          2m/SFC TMP         
-    211  00  YTQK98   0                        EOM
-  00001C02  0760FF80  34741E00...00010048       52     BNDRY/SPD  R H           
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  0B070000...00010048       11            TRO TMP           
-    211  00  YTQK97   0                        EOM
-  00001C02  0760FF80  01070000...00010048       01            TRO PRES          
-    211  00  YPQK97   0                        EOM
-  00001C02  0760FF80  21741E00...00010048       33           SPD  U GRD         
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  22741E00...00010048       34           SPD  V GRD         
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  21070000...00010048       33            TRO U GRD         
-    211  00  YUQK97   0                        EOM
-  00001C02  0760FF80  22070000...00010048       34            TRO V GRD         
-    211  00  YVQK97   0                        EOM
-  00001C02  0760FF80  88070000...00010048      136            TRO VW SH         
-    211  00  YBQK97   0                        EOM
-  00001C02  0760FF80  3D010000...00010048       61            SFC A PCP         
-    211  00  YEQK98   0                        EOM
-  00001C02  0760FF80  83010000...00010048      131            SFC LFT X         
-    211  00  YXQK98   0                        EOM
-  00001C02  0760FF80  29640352...00010048       41    850.0 MB    ABS V         
-    211  00  YCQK85   0                        EOM
-  00001C02  0760FF80  296402BC...00010048       41    700.0 MB    ABS V         
-    211  00  YCQK70   0                        EOM
-  00001C02  0760FF80  296401F4...00010048       41    500.0 MB    ABS V         
-    211  00  YCQK50   0                        EOM
-  00001C02  0760FF80  296400FA...00010048       41    250.0 MB    ABS V         
-    211  00  YCQK25   0                        EOM
-  00001C02  0760FF80  9D010000...00010048      157          SFC   CAPE
-    211  00  YWQK98   0                        EOM
-  00001C02  0760FF80  9C010000...00010048      156          SFC   CIN
-    211  00  YYQK98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010048      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQK86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010048      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQK86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010048       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010048       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010048       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010048       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  0B749678...00010048       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010048       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQK86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010048       52   60 SPDY  30 SPDY  R H
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010048       52   90 SPDY  60 SPDY  R H
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  3474785A...00010048       52  120 SPDY  90 SPDY  R H
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  34749678...00010048       52  150 SPDY 120 SPDY  R H
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  3474B496...00010048       52  180 SPDY 150 SPDY  R H
-    211  00  YRQK86   0                        EOM
-  00001C02  0760FF80  21741E00...00010048       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010048       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010048       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  2174785A...00010048       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  21749678...00010048       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  2174B496...00010048       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQK86   0                        EOM
-  00001C02  0760FF80  22741E00...00010048       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010048       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010048       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  2274785A...00010048       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  22749678...00010048       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  2274B496...00010048       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQK86   0                        EOM
-  00001C02  0760FF80  0B690002...00010048       11    2  HTGL     TMP
-    211  00  YTQK98   0                        EOM
-  00001C02  0760FF80  34690002...00010048       52    2  HTGL     R H
-    211  00  YRQK98   0                        EOM
-  00001C02  0760FF80  2169000A...00010048       33   10  HTGL     U GRD
-    211  00  YUQK98   0                        EOM
-  00001C02  0760FF80  2269000A...00010048       34   10  HTGL     V GRD
-    211  00  YVQK98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs078.211 b/parm/wmo/grib_awpgfs078.211
deleted file mode 100755
index 83a2e24079..0000000000
--- a/parm/wmo/grib_awpgfs078.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...0001004E       07    1000.0 MB   HGT           
-    211  00  ZHQT99   0                        EOM
-  00001C02  0760FF80  076403CF...0001004E       07     975.0 MB   HGT
-    211  00  ZHQT93   0                        EOM
-  00001C02  0760FF80  076403B6...0001004E       07     950.0 MB   HGT
-    211  00  ZHQT95   0                        EOM
-  00001C02  0760FF80  0764039D...0001004E       07     925.0 MB   HGT
-    211  00  ZHQT92   0                        EOM
-  00001C02  0760FF80  07640384...0001004E       07     900.0 MB   HGT
-    211  00  ZHQT90   0                        EOM
-  00001C02  0760FF80  0764036B...0001004E       07     875.0 MB   HGT
-    211  00  ZHQT91   0                        EOM
-  00001C02  0760FF80  07640352...0001004E       07     850.0 MB   HGT           
-    211  00  ZHQT85   0                        EOM
-  00001C02  0760FF80  07640339...0001004E       07     825.0 MB   HGT
-    211  00  ZHQT82   0                        EOM
-  00001C02  0760FF80  07640320...0001004E       07     800.0 MB   HGT
-    211  00  ZHQT80   0                        EOM
-  00001C02  0760FF80  07640307...0001004E       07     775.0 MB   HGT
-    211  00  ZHQT77   0                        EOM
-  00001C02  0760FF80  076402EE...0001004E       07     750.0 MB   HGT
-    211  00  ZHQT75   0                        EOM
-  00001C02  0760FF80  076402D5...0001004E       07     725.0 MB   HGT
-    211  00  ZHQT72   0                        EOM
-  00001C02  0760FF80  076402BC...0001004E       07     700.0 MB   HGT           
-    211  00  ZHQT70   0                        EOM
-  00001C02  0760FF80  076402A3...0001004E       07     675.0 MB   HGT
-    211  00  ZHQT67   0                        EOM
-  00001C02  0760FF80  0764028A...0001004E       07     650.0 MB   HGT
-    211  00  ZHQT65   0                        EOM
-  00001C02  0760FF80  07640271...0001004E       07     625.0 MB   HGT
-    211  00  ZHQT62   0                        EOM
-  00001C02  0760FF80  07640258...0001004E       07     600.0 MB   HGT
-    211  00  ZHQT60   0                        EOM
-  00001C02  0760FF80  0764023F...0001004E       07     575.0 MB   HGT
-    211  00  ZHQT57   0                        EOM
-  00001C02  0760FF80  07640226...0001004E       07     550.0 MB   HGT
-    211  00  ZHQT55   0                        EOM
-  00001C02  0760FF80  0764020D...0001004E       07     525.0 MB   HGT
-    211  00  ZHQT52   0                        EOM
-  00001C02  0760FF80  076401F4...0001004E       07     500.0 MB   HGT           
-    211  00  ZHQT50   0                        EOM
-  00001C02  0760FF80  076401C2...0001004E       07     450.0 MB   HGT
-    211  00  ZHQT45   0                        EOM
-  00001C02  0760FF80  07640190...0001004E       07     400.0 MB   HGT           
-    211  00  ZHQT40   0                        EOM
-  00001C02  0760FF80  0764015E...0001004E       07     350.0 MB   HGT
-    211  00  ZHQT35   0                        EOM
-  00001C02  0760FF80  0764012C...0001004E       07     300.0 MB   HGT           
-    211  00  ZHQT30   0                        EOM
-  00001C02  0760FF80  076400FA...0001004E       07     250.0 MB   HGT           
-    211  00  ZHQT25   0                        EOM
-  00001C02  0760FF80  076400C8...0001004E       07     200.0 MB   HGT           
-    211  00  ZHQT20   0                        EOM
-  00001C02  0760FF80  07640096...0001004E       07     150.0 MB   HGT           
-    211  00  ZHQT15   0                        EOM
-  00001C02  0760FF80  07640064...0001004E       07     100.0 MB   HGT           
-    211  00  ZHQT10   0                        EOM
-  00001C02  0760FF80  216403E8...0001004E       33    1000.0 MB   U GRD
-    211  00  ZUQT99   0                        EOM
-  00001C02  0760FF80  216403CF...0001004E       33     975.0 MB   U GRD
-    211  00  ZUQT93   0                        EOM
-  00001C02  0760FF80  216403B6...0001004E       33     950.0 MB   U GRD
-    211  00  ZUQT95   0                        EOM
-  00001C02  0760FF80  2164039D...0001004E       33     925.0 MB   U GRD
-    211  00  ZUQT92   0                        EOM
-  00001C02  0760FF80  21640384...0001004E       33     900.0 MB   U GRD
-    211  00  ZUQT90   0                        EOM
-  00001C02  0760FF80  2164036B...0001004E       33     875.0 MB   U GRD
-    211  00  ZUQT91   0                        EOM
-  00001C02  0760FF80  21640352...0001004E       33     850.0 MB   U GRD         
-    211  00  ZUQT85   0                        EOM
-  00001C02  0760FF80  21640339...0001004E       33     825.0 MB   U GRD
-    211  00  ZUQT82   0                        EOM
-  00001C02  0760FF80  21640320...0001004E       33     800.0 MB   U GRD
-    211  00  ZUQT80   0                        EOM
-  00001C02  0760FF80  21640307...0001004E       33     775.0 MB   U GRD
-    211  00  ZUQT77   0                        EOM
-  00001C02  0760FF80  216402EE...0001004E       33     750.0 MB   U GRD
-    211  00  ZUQT75   0                        EOM
-  00001C02  0760FF80  216402D5...0001004E       33     725.0 MB   U GRD
-    211  00  ZUQT72   0                        EOM
-  00001C02  0760FF80  216402BC...0001004E       33     700.0 MB   U GRD         
-    211  00  ZUQT70   0                        EOM
-  00001C02  0760FF80  216402A3...0001004E       33     675.0 MB   U GRD
-    211  00  ZUQT67   0                        EOM
-  00001C02  0760FF80  2164028A...0001004E       33     650.0 MB   U GRD
-    211  00  ZUQT65   0                        EOM
-  00001C02  0760FF80  21640271...0001004E       33     625.0 MB   U GRD
-    211  00  ZUQT62   0                        EOM
-  00001C02  0760FF80  21640258...0001004E       33     600.0 MB   U GRD
-    211  00  ZUQT60   0                        EOM
-  00001C02  0760FF80  2164023F...0001004E       33     575.0 MB   U GRD
-    211  00  ZUQT57   0                        EOM
-  00001C02  0760FF80  21640226...0001004E       33     550.0 MB   U GRD
-    211  00  ZUQT55   0                        EOM
-  00001C02  0760FF80  2164020D...0001004E       33     525.0 MB   U GRD
-    211  00  ZUQT52   0                        EOM
-  00001C02  0760FF80  216401F4...0001004E       33     500.0 MB   U GRD         
-    211  00  ZUQT50   0                        EOM
-  00001C02  0760FF80  216401C2...0001004E       33     450.0 MB   U GRD
-    211  00  ZUQT45   0                        EOM
-  00001C02  0760FF80  21640190...0001004E       33     400.0 MB   U GRD         
-    211  00  ZUQT40   0                        EOM
-  00001C02  0760FF80  2164015E...0001004E       33     350.0 MB   U GRD
-    211  00  ZUQT35   0                        EOM
-  00001C02  0760FF80  2164012C...0001004E       33     300.0 MB   U GRD         
-    211  00  ZUQT30   0                        EOM
-  00001C02  0760FF80  216400FA...0001004E       33     250.0 MB   U GRD         
-    211  00  ZUQT25   0                        EOM
-  00001C02  0760FF80  216400C8...0001004E       33     200.0 MB   U GRD         
-    211  00  ZUQT20   0                        EOM
-  00001C02  0760FF80  21640096...0001004E       33     150.0 MB   U GRD         
-    211  00  ZUQT15   0                        EOM
-  00001C02  0760FF80  21640064...0001004E       33     100.0 MB   U GRD         
-    211  00  ZUQT10   0                        EOM
-  00001C02  0760FF80  226403E8...0001004E       34    1000.0 MB   V GRD
-    211  00  ZVQT99   0                        EOM
-  00001C02  0760FF80  226403CF...0001004E       34     975.0 MB   V GRD
-    211  00  ZVQT93   0                        EOM
-  00001C02  0760FF80  226403B6...0001004E       34     950.0 MB   V GRD
-    211  00  ZVQT95   0                        EOM
-  00001C02  0760FF80  2264039D...0001004E       34     925.0 MB   V GRD
-    211  00  ZVQT92   0                        EOM
-  00001C02  0760FF80  22640384...0001004E       34     900.0 MB   V GRD
-    211  00  ZVQT90   0                        EOM
-  00001C02  0760FF80  2264036B...0001004E       34     875.0 MB   V GRD
-    211  00  ZVQT91   0                        EOM
-  00001C02  0760FF80  22640352...0001004E       34     850.0 MB   V GRD         
-    211  00  ZVQT85   0                        EOM
-  00001C02  0760FF80  22640339...0001004E       34     825.0 MB   V GRD
-    211  00  ZVQT82   0                        EOM
-  00001C02  0760FF80  22640320...0001004E       34     800.0 MB   V GRD
-    211  00  ZVQT80   0                        EOM
-  00001C02  0760FF80  22640307...0001004E       34     775.0 MB   V GRD
-    211  00  ZVQT77   0                        EOM
-  00001C02  0760FF80  226402EE...0001004E       34     750.0 MB   V GRD
-    211  00  ZVQT75   0                        EOM
-  00001C02  0760FF80  226402D5...0001004E       34     725.0 MB   V GRD
-    211  00  ZVQT72   0                        EOM
-  00001C02  0760FF80  226402BC...0001004E       34     700.0 MB   V GRD         
-    211  00  ZVQT70   0                        EOM
-  00001C02  0760FF80  226402A3...0001004E       34     675.0 MB   V GRD
-    211  00  ZVQT67   0                        EOM
-  00001C02  0760FF80  2264028A...0001004E       34     650.0 MB   V GRD
-    211  00  ZVQT65   0                        EOM
-  00001C02  0760FF80  22640271...0001004E       34     625.0 MB   V GRD
-    211  00  ZVQT62   0                        EOM
-  00001C02  0760FF80  22640258...0001004E       34     600.0 MB   V GRD
-    211  00  ZVQT60   0                        EOM
-  00001C02  0760FF80  2264023F...0001004E       34     575.0 MB   V GRD
-    211  00  ZVQT57   0                        EOM
-  00001C02  0760FF80  22640226...0001004E       34     550.0 MB   V GRD
-    211  00  ZVQT55   0                        EOM
-  00001C02  0760FF80  2264020D...0001004E       34     525.0 MB   V GRD
-    211  00  ZVQT52   0                        EOM
-  00001C02  0760FF80  226401F4...0001004E       34     500.0 MB   V GRD         
-    211  00  ZVQT50   0                        EOM
-  00001C02  0760FF80  226401C2...0001004E       34     450.0 MB   V GRD
-    211  00  ZVQT45   0                        EOM
-  00001C02  0760FF80  22640190...0001004E       34     400.0 MB   V GRD         
-    211  00  ZVQT40   0                        EOM
-  00001C02  0760FF80  2264015E...0001004E       34     350.0 MB   V GRD
-    211  00  ZVQT35   0                        EOM
-  00001C02  0760FF80  2264012C...0001004E       34     300.0 MB   V GRD         
-    211  00  ZVQT30   0                        EOM
-  00001C02  0760FF80  226400FA...0001004E       34     250.0 MB   V GRD         
-    211  00  ZVQT25   0                        EOM
-  00001C02  0760FF80  226400C8...0001004E       34     200.0 MB   V GRD         
-    211  00  ZVQT20   0                        EOM
-  00001C02  0760FF80  22640096...0001004E       34     150.0 MB   V GRD         
-    211  00  ZVQT15   0                        EOM
-  00001C02  0760FF80  22640064...0001004E       34     100.0 MB   V GRD         
-    211  00  ZVQT10   0                        EOM
-  00001C02  0760FF80  02660000...0001004E       02           MSL  PRMSL         
-    211  00  ZPQT89   0                        EOM
-  00001C02  0760FF80  346403E8...0001004E       52    1000.0 MB   R H
-    211  00  ZRQT99   0                        EOM
-  00001C02  0760FF80  346403CF...0001004E       52     975.0 MB   R H
-    211  00  ZRQT93   0                        EOM
-  00001C02  0760FF80  346403B6...0001004E       52     950.0 MB   R H
-    211  00  ZRQT95   0                        EOM
-  00001C02  0760FF80  3464039D...0001004E       52     925.0 MB   R H
-    211  00  ZRQT92   0                        EOM
-  00001C02  0760FF80  34640384...0001004E       52     900.0 MB   R H
-    211  00  ZRQT90   0                        EOM
-  00001C02  0760FF80  3464036B...0001004E       52     875.0 MB   R H
-    211  00  ZRQT91   0                        EOM
-  00001C02  0760FF80  34640352...0001004E       52     850.0 MB   R H           
-    211  00  ZRQT85   0                        EOM
-  00001C02  0760FF80  34640339...0001004E       52     825.0 MB   R H
-    211  00  ZRQT82   0                        EOM
-  00001C02  0760FF80  34640320...0001004E       52     800.0 MB   R H
-    211  00  ZRQT80   0                        EOM
-  00001C02  0760FF80  34640307...0001004E       52     775.0 MB   R H
-    211  00  ZRQT77   0                        EOM
-  00001C02  0760FF80  346402EE...0001004E       52     750.0 MB   R H
-    211  00  ZRQT75   0                        EOM
-  00001C02  0760FF80  346402D5...0001004E       52     725.0 MB   R H
-    211  00  ZRQT72   0                        EOM
-  00001C02  0760FF80  346402BC...0001004E       52     700.0 MB   R H           
-    211  00  ZRQT70   0                        EOM
-  00001C02  0760FF80  346402A3...0001004E       52     675.0 MB   R H
-    211  00  ZRQT67   0                        EOM
-  00001C02  0760FF80  3464028A...0001004E       52     650.0 MB   R H
-    211  00  ZRQT65   0                        EOM
-  00001C02  0760FF80  34640271...0001004E       52     625.0 MB   R H
-    211  00  ZRQT62   0                        EOM
-  00001C02  0760FF80  34640258...0001004E       52     600.0 MB   R H
-    211  00  ZRQT60   0                        EOM
-  00001C02  0760FF80  3464023F...0001004E       52     575.0 MB   R H
-    211  00  ZRQT57   0                        EOM
-  00001C02  0760FF80  34640226...0001004E       52     550.0 MB   R H
-    211  00  ZRQT55   0                        EOM
-  00001C02  0760FF80  3464020D...0001004E       52     525.0 MB   R H
-    211  00  ZRQT52   0                        EOM
-  00001C02  0760FF80  346401F4...0001004E       52     500.0 MB   R H           
-    211  00  ZRQT50   0                        EOM
-  00001C02  0760FF80  346401C2...0001004E       52     450.0 MB   R H
-    211  00  ZRQT45   0                        EOM
-  00001C02  0760FF80  34640190...0001004E       52     400.0 MB   R H           
-    211  00  ZRQT40   0                        EOM
-  00001C02  0760FF80  3464015E...0001004E       52     350.0 MB   R H
-    211  00  ZRQT35   0                        EOM
-  00001C02  0760FF80  3464012C...0001004E       52     300.0 MB   R H           
-    211  00  ZRQT30   0                        EOM
-  00001C02  0760FF80  346400FA...0001004E       52     250.0 MB   R H
-    211  00  ZRQT25   0                        EOM
-  00001C02  0760FF80  346400C8...0001004E       52     200.0 MB   R H
-    211  00  ZRQT20   0                        EOM
-  00001C02  0760FF80  34640096...0001004E       52     150.0 MB   R H
-    211  00  ZRQT15   0                        EOM
-  00001C02  0760FF80  34640064...0001004E       52     100.0 MB   R H
-    211  00  ZRQT10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001004E       11    1000.0 MB   TMP
-    211  00  ZTQT99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001004E       11     975.0 MB   TMP
-    211  00  ZTQT93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001004E       11     950.0 MB   TMP
-    211  00  ZTQT95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001004E       11     925.0 MB   TMP
-    211  00  ZTQT92   0                        EOM
-  00001C02  0760FF80  0B640384...0001004E       11     900.0 MB   TMP
-    211  00  ZTQT90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001004E       11     875.0 MB   TMP
-    211  00  ZTQT91   0                        EOM
-  00001C02  0760FF80  0B640352...0001004E       11     850.0 MB   TMP           
-    211  00  ZTQT85   0                        EOM
-  00001C02  0760FF80  0B640339...0001004E       11     825.0 MB   TMP
-    211  00  ZTQT82   0                        EOM
-  00001C02  0760FF80  0B640320...0001004E       11     800.0 MB   TMP
-    211  00  ZTQT80   0                        EOM
-  00001C02  0760FF80  0B640307...0001004E       11     775.0 MB   TMP
-    211  00  ZTQT77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001004E       11     750.0 MB   TMP
-    211  00  ZTQT75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001004E       11     725.0 MB   TMP
-    211  00  ZTQT72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001004E       11     700.0 MB   TMP           
-    211  00  ZTQT70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001004E       11     675.0 MB   TMP
-    211  00  ZTQT67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001004E       11     650.0 MB   TMP
-    211  00  ZTQT65   0                        EOM
-  00001C02  0760FF80  0B640271...0001004E       11     625.0 MB   TMP
-    211  00  ZTQT62   0                        EOM
-  00001C02  0760FF80  0B640258...0001004E       11     600.0 MB   TMP
-    211  00  ZTQT60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001004E       11     575.0 MB   TMP
-    211  00  ZTQT57   0                        EOM
-  00001C02  0760FF80  0B640226...0001004E       11     550.0 MB   TMP
-    211  00  ZTQT55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001004E       11     525.0 MB   TMP
-    211  00  ZTQT52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001004E       11     500.0 MB   TMP           
-    211  00  ZTQT50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001004E       11     450.0 MB   TMP
-    211  00  ZTQT45   0                        EOM
-  00001C02  0760FF80  0B640190...0001004E       11     400.0 MB   TMP           
-    211  00  ZTQT40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001004E       11     350.0 MB   TMP
-    211  00  ZTQT35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001004E       11     300.0 MB   TMP           
-    211  00  ZTQT30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001004E       11     250.0 MB   TMP           
-    211  00  ZTQT25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001004E       11     200.0 MB   TMP           
-    211  00  ZTQT20   0                        EOM
-  00001C02  0760FF80  0B640096...0001004E       11     150.0 MB   TMP           
-    211  00  ZTQT15   0                        EOM
-  00001C02  0760FF80  0B640064...0001004E       11     100.0 MB   TMP           
-    211  00  ZTQT10   0                        EOM
-  00001C02  0760FF80  28640352...0001004E       40     850.0 MB  DZDT           
-    211  00  ZOQT85   0                        EOM
-  00001C02  0760FF80  286402BC...0001004E       40     700.0 MB  DZDT           
-    211  00  ZOQT70   0                        EOM
-  00001C02  0760FF80  286401F4...0001004E       40     500.0 MB  DZDT           
-    211  00  ZOQT50   0                        EOM
-  00001C02  0760FF80  28640190...0001004E       40     400.0 MB  DZDT           
-    211  00  ZOQT40   0                        EOM
-  00001C02  0760FF80  2864012C...0001004E       40     300.0 MB  DZDT           
-    211  00  ZOQT30   0                        EOM
-  00001C02  0760FF80  286400FA...0001004E       40     250.0 MB  DZDT           
-    211  00  ZOQT25   0                        EOM
-  00001C02  0760FF80  286400C8...0001004E       40     200.0 MB  DZDT           
-    211  00  ZOQT20   0                        EOM
-  00001C02  0760FF80  28640096...0001004E       40     150.0 MB  DZDT           
-    211  00  ZOQT15   0                        EOM
-  00001C02  0760FF80  28640064...0001004E       40     100.0 MB  DZDT           
-    211  00  ZOQT10   0                        EOM
-  00001C02  0760FF80  01010000...0001004E       01          SFC  PRES           
-    211  00  ZPQT98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001004E       52        44/100  R H           
-    211  00  ZRQT00   0                        EOM
-  00001C02  0760FF80  36C80000...0001004E       54          EATM  P WAT         
-    211  00  ZFQT00   0                        EOM
-  00001C02  0760FF80  0B690002...0001004E       11          2m/SFC TMP         
-    211  00  ZTQT98   0                        EOM
-  00001C02  0760FF80  34741E00...0001004E       52      BNDRY/SPD  R H          
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  0B070000...0001004E       11            TRO TMP           
-    211  00  ZTQT97   0                        EOM
-  00001C02  0760FF80  01070000...0001004E       01            TRO PRES          
-    211  00  ZPQT97   0                        EOM
-  00001C02  0760FF80  21741E00...0001004E       33           SPD  U GRD         
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  22741E00...0001004E       34           SPD  V GRD         
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  21070000...0001004E       33            TRO U GRD         
-    211  00  ZUQT97   0                        EOM
-  00001C02  0760FF80  22070000...0001004E       34            TRO V GRD         
-    211  00  ZVQT97   0                        EOM
-  00001C02  0760FF80  88070000...0001004E      136            TRO VW SH         
-    211  00  ZBQT97   0                        EOM
-  00001C02  0760FF80  3D010000...0001004E       61            SFC A PCP         
-    211  00  ZEQT98   0                        EOM
-  00001C02  0760FF80  83010000...0001004E      131            SFC LFT X         
-    211  00  ZXQT98   0                        EOM
-  00001C02  0760FF80  29640352...0001004E       41    850.0 MB    ABS V         
-    211  00  ZCQT85   0                        EOM
-  00001C02  0760FF80  296402BC...0001004E       41    700.0 MB    ABS V         
-    211  00  ZCQT70   0                        EOM
-  00001C02  0760FF80  296401F4...0001004E       41    500.0 MB    ABS V         
-    211  00  ZCQT50   0                        EOM
-  00001C02  0760FF80  296400FA...0001004E       41    250.0 MB    ABS V         
-    211  00  ZCQT25   0                        EOM
-  00001C02  0760FF80  9D010000...0001004E      157          SFC   CAPE
-    211  00  ZWQT98   0                        EOM
-  00001C02  0760FF80  9C010000...0001004E      156          SFC   CIN
-    211  00  ZYQT98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001004E      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQT86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001004E      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQT86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001004E       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001004E       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001004E       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001004E       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  0B749678...0001004E       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001004E       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQT86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001004E       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001004E       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  3474785A...0001004E       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  34749678...0001004E       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  3474B496...0001004E       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQT86   0                        EOM
-  00001C02  0760FF80  21741E00...0001004E       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001004E       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001004E       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  2174785A...0001004E       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  21749678...0001004E       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  2174B496...0001004E       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQT86   0                        EOM
-  00001C02  0760FF80  22741E00...0001004E       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001004E       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001004E       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  2274785A...0001004E       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  22749678...0001004E       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  2274B496...0001004E       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQT86   0                        EOM
-  00001C02  0760FF80  0B690002...0001004E       11    2  HTGL     TMP
-    211  00  ZTQT98   0                        EOM
-  00001C02  0760FF80  34690002...0001004E       52    2  HTGL     R H
-    211  00  ZRQT98   0                        EOM
-  00001C02  0760FF80  2169000A...0001004E       33   10  HTGL     U GRD
-    211  00  ZUQT98   0                        EOM
-  00001C02  0760FF80  2269000A...0001004E       34   10  HTGL     V GRD
-    211  00  ZVQT98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs084.211 b/parm/wmo/grib_awpgfs084.211
deleted file mode 100755
index ee86abe6ee..0000000000
--- a/parm/wmo/grib_awpgfs084.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010054       07    1000.0 MB   HGT           
-    211  00  YHQL99   0                        EOM
-  00001C02  0760FF80  076403CF...00010054       07     975.0 MB   HGT
-    211  00  YHQL93   0                        EOM
-  00001C02  0760FF80  076403B6...00010054       07     950.0 MB   HGT
-    211  00  YHQL95   0                        EOM
-  00001C02  0760FF80  0764039D...00010054       07     925.0 MB   HGT
-    211  00  YHQL92   0                        EOM
-  00001C02  0760FF80  07640384...00010054       07     900.0 MB   HGT
-    211  00  YHQL90   0                        EOM
-  00001C02  0760FF80  0764036B...00010054       07     875.0 MB   HGT
-    211  00  YHQL91   0                        EOM
-  00001C02  0760FF80  07640352...00010054       07     850.0 MB   HGT           
-    211  00  YHQL85   0                        EOM
-  00001C02  0760FF80  07640339...00010054       07     825.0 MB   HGT
-    211  00  YHQL82   0                        EOM
-  00001C02  0760FF80  07640320...00010054       07     800.0 MB   HGT
-    211  00  YHQL80   0                        EOM
-  00001C02  0760FF80  07640307...00010054       07     775.0 MB   HGT
-    211  00  YHQL77   0                        EOM
-  00001C02  0760FF80  076402EE...00010054       07     750.0 MB   HGT
-    211  00  YHQL75   0                        EOM
-  00001C02  0760FF80  076402D5...00010054       07     725.0 MB   HGT
-    211  00  YHQL72   0                        EOM
-  00001C02  0760FF80  076402BC...00010054       07     700.0 MB   HGT           
-    211  00  YHQL70   0                        EOM
-  00001C02  0760FF80  076402A3...00010054       07     675.0 MB   HGT
-    211  00  YHQL67   0                        EOM
-  00001C02  0760FF80  0764028A...00010054       07     650.0 MB   HGT
-    211  00  YHQL65   0                        EOM
-  00001C02  0760FF80  07640271...00010054       07     625.0 MB   HGT
-    211  00  YHQL62   0                        EOM
-  00001C02  0760FF80  07640258...00010054       07     600.0 MB   HGT
-    211  00  YHQL60   0                        EOM
-  00001C02  0760FF80  0764023F...00010054       07     575.0 MB   HGT
-    211  00  YHQL57   0                        EOM
-  00001C02  0760FF80  07640226...00010054       07     550.0 MB   HGT
-    211  00  YHQL55   0                        EOM
-  00001C02  0760FF80  0764020D...00010054       07     525.0 MB   HGT
-    211  00  YHQL52   0                        EOM
-  00001C02  0760FF80  076401F4...00010054       07     500.0 MB   HGT           
-    211  00  YHQL50   0                        EOM
-  00001C02  0760FF80  076401C2...00010054       07     450.0 MB   HGT
-    211  00  YHQL45   0                        EOM
-  00001C02  0760FF80  07640190...00010054       07     400.0 MB   HGT           
-    211  00  YHQL40   0                        EOM
-  00001C02  0760FF80  0764015E...00010054       07     350.0 MB   HGT
-    211  00  YHQL35   0                        EOM
-  00001C02  0760FF80  0764012C...00010054       07     300.0 MB   HGT           
-    211  00  YHQL30   0                        EOM
-  00001C02  0760FF80  076400FA...00010054       07     250.0 MB   HGT           
-    211  00  YHQL25   0                        EOM
-  00001C02  0760FF80  076400C8...00010054       07     200.0 MB   HGT           
-    211  00  YHQL20   0                        EOM
-  00001C02  0760FF80  07640096...00010054       07     150.0 MB   HGT           
-    211  00  YHQL15   0                        EOM
-  00001C02  0760FF80  07640064...00010054       07     100.0 MB   HGT           
-    211  00  YHQL10   0                        EOM
-  00001C02  0760FF80  216403E8...00010054       33    1000.0 MB   U GRD
-    211  00  YUQL99   0                        EOM
-  00001C02  0760FF80  216403CF...00010054       33     975.0 MB   U GRD
-    211  00  YUQL93   0                        EOM
-  00001C02  0760FF80  216403B6...00010054       33     950.0 MB   U GRD
-    211  00  YUQL95   0                        EOM
-  00001C02  0760FF80  2164039D...00010054       33     925.0 MB   U GRD
-    211  00  YUQL92   0                        EOM
-  00001C02  0760FF80  21640384...00010054       33     900.0 MB   U GRD
-    211  00  YUQL90   0                        EOM
-  00001C02  0760FF80  2164036B...00010054       33     875.0 MB   U GRD
-    211  00  YUQL91   0                        EOM
-  00001C02  0760FF80  21640352...00010054       33     850.0 MB   U GRD         
-    211  00  YUQL85   0                        EOM
-  00001C02  0760FF80  21640339...00010054       33     825.0 MB   U GRD
-    211  00  YUQL82   0                        EOM
-  00001C02  0760FF80  21640320...00010054       33     800.0 MB   U GRD
-    211  00  YUQL80   0                        EOM
-  00001C02  0760FF80  21640307...00010054       33     775.0 MB   U GRD
-    211  00  YUQL77   0                        EOM
-  00001C02  0760FF80  216402EE...00010054       33     750.0 MB   U GRD
-    211  00  YUQL75   0                        EOM
-  00001C02  0760FF80  216402D5...00010054       33     725.0 MB   U GRD
-    211  00  YUQL72   0                        EOM
-  00001C02  0760FF80  216402BC...00010054       33     700.0 MB   U GRD         
-    211  00  YUQL70   0                        EOM
-  00001C02  0760FF80  216402A3...00010054       33     675.0 MB   U GRD
-    211  00  YUQL67   0                        EOM
-  00001C02  0760FF80  2164028A...00010054       33     650.0 MB   U GRD
-    211  00  YUQL65   0                        EOM
-  00001C02  0760FF80  21640271...00010054       33     625.0 MB   U GRD
-    211  00  YUQL62   0                        EOM
-  00001C02  0760FF80  21640258...00010054       33     600.0 MB   U GRD
-    211  00  YUQL60   0                        EOM
-  00001C02  0760FF80  2164023F...00010054       33     575.0 MB   U GRD
-    211  00  YUQL57   0                        EOM
-  00001C02  0760FF80  21640226...00010054       33     550.0 MB   U GRD
-    211  00  YUQL55   0                        EOM
-  00001C02  0760FF80  2164020D...00010054       33     525.0 MB   U GRD
-    211  00  YUQL52   0                        EOM
-  00001C02  0760FF80  216401F4...00010054       33     500.0 MB   U GRD         
-    211  00  YUQL50   0                        EOM
-  00001C02  0760FF80  216401C2...00010054       33     450.0 MB   U GRD
-    211  00  YUQL45   0                        EOM
-  00001C02  0760FF80  21640190...00010054       33     400.0 MB   U GRD         
-    211  00  YUQL40   0                        EOM
-  00001C02  0760FF80  2164015E...00010054       33     350.0 MB   U GRD
-    211  00  YUQL35   0                        EOM
-  00001C02  0760FF80  2164012C...00010054       33     300.0 MB   U GRD         
-    211  00  YUQL30   0                        EOM
-  00001C02  0760FF80  216400FA...00010054       33     250.0 MB   U GRD         
-    211  00  YUQL25   0                        EOM
-  00001C02  0760FF80  216400C8...00010054       33     200.0 MB   U GRD         
-    211  00  YUQL20   0                        EOM
-  00001C02  0760FF80  21640096...00010054       33     150.0 MB   U GRD         
-    211  00  YUQL15   0                        EOM
-  00001C02  0760FF80  21640064...00010054       33     100.0 MB   U GRD         
-    211  00  YUQL10   0                        EOM
-  00001C02  0760FF80  226403E8...00010054       34    1000.0 MB   V GRD
-    211  00  YVQL99   0                        EOM
-  00001C02  0760FF80  226403CF...00010054       34     975.0 MB   V GRD
-    211  00  YVQL93   0                        EOM
-  00001C02  0760FF80  226403B6...00010054       34     950.0 MB   V GRD
-    211  00  YVQL95   0                        EOM
-  00001C02  0760FF80  2264039D...00010054       34     925.0 MB   V GRD
-    211  00  YVQL92   0                        EOM
-  00001C02  0760FF80  22640384...00010054       34     900.0 MB   V GRD
-    211  00  YVQL90   0                        EOM
-  00001C02  0760FF80  2264036B...00010054       34     875.0 MB   V GRD
-    211  00  YVQL91   0                        EOM
-  00001C02  0760FF80  22640352...00010054       34     850.0 MB   V GRD         
-    211  00  YVQL85   0                        EOM
-  00001C02  0760FF80  22640339...00010054       34     825.0 MB   V GRD
-    211  00  YVQL82   0                        EOM
-  00001C02  0760FF80  22640320...00010054       34     800.0 MB   V GRD
-    211  00  YVQL80   0                        EOM
-  00001C02  0760FF80  22640307...00010054       34     775.0 MB   V GRD
-    211  00  YVQL77   0                        EOM
-  00001C02  0760FF80  226402EE...00010054       34     750.0 MB   V GRD
-    211  00  YVQL75   0                        EOM
-  00001C02  0760FF80  226402D5...00010054       34     725.0 MB   V GRD
-    211  00  YVQL72   0                        EOM
-  00001C02  0760FF80  226402BC...00010054       34     700.0 MB   V GRD         
-    211  00  YVQL70   0                        EOM
-  00001C02  0760FF80  226402A3...00010054       34     675.0 MB   V GRD
-    211  00  YVQL67   0                        EOM
-  00001C02  0760FF80  2264028A...00010054       34     650.0 MB   V GRD
-    211  00  YVQL65   0                        EOM
-  00001C02  0760FF80  22640271...00010054       34     625.0 MB   V GRD
-    211  00  YVQL62   0                        EOM
-  00001C02  0760FF80  22640258...00010054       34     600.0 MB   V GRD
-    211  00  YVQL60   0                        EOM
-  00001C02  0760FF80  2264023F...00010054       34     575.0 MB   V GRD
-    211  00  YVQL57   0                        EOM
-  00001C02  0760FF80  22640226...00010054       34     550.0 MB   V GRD
-    211  00  YVQL55   0                        EOM
-  00001C02  0760FF80  2264020D...00010054       34     525.0 MB   V GRD
-    211  00  YVQL52   0                        EOM
-  00001C02  0760FF80  226401F4...00010054       34     500.0 MB   V GRD         
-    211  00  YVQL50   0                        EOM
-  00001C02  0760FF80  226401C2...00010054       34     450.0 MB   V GRD
-    211  00  YVQL45   0                        EOM
-  00001C02  0760FF80  22640190...00010054       34     400.0 MB   V GRD         
-    211  00  YVQL40   0                        EOM
-  00001C02  0760FF80  2264015E...00010054       34     350.0 MB   V GRD
-    211  00  YVQL35   0                        EOM
-  00001C02  0760FF80  2264012C...00010054       34     300.0 MB   V GRD         
-    211  00  YVQL30   0                        EOM
-  00001C02  0760FF80  226400FA...00010054       34     250.0 MB   V GRD         
-    211  00  YVQL25   0                        EOM
-  00001C02  0760FF80  226400C8...00010054       34     200.0 MB   V GRD         
-    211  00  YVQL20   0                        EOM
-  00001C02  0760FF80  22640096...00010054       34     150.0 MB   V GRD         
-    211  00  YVQL15   0                        EOM
-  00001C02  0760FF80  22640064...00010054       34     100.0 MB   V GRD         
-    211  00  YVQL10   0                        EOM
-  00001C02  0760FF80  02660000...00010054       02           MSL  PRMSL         
-    211  00  YPQL89   0                        EOM
-  00001C02  0760FF80  346403E8...00010054       52    1000.0 MB   R H
-    211  00  YRQL99   0                        EOM
-  00001C02  0760FF80  346403CF...00010054       52     975.0 MB   R H
-    211  00  YRQL93   0                        EOM
-  00001C02  0760FF80  346403B6...00010054       52     950.0 MB   R H
-    211  00  YRQL95   0                        EOM
-  00001C02  0760FF80  3464039D...00010054       52     925.0 MB   R H
-    211  00  YRQL92   0                        EOM
-  00001C02  0760FF80  34640384...00010054       52     900.0 MB   R H
-    211  00  YRQL90   0                        EOM
-  00001C02  0760FF80  3464036B...00010054       52     875.0 MB   R H
-    211  00  YRQL91   0                        EOM
-  00001C02  0760FF80  34640352...00010054       52     850.0 MB   R H           
-    211  00  YRQL85   0                        EOM
-  00001C02  0760FF80  34640339...00010054       52     825.0 MB   R H
-    211  00  YRQL82   0                        EOM
-  00001C02  0760FF80  34640320...00010054       52     800.0 MB   R H
-    211  00  YRQL80   0                        EOM
-  00001C02  0760FF80  34640307...00010054       52     775.0 MB   R H
-    211  00  YRQL77   0                        EOM
-  00001C02  0760FF80  346402EE...00010054       52     750.0 MB   R H
-    211  00  YRQL75   0                        EOM
-  00001C02  0760FF80  346402D5...00010054       52     725.0 MB   R H
-    211  00  YRQL72   0                        EOM
-  00001C02  0760FF80  346402BC...00010054       52     700.0 MB   R H           
-    211  00  YRQL70   0                        EOM
-  00001C02  0760FF80  346402A3...00010054       52     675.0 MB   R H
-    211  00  YRQL67   0                        EOM
-  00001C02  0760FF80  3464028A...00010054       52     650.0 MB   R H
-    211  00  YRQL65   0                        EOM
-  00001C02  0760FF80  34640271...00010054       52     625.0 MB   R H
-    211  00  YRQL62   0                        EOM
-  00001C02  0760FF80  34640258...00010054       52     600.0 MB   R H
-    211  00  YRQL60   0                        EOM
-  00001C02  0760FF80  3464023F...00010054       52     575.0 MB   R H
-    211  00  YRQL57   0                        EOM
-  00001C02  0760FF80  34640226...00010054       52     550.0 MB   R H
-    211  00  YRQL55   0                        EOM
-  00001C02  0760FF80  3464020D...00010054       52     525.0 MB   R H
-    211  00  YRQL52   0                        EOM
-  00001C02  0760FF80  346401F4...00010054       52     500.0 MB   R H           
-    211  00  YRQL50   0                        EOM
-  00001C02  0760FF80  346401C2...00010054       52     450.0 MB   R H
-    211  00  YRQL45   0                        EOM
-  00001C02  0760FF80  34640190...00010054       52     400.0 MB   R H           
-    211  00  YRQL40   0                        EOM
-  00001C02  0760FF80  3464015E...00010054       52     350.0 MB   R H
-    211  00  YRQL35   0                        EOM
-  00001C02  0760FF80  3464012C...00010054       52     300.0 MB   R H           
-    211  00  YRQL30   0                        EOM
-  00001C02  0760FF80  346400FA...00010054       52     250.0 MB   R H
-    211  00  YRQL25   0                        EOM
-  00001C02  0760FF80  346400C8...00010054       52     200.0 MB   R H
-    211  00  YRQL20   0                        EOM
-  00001C02  0760FF80  34640096...00010054       52     150.0 MB   R H
-    211  00  YRQL15   0                        EOM
-  00001C02  0760FF80  34640064...00010054       52     100.0 MB   R H
-    211  00  YRQL10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010054       11    1000.0 MB   TMP
-    211  00  YTQL99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010054       11     975.0 MB   TMP
-    211  00  YTQL93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010054       11     950.0 MB   TMP
-    211  00  YTQL95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010054       11     925.0 MB   TMP
-    211  00  YTQL92   0                        EOM
-  00001C02  0760FF80  0B640384...00010054       11     900.0 MB   TMP
-    211  00  YTQL90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010054       11     875.0 MB   TMP
-    211  00  YTQL91   0                        EOM
-  00001C02  0760FF80  0B640352...00010054       11     850.0 MB   TMP           
-    211  00  YTQL85   0                        EOM
-  00001C02  0760FF80  0B640339...00010054       11     825.0 MB   TMP
-    211  00  YTQL82   0                        EOM
-  00001C02  0760FF80  0B640320...00010054       11     800.0 MB   TMP
-    211  00  YTQL80   0                        EOM
-  00001C02  0760FF80  0B640307...00010054       11     775.0 MB   TMP
-    211  00  YTQL77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010054       11     750.0 MB   TMP
-    211  00  YTQL75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010054       11     725.0 MB   TMP
-    211  00  YTQL72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010054       11     700.0 MB   TMP           
-    211  00  YTQL70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010054       11     675.0 MB   TMP
-    211  00  YTQL67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010054       11     650.0 MB   TMP
-    211  00  YTQL65   0                        EOM
-  00001C02  0760FF80  0B640271...00010054       11     625.0 MB   TMP
-    211  00  YTQL62   0                        EOM
-  00001C02  0760FF80  0B640258...00010054       11     600.0 MB   TMP
-    211  00  YTQL60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010054       11     575.0 MB   TMP
-    211  00  YTQL57   0                        EOM
-  00001C02  0760FF80  0B640226...00010054       11     550.0 MB   TMP
-    211  00  YTQL55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010054       11     525.0 MB   TMP
-    211  00  YTQL52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010054       11     500.0 MB   TMP           
-    211  00  YTQL50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010054       11     450.0 MB   TMP
-    211  00  YTQL45   0                        EOM
-  00001C02  0760FF80  0B640190...00010054       11     400.0 MB   TMP           
-    211  00  YTQL40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010054       11     350.0 MB   TMP
-    211  00  YTQL35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010054       11     300.0 MB   TMP           
-    211  00  YTQL30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010054       11     250.0 MB   TMP           
-    211  00  YTQL25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010054       11     200.0 MB   TMP           
-    211  00  YTQL20   0                        EOM
-  00001C02  0760FF80  0B640096...00010054       11     150.0 MB   TMP           
-    211  00  YTQL15   0                        EOM
-  00001C02  0760FF80  0B640064...00010054       11     100.0 MB   TMP           
-    211  00  YTQL10   0                        EOM
-  00001C02  0760FF80  28640352...00010054       40     850.0 MB  DZDT           
-    211  00  YOQL85   0                        EOM
-  00001C02  0760FF80  286402BC...00010054       40     700.0 MB  DZDT           
-    211  00  YOQL70   0                        EOM
-  00001C02  0760FF80  286401F4...00010054       40     500.0 MB  DZDT           
-    211  00  YOQL50   0                        EOM
-  00001C02  0760FF80  28640190...00010054       40     400.0 MB  DZDT           
-    211  00  YOQL40   0                        EOM
-  00001C02  0760FF80  2864012C...00010054       40     300.0 MB  DZDT           
-    211  00  YOQL30   0                        EOM
-  00001C02  0760FF80  286400FA...00010054       40     250.0 MB  DZDT           
-    211  00  YOQL25   0                        EOM
-  00001C02  0760FF80  286400C8...00010054       40     200.0 MB  DZDT           
-    211  00  YOQL20   0                        EOM
-  00001C02  0760FF80  28640096...00010054       40     150.0 MB  DZDT           
-    211  00  YOQL15   0                        EOM
-  00001C02  0760FF80  28640064...00010054       40     100.0 MB  DZDT           
-    211  00  YOQL10   0                        EOM
-  00001C02  0760FF80  01010000...00010054       01          SFC  PRES           
-    211  00  YPQL98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010054       52        44/100  R H           
-    211  00  YRQL00   0                        EOM
-  00001C02  0760FF80  36C80000...00010054       54          EATM  P WAT         
-    211  00  YFQL00   0                        EOM
-  00001C02  0760FF80  0B690002...00010054       11          2m/SFC TMP         
-    211  00  YTQL98   0                        EOM
-  00001C02  0760FF80  34741E00...00010054       52     BNDRY/SPD  R H           
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  0B070000...00010054       11            TRO TMP           
-    211  00  YTQL97   0                        EOM
-  00001C02  0760FF80  01070000...00010054       01            TRO PRES          
-    211  00  YPQL97   0                        EOM
-  00001C02  0760FF80  21741E00...00010054       33           SPD  U GRD         
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  22741E00...00010054       34           SPD  V GRD         
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  21070000...00010054       33            TRO U GRD         
-    211  00  YUQL97   0                        EOM
-  00001C02  0760FF80  22070000...00010054       34            TRO V GRD         
-    211  00  YVQL97   0                        EOM
-  00001C02  0760FF80  88070000...00010054      136            TRO VW SH         
-    211  00  YBQL97   0                        EOM
-  00001C02  0760FF80  3D010000...00010054       61            SFC A PCP         
-    211  00  YEQL98   0                        EOM
-  00001C02  0760FF80  83010000...00010054      131            SFC LFT X         
-    211  00  YXQL98   0                        EOM
-  00001C02  0760FF80  29640352...00010054       41    850.0 MB    ABS V         
-    211  00  YCQL85   0                        EOM
-  00001C02  0760FF80  296402BC...00010054       41    700.0 MB    ABS V         
-    211  00  YCQL70   0                        EOM
-  00001C02  0760FF80  296401F4...00010054       41    500.0 MB    ABS V         
-    211  00  YCQL50   0                        EOM
-  00001C02  0760FF80  296400FA...00010054       41    250.0 MB    ABS V         
-    211  00  YCQL25   0                        EOM
-  00001C02  0760FF80  9D010000...00010054      157          SFC   CAPE
-    211  00  YWQL98   0                        EOM
-  00001C02  0760FF80  9C010000...00010054      156          SFC   CIN
-    211  00  YYQL98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010054      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQL86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010054      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQL86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010054       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010054       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010054       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010054       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  0B749678...00010054       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010054       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQL86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010054       52   60 SPDY  30 SPDY  R H
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010054       52   90 SPDY  60 SPDY  R H
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  3474785A...00010054       52  120 SPDY  90 SPDY  R H
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  34749678...00010054       52  150 SPDY 120 SPDY  R H
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  3474B496...00010054       52  180 SPDY 150 SPDY  R H
-    211  00  YRQL86   0                        EOM
-  00001C02  0760FF80  21741E00...00010054       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010054       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010054       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  2174785A...00010054       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  21749678...00010054       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  2174B496...00010054       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQL86   0                        EOM
-  00001C02  0760FF80  22741E00...00010054       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010054       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010054       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  2274785A...00010054       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  22749678...00010054       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  2274B496...00010054       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQL86   0                        EOM
-  00001C02  0760FF80  0B690002...00010054       11    2  HTGL     TMP
-    211  00  YTQL98   0                        EOM
-  00001C02  0760FF80  34690002...00010054       52    2  HTGL     R H
-    211  00  YRQL98   0                        EOM
-  00001C02  0760FF80  2169000A...00010054       33   10  HTGL     U GRD
-    211  00  YUQL98   0                        EOM
-  00001C02  0760FF80  2269000A...00010054       34   10  HTGL     V GRD
-    211  00  YVQL98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs090.211 b/parm/wmo/grib_awpgfs090.211
deleted file mode 100755
index e59942775b..0000000000
--- a/parm/wmo/grib_awpgfs090.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...0001005A       07    1000.0 MB   HGT           
-    211  00  ZHQU99   0                        EOM
-  00001C02  0760FF80  076403CF...0001005A       07     975.0 MB   HGT
-    211  00  ZHQU93   0                        EOM
-  00001C02  0760FF80  076403B6...0001005A       07     950.0 MB   HGT
-    211  00  ZHQU95   0                        EOM
-  00001C02  0760FF80  0764039D...0001005A       07     925.0 MB   HGT
-    211  00  ZHQU92   0                        EOM
-  00001C02  0760FF80  07640384...0001005A       07     900.0 MB   HGT
-    211  00  ZHQU90   0                        EOM
-  00001C02  0760FF80  0764036B...0001005A       07     875.0 MB   HGT
-    211  00  ZHQU91   0                        EOM
-  00001C02  0760FF80  07640352...0001005A       07     850.0 MB   HGT           
-    211  00  ZHQU85   0                        EOM
-  00001C02  0760FF80  07640339...0001005A       07     825.0 MB   HGT
-    211  00  ZHQU82   0                        EOM
-  00001C02  0760FF80  07640320...0001005A       07     800.0 MB   HGT
-    211  00  ZHQU80   0                        EOM
-  00001C02  0760FF80  07640307...0001005A       07     775.0 MB   HGT
-    211  00  ZHQU77   0                        EOM
-  00001C02  0760FF80  076402EE...0001005A       07     750.0 MB   HGT
-    211  00  ZHQU75   0                        EOM
-  00001C02  0760FF80  076402D5...0001005A       07     725.0 MB   HGT
-    211  00  ZHQU72   0                        EOM
-  00001C02  0760FF80  076402BC...0001005A       07     700.0 MB   HGT           
-    211  00  ZHQU70   0                        EOM
-  00001C02  0760FF80  076402A3...0001005A       07     675.0 MB   HGT
-    211  00  ZHQU67   0                        EOM
-  00001C02  0760FF80  0764028A...0001005A       07     650.0 MB   HGT
-    211  00  ZHQU65   0                        EOM
-  00001C02  0760FF80  07640271...0001005A       07     625.0 MB   HGT
-    211  00  ZHQU62   0                        EOM
-  00001C02  0760FF80  07640258...0001005A       07     600.0 MB   HGT
-    211  00  ZHQU60   0                        EOM
-  00001C02  0760FF80  0764023F...0001005A       07     575.0 MB   HGT
-    211  00  ZHQU57   0                        EOM
-  00001C02  0760FF80  07640226...0001005A       07     550.0 MB   HGT
-    211  00  ZHQU55   0                        EOM
-  00001C02  0760FF80  0764020D...0001005A       07     525.0 MB   HGT
-    211  00  ZHQU52   0                        EOM
-  00001C02  0760FF80  076401F4...0001005A       07     500.0 MB   HGT           
-    211  00  ZHQU50   0                        EOM
-  00001C02  0760FF80  076401C2...0001005A       07     450.0 MB   HGT
-    211  00  ZHQU45   0                        EOM
-  00001C02  0760FF80  07640190...0001005A       07     400.0 MB   HGT           
-    211  00  ZHQU40   0                        EOM
-  00001C02  0760FF80  0764015E...0001005A       07     350.0 MB   HGT
-    211  00  ZHQU35   0                        EOM
-  00001C02  0760FF80  0764012C...0001005A       07     300.0 MB   HGT           
-    211  00  ZHQU30   0                        EOM
-  00001C02  0760FF80  076400FA...0001005A       07     250.0 MB   HGT           
-    211  00  ZHQU25   0                        EOM
-  00001C02  0760FF80  076400C8...0001005A       07     200.0 MB   HGT           
-    211  00  ZHQU20   0                        EOM
-  00001C02  0760FF80  07640096...0001005A       07     150.0 MB   HGT           
-    211  00  ZHQU15   0                        EOM
-  00001C02  0760FF80  07640064...0001005A       07     100.0 MB   HGT           
-    211  00  ZHQU10   0                        EOM
-  00001C02  0760FF80  216403E8...0001005A       33    1000.0 MB   U GRD
-    211  00  ZUQU99   0                        EOM
-  00001C02  0760FF80  216403CF...0001005A       33     975.0 MB   U GRD
-    211  00  ZUQU93   0                        EOM
-  00001C02  0760FF80  216403B6...0001005A       33     950.0 MB   U GRD
-    211  00  ZUQU95   0                        EOM
-  00001C02  0760FF80  2164039D...0001005A       33     925.0 MB   U GRD
-    211  00  ZUQU92   0                        EOM
-  00001C02  0760FF80  21640384...0001005A       33     900.0 MB   U GRD
-    211  00  ZUQU90   0                        EOM
-  00001C02  0760FF80  2164036B...0001005A       33     875.0 MB   U GRD
-    211  00  ZUQU91   0                        EOM
-  00001C02  0760FF80  21640352...0001005A       33     850.0 MB   U GRD         
-    211  00  ZUQU85   0                        EOM
-  00001C02  0760FF80  21640339...0001005A       33     825.0 MB   U GRD
-    211  00  ZUQU82   0                        EOM
-  00001C02  0760FF80  21640320...0001005A       33     800.0 MB   U GRD
-    211  00  ZUQU80   0                        EOM
-  00001C02  0760FF80  21640307...0001005A       33     775.0 MB   U GRD
-    211  00  ZUQU77   0                        EOM
-  00001C02  0760FF80  216402EE...0001005A       33     750.0 MB   U GRD
-    211  00  ZUQU75   0                        EOM
-  00001C02  0760FF80  216402D5...0001005A       33     725.0 MB   U GRD
-    211  00  ZUQU72   0                        EOM
-  00001C02  0760FF80  216402BC...0001005A       33     700.0 MB   U GRD         
-    211  00  ZUQU70   0                        EOM
-  00001C02  0760FF80  216402A3...0001005A       33     675.0 MB   U GRD
-    211  00  ZUQU67   0                        EOM
-  00001C02  0760FF80  2164028A...0001005A       33     650.0 MB   U GRD
-    211  00  ZUQU65   0                        EOM
-  00001C02  0760FF80  21640271...0001005A       33     625.0 MB   U GRD
-    211  00  ZUQU62   0                        EOM
-  00001C02  0760FF80  21640258...0001005A       33     600.0 MB   U GRD
-    211  00  ZUQU60   0                        EOM
-  00001C02  0760FF80  2164023F...0001005A       33     575.0 MB   U GRD
-    211  00  ZUQU57   0                        EOM
-  00001C02  0760FF80  21640226...0001005A       33     550.0 MB   U GRD
-    211  00  ZUQU55   0                        EOM
-  00001C02  0760FF80  2164020D...0001005A       33     525.0 MB   U GRD
-    211  00  ZUQU52   0                        EOM
-  00001C02  0760FF80  216401F4...0001005A       33     500.0 MB   U GRD         
-    211  00  ZUQU50   0                        EOM
-  00001C02  0760FF80  216401C2...0001005A       33     450.0 MB   U GRD
-    211  00  ZUQU45   0                        EOM
-  00001C02  0760FF80  21640190...0001005A       33     400.0 MB   U GRD         
-    211  00  ZUQU40   0                        EOM
-  00001C02  0760FF80  2164015E...0001005A       33     350.0 MB   U GRD
-    211  00  ZUQU35   0                        EOM
-  00001C02  0760FF80  2164012C...0001005A       33     300.0 MB   U GRD         
-    211  00  ZUQU30   0                        EOM
-  00001C02  0760FF80  216400FA...0001005A       33     250.0 MB   U GRD         
-    211  00  ZUQU25   0                        EOM
-  00001C02  0760FF80  216400C8...0001005A       33     200.0 MB   U GRD         
-    211  00  ZUQU20   0                        EOM
-  00001C02  0760FF80  21640096...0001005A       33     150.0 MB   U GRD         
-    211  00  ZUQU15   0                        EOM
-  00001C02  0760FF80  21640064...0001005A       33     100.0 MB   U GRD         
-    211  00  ZUQU10   0                        EOM
-  00001C02  0760FF80  226403E8...0001005A       34    1000.0 MB   V GRD
-    211  00  ZVQU99   0                        EOM
-  00001C02  0760FF80  226403CF...0001005A       34     975.0 MB   V GRD
-    211  00  ZVQU93   0                        EOM
-  00001C02  0760FF80  226403B6...0001005A       34     950.0 MB   V GRD
-    211  00  ZVQU95   0                        EOM
-  00001C02  0760FF80  2264039D...0001005A       34     925.0 MB   V GRD
-    211  00  ZVQU92   0                        EOM
-  00001C02  0760FF80  22640384...0001005A       34     900.0 MB   V GRD
-    211  00  ZVQU90   0                        EOM
-  00001C02  0760FF80  2264036B...0001005A       34     875.0 MB   V GRD
-    211  00  ZVQU91   0                        EOM
-  00001C02  0760FF80  22640352...0001005A       34     850.0 MB   V GRD         
-    211  00  ZVQU85   0                        EOM
-  00001C02  0760FF80  22640339...0001005A       34     825.0 MB   V GRD
-    211  00  ZVQU82   0                        EOM
-  00001C02  0760FF80  22640320...0001005A       34     800.0 MB   V GRD
-    211  00  ZVQU80   0                        EOM
-  00001C02  0760FF80  22640307...0001005A       34     775.0 MB   V GRD
-    211  00  ZVQU77   0                        EOM
-  00001C02  0760FF80  226402EE...0001005A       34     750.0 MB   V GRD
-    211  00  ZVQU75   0                        EOM
-  00001C02  0760FF80  226402D5...0001005A       34     725.0 MB   V GRD
-    211  00  ZVQU72   0                        EOM
-  00001C02  0760FF80  226402BC...0001005A       34     700.0 MB   V GRD         
-    211  00  ZVQU70   0                        EOM
-  00001C02  0760FF80  226402A3...0001005A       34     675.0 MB   V GRD
-    211  00  ZVQU67   0                        EOM
-  00001C02  0760FF80  2264028A...0001005A       34     650.0 MB   V GRD
-    211  00  ZVQU65   0                        EOM
-  00001C02  0760FF80  22640271...0001005A       34     625.0 MB   V GRD
-    211  00  ZVQU62   0                        EOM
-  00001C02  0760FF80  22640258...0001005A       34     600.0 MB   V GRD
-    211  00  ZVQU60   0                        EOM
-  00001C02  0760FF80  2264023F...0001005A       34     575.0 MB   V GRD
-    211  00  ZVQU57   0                        EOM
-  00001C02  0760FF80  22640226...0001005A       34     550.0 MB   V GRD
-    211  00  ZVQU55   0                        EOM
-  00001C02  0760FF80  2264020D...0001005A       34     525.0 MB   V GRD
-    211  00  ZVQU52   0                        EOM
-  00001C02  0760FF80  226401F4...0001005A       34     500.0 MB   V GRD         
-    211  00  ZVQU50   0                        EOM
-  00001C02  0760FF80  226401C2...0001005A       34     450.0 MB   V GRD
-    211  00  ZVQU45   0                        EOM
-  00001C02  0760FF80  22640190...0001005A       34     400.0 MB   V GRD         
-    211  00  ZVQU40   0                        EOM
-  00001C02  0760FF80  2264015E...0001005A       34     350.0 MB   V GRD
-    211  00  ZVQU35   0                        EOM
-  00001C02  0760FF80  2264012C...0001005A       34     300.0 MB   V GRD         
-    211  00  ZVQU30   0                        EOM
-  00001C02  0760FF80  226400FA...0001005A       34     250.0 MB   V GRD         
-    211  00  ZVQU25   0                        EOM
-  00001C02  0760FF80  226400C8...0001005A       34     200.0 MB   V GRD         
-    211  00  ZVQU20   0                        EOM
-  00001C02  0760FF80  22640096...0001005A       34     150.0 MB   V GRD         
-    211  00  ZVQU15   0                        EOM
-  00001C02  0760FF80  22640064...0001005A       34     100.0 MB   V GRD         
-    211  00  ZVQU10   0                        EOM
-  00001C02  0760FF80  02660000...0001005A       02           MSL  PRMSL         
-    211  00  ZPQU89   0                        EOM
-  00001C02  0760FF80  346403E8...0001005A       52    1000.0 MB   R H
-    211  00  ZRQU99   0                        EOM
-  00001C02  0760FF80  346403CF...0001005A       52     975.0 MB   R H
-    211  00  ZRQU93   0                        EOM
-  00001C02  0760FF80  346403B6...0001005A       52     950.0 MB   R H
-    211  00  ZRQU95   0                        EOM
-  00001C02  0760FF80  3464039D...0001005A       52     925.0 MB   R H
-    211  00  ZRQU92   0                        EOM
-  00001C02  0760FF80  34640384...0001005A       52     900.0 MB   R H
-    211  00  ZRQU90   0                        EOM
-  00001C02  0760FF80  3464036B...0001005A       52     875.0 MB   R H
-    211  00  ZRQU91   0                        EOM
-  00001C02  0760FF80  34640352...0001005A       52     850.0 MB   R H           
-    211  00  ZRQU85   0                        EOM
-  00001C02  0760FF80  34640339...0001005A       52     825.0 MB   R H
-    211  00  ZRQU82   0                        EOM
-  00001C02  0760FF80  34640320...0001005A       52     800.0 MB   R H
-    211  00  ZRQU80   0                        EOM
-  00001C02  0760FF80  34640307...0001005A       52     775.0 MB   R H
-    211  00  ZRQU77   0                        EOM
-  00001C02  0760FF80  346402EE...0001005A       52     750.0 MB   R H
-    211  00  ZRQU75   0                        EOM
-  00001C02  0760FF80  346402D5...0001005A       52     725.0 MB   R H
-    211  00  ZRQU72   0                        EOM
-  00001C02  0760FF80  346402BC...0001005A       52     700.0 MB   R H           
-    211  00  ZRQU70   0                        EOM
-  00001C02  0760FF80  346402A3...0001005A       52     675.0 MB   R H
-    211  00  ZRQU67   0                        EOM
-  00001C02  0760FF80  3464028A...0001005A       52     650.0 MB   R H
-    211  00  ZRQU65   0                        EOM
-  00001C02  0760FF80  34640271...0001005A       52     625.0 MB   R H
-    211  00  ZRQU62   0                        EOM
-  00001C02  0760FF80  34640258...0001005A       52     600.0 MB   R H
-    211  00  ZRQU60   0                        EOM
-  00001C02  0760FF80  3464023F...0001005A       52     575.0 MB   R H
-    211  00  ZRQU57   0                        EOM
-  00001C02  0760FF80  34640226...0001005A       52     550.0 MB   R H
-    211  00  ZRQU55   0                        EOM
-  00001C02  0760FF80  3464020D...0001005A       52     525.0 MB   R H
-    211  00  ZRQU52   0                        EOM
-  00001C02  0760FF80  346401F4...0001005A       52     500.0 MB   R H           
-    211  00  ZRQU50   0                        EOM
-  00001C02  0760FF80  346401C2...0001005A       52     450.0 MB   R H
-    211  00  ZRQU45   0                        EOM
-  00001C02  0760FF80  34640190...0001005A       52     400.0 MB   R H           
-    211  00  ZRQU40   0                        EOM
-  00001C02  0760FF80  3464015E...0001005A       52     350.0 MB   R H
-    211  00  ZRQU35   0                        EOM
-  00001C02  0760FF80  3464012C...0001005A       52     300.0 MB   R H           
-    211  00  ZRQU30   0                        EOM
-  00001C02  0760FF80  346400FA...0001005A       52     250.0 MB   R H
-    211  00  ZRQU25   0                        EOM
-  00001C02  0760FF80  346400C8...0001005A       52     200.0 MB   R H
-    211  00  ZRQU20   0                        EOM
-  00001C02  0760FF80  34640096...0001005A       52     150.0 MB   R H
-    211  00  ZRQU15   0                        EOM
-  00001C02  0760FF80  34640064...0001005A       52     100.0 MB   R H
-    211  00  ZRQU10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001005A       11    1000.0 MB   TMP
-    211  00  ZTQU99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001005A       11     975.0 MB   TMP
-    211  00  ZTQU93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001005A       11     950.0 MB   TMP
-    211  00  ZTQU95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001005A       11     925.0 MB   TMP
-    211  00  ZTQU92   0                        EOM
-  00001C02  0760FF80  0B640384...0001005A       11     900.0 MB   TMP
-    211  00  ZTQU90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001005A       11     875.0 MB   TMP
-    211  00  ZTQU91   0                        EOM
-  00001C02  0760FF80  0B640352...0001005A       11     850.0 MB   TMP           
-    211  00  ZTQU85   0                        EOM
-  00001C02  0760FF80  0B640339...0001005A       11     825.0 MB   TMP
-    211  00  ZTQU82   0                        EOM
-  00001C02  0760FF80  0B640320...0001005A       11     800.0 MB   TMP
-    211  00  ZTQU80   0                        EOM
-  00001C02  0760FF80  0B640307...0001005A       11     775.0 MB   TMP
-    211  00  ZTQU77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001005A       11     750.0 MB   TMP
-    211  00  ZTQU75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001005A       11     725.0 MB   TMP
-    211  00  ZTQU72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001005A       11     700.0 MB   TMP           
-    211  00  ZTQU70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001005A       11     675.0 MB   TMP
-    211  00  ZTQU67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001005A       11     650.0 MB   TMP
-    211  00  ZTQU65   0                        EOM
-  00001C02  0760FF80  0B640271...0001005A       11     625.0 MB   TMP
-    211  00  ZTQU62   0                        EOM
-  00001C02  0760FF80  0B640258...0001005A       11     600.0 MB   TMP
-    211  00  ZTQU60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001005A       11     575.0 MB   TMP
-    211  00  ZTQU57   0                        EOM
-  00001C02  0760FF80  0B640226...0001005A       11     550.0 MB   TMP
-    211  00  ZTQU55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001005A       11     525.0 MB   TMP
-    211  00  ZTQU52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001005A       11     500.0 MB   TMP           
-    211  00  ZTQU50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001005A       11     450.0 MB   TMP
-    211  00  ZTQU45   0                        EOM
-  00001C02  0760FF80  0B640190...0001005A       11     400.0 MB   TMP           
-    211  00  ZTQU40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001005A       11     350.0 MB   TMP
-    211  00  ZTQU35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001005A       11     300.0 MB   TMP           
-    211  00  ZTQU30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001005A       11     250.0 MB   TMP           
-    211  00  ZTQU25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001005A       11     200.0 MB   TMP           
-    211  00  ZTQU20   0                        EOM
-  00001C02  0760FF80  0B640096...0001005A       11     150.0 MB   TMP           
-    211  00  ZTQU15   0                        EOM
-  00001C02  0760FF80  0B640064...0001005A       11     100.0 MB   TMP           
-    211  00  ZTQU10   0                        EOM
-  00001C02  0760FF80  28640352...0001005A       40     850.0 MB  DZDT           
-    211  00  ZOQU85   0                        EOM
-  00001C02  0760FF80  286402BC...0001005A       40     700.0 MB  DZDT           
-    211  00  ZOQU70   0                        EOM
-  00001C02  0760FF80  286401F4...0001005A       40     500.0 MB  DZDT           
-    211  00  ZOQU50   0                        EOM
-  00001C02  0760FF80  28640190...0001005A       40     400.0 MB  DZDT           
-    211  00  ZOQU40   0                        EOM
-  00001C02  0760FF80  2864012C...0001005A       40     300.0 MB  DZDT           
-    211  00  ZOQU30   0                        EOM
-  00001C02  0760FF80  286400FA...0001005A       40     250.0 MB  DZDT           
-    211  00  ZOQU25   0                        EOM
-  00001C02  0760FF80  286400C8...0001005A       40     200.0 MB  DZDT           
-    211  00  ZOQU20   0                        EOM
-  00001C02  0760FF80  28640096...0001005A       40     150.0 MB  DZDT           
-    211  00  ZOQU15   0                        EOM
-  00001C02  0760FF80  28640064...0001005A       40     100.0 MB  DZDT           
-    211  00  ZOQU10   0                        EOM
-  00001C02  0760FF80  01010000...0001005A       01          SFC  PRES           
-    211  00  ZPQU98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001005A       52        44/100  R H           
-    211  00  ZRQU00   0                        EOM
-  00001C02  0760FF80  36C80000...0001005A       54          EATM  P WAT         
-    211  00  ZFQU00   0                        EOM
-  00001C02  0760FF80  0B690002...0001005A       11          2m/SFC TMP         
-    211  00  ZTQU98   0                        EOM
-  00001C02  0760FF80  34741E00...0001005A       52      BNDRY/SPD  R H          
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  0B070000...0001005A       11            TRO TMP           
-    211  00  ZTQU97   0                        EOM
-  00001C02  0760FF80  01070000...0001005A       01            TRO PRES          
-    211  00  ZPQU97   0                        EOM
-  00001C02  0760FF80  21741E00...0001005A       33           SPD  U GRD         
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  22741E00...0001005A       34           SPD  V GRD         
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  21070000...0001005A       33            TRO U GRD         
-    211  00  ZUQU97   0                        EOM
-  00001C02  0760FF80  22070000...0001005A       34            TRO V GRD         
-    211  00  ZVQU97   0                        EOM
-  00001C02  0760FF80  88070000...0001005A      136            TRO VW SH         
-    211  00  ZBQU97   0                        EOM
-  00001C02  0760FF80  3D010000...0001005A       61            SFC A PCP         
-    211  00  ZEQU98   0                        EOM
-  00001C02  0760FF80  83010000...0001005A      131            SFC LFT X         
-    211  00  ZXQU98   0                        EOM
-  00001C02  0760FF80  29640352...0001005A       41    850.0 MB    ABS V         
-    211  00  ZCQU85   0                        EOM
-  00001C02  0760FF80  296402BC...0001005A       41    700.0 MB    ABS V         
-    211  00  ZCQU70   0                        EOM
-  00001C02  0760FF80  296401F4...0001005A       41    500.0 MB    ABS V         
-    211  00  ZCQU50   0                        EOM
-  00001C02  0760FF80  296400FA...0001005A       41    250.0 MB    ABS V         
-    211  00  ZCQU25   0                        EOM
-  00001C02  0760FF80  9D010000...0001005A      157          SFC   CAPE
-    211  00  ZWQU98   0                        EOM
-  00001C02  0760FF80  9C010000...0001005A      156          SFC   CIN
-    211  00  ZYQU98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001005A      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQU86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001005A      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQU86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001005A       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001005A       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001005A       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001005A       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  0B749678...0001005A       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001005A       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQU86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001005A       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001005A       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  3474785A...0001005A       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  34749678...0001005A       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  3474B496...0001005A       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQU86   0                        EOM
-  00001C02  0760FF80  21741E00...0001005A       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001005A       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001005A       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  2174785A...0001005A       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  21749678...0001005A       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  2174B496...0001005A       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQU86   0                        EOM
-  00001C02  0760FF80  22741E00...0001005A       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001005A       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001005A       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  2274785A...0001005A       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  22749678...0001005A       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  2274B496...0001005A       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQU86   0                        EOM
-  00001C02  0760FF80  0B690002...0001005A       11    2  HTGL     TMP
-    211  00  ZTQU98   0                        EOM
-  00001C02  0760FF80  34690002...0001005A       52    2  HTGL     R H
-    211  00  ZRQU98   0                        EOM
-  00001C02  0760FF80  2169000A...0001005A       33   10  HTGL     U GRD
-    211  00  ZUQU98   0                        EOM
-  00001C02  0760FF80  2269000A...0001005A       34   10  HTGL     V GRD
-    211  00  ZVQU98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs096.211 b/parm/wmo/grib_awpgfs096.211
deleted file mode 100755
index 9045788e83..0000000000
--- a/parm/wmo/grib_awpgfs096.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010060       07    1000.0 MB   HGT           
-    211  00  YHQM99   0                        EOM
-  00001C02  0760FF80  076403CF...00010060       07     975.0 MB   HGT
-    211  00  YHQM93   0                        EOM
-  00001C02  0760FF80  076403B6...00010060       07     950.0 MB   HGT
-    211  00  YHQM95   0                        EOM
-  00001C02  0760FF80  0764039D...00010060       07     925.0 MB   HGT
-    211  00  YHQM92   0                        EOM
-  00001C02  0760FF80  07640384...00010060       07     900.0 MB   HGT
-    211  00  YHQM90   0                        EOM
-  00001C02  0760FF80  0764036B...00010060       07     875.0 MB   HGT
-    211  00  YHQM91   0                        EOM
-  00001C02  0760FF80  07640352...00010060       07     850.0 MB   HGT           
-    211  00  YHQM85   0                        EOM
-  00001C02  0760FF80  07640339...00010060       07     825.0 MB   HGT
-    211  00  YHQM82   0                        EOM
-  00001C02  0760FF80  07640320...00010060       07     800.0 MB   HGT
-    211  00  YHQM80   0                        EOM
-  00001C02  0760FF80  07640307...00010060       07     775.0 MB   HGT
-    211  00  YHQM77   0                        EOM
-  00001C02  0760FF80  076402EE...00010060       07     750.0 MB   HGT
-    211  00  YHQM75   0                        EOM
-  00001C02  0760FF80  076402D5...00010060       07     725.0 MB   HGT
-    211  00  YHQM72   0                        EOM
-  00001C02  0760FF80  076402BC...00010060       07     700.0 MB   HGT           
-    211  00  YHQM70   0                        EOM
-  00001C02  0760FF80  076402A3...00010060       07     675.0 MB   HGT
-    211  00  YHQM67   0                        EOM
-  00001C02  0760FF80  0764028A...00010060       07     650.0 MB   HGT
-    211  00  YHQM65   0                        EOM
-  00001C02  0760FF80  07640271...00010060       07     625.0 MB   HGT
-    211  00  YHQM62   0                        EOM
-  00001C02  0760FF80  07640258...00010060       07     600.0 MB   HGT
-    211  00  YHQM60   0                        EOM
-  00001C02  0760FF80  0764023F...00010060       07     575.0 MB   HGT
-    211  00  YHQM57   0                        EOM
-  00001C02  0760FF80  07640226...00010060       07     550.0 MB   HGT
-    211  00  YHQM55   0                        EOM
-  00001C02  0760FF80  0764020D...00010060       07     525.0 MB   HGT
-    211  00  YHQM52   0                        EOM
-  00001C02  0760FF80  076401F4...00010060       07     500.0 MB   HGT           
-    211  00  YHQM50   0                        EOM
-  00001C02  0760FF80  076401C2...00010060       07     450.0 MB   HGT
-    211  00  YHQM45   0                        EOM
-  00001C02  0760FF80  07640190...00010060       07     400.0 MB   HGT           
-    211  00  YHQM40   0                        EOM
-  00001C02  0760FF80  0764015E...00010060       07     350.0 MB   HGT
-    211  00  YHQM35   0                        EOM
-  00001C02  0760FF80  0764012C...00010060       07     300.0 MB   HGT           
-    211  00  YHQM30   0                        EOM
-  00001C02  0760FF80  076400FA...00010060       07     250.0 MB   HGT           
-    211  00  YHQM25   0                        EOM
-  00001C02  0760FF80  076400C8...00010060       07     200.0 MB   HGT           
-    211  00  YHQM20   0                        EOM
-  00001C02  0760FF80  07640096...00010060       07     150.0 MB   HGT           
-    211  00  YHQM15   0                        EOM
-  00001C02  0760FF80  07640064...00010060       07     100.0 MB   HGT           
-    211  00  YHQM10   0                        EOM
-  00001C02  0760FF80  216403E8...00010060       33    1000.0 MB   U GRD
-    211  00  YUQM99   0                        EOM
-  00001C02  0760FF80  216403CF...00010060       33     975.0 MB   U GRD
-    211  00  YUQM93   0                        EOM
-  00001C02  0760FF80  216403B6...00010060       33     950.0 MB   U GRD
-    211  00  YUQM95   0                        EOM
-  00001C02  0760FF80  2164039D...00010060       33     925.0 MB   U GRD
-    211  00  YUQM92   0                        EOM
-  00001C02  0760FF80  21640384...00010060       33     900.0 MB   U GRD
-    211  00  YUQM90   0                        EOM
-  00001C02  0760FF80  2164036B...00010060       33     875.0 MB   U GRD
-    211  00  YUQM91   0                        EOM
-  00001C02  0760FF80  21640352...00010060       33     850.0 MB   U GRD         
-    211  00  YUQM85   0                        EOM
-  00001C02  0760FF80  21640339...00010060       33     825.0 MB   U GRD
-    211  00  YUQM82   0                        EOM
-  00001C02  0760FF80  21640320...00010060       33     800.0 MB   U GRD
-    211  00  YUQM80   0                        EOM
-  00001C02  0760FF80  21640307...00010060       33     775.0 MB   U GRD
-    211  00  YUQM77   0                        EOM
-  00001C02  0760FF80  216402EE...00010060       33     750.0 MB   U GRD
-    211  00  YUQM75   0                        EOM
-  00001C02  0760FF80  216402D5...00010060       33     725.0 MB   U GRD
-    211  00  YUQM72   0                        EOM
-  00001C02  0760FF80  216402BC...00010060       33     700.0 MB   U GRD         
-    211  00  YUQM70   0                        EOM
-  00001C02  0760FF80  216402A3...00010060       33     675.0 MB   U GRD
-    211  00  YUQM67   0                        EOM
-  00001C02  0760FF80  2164028A...00010060       33     650.0 MB   U GRD
-    211  00  YUQM65   0                        EOM
-  00001C02  0760FF80  21640271...00010060       33     625.0 MB   U GRD
-    211  00  YUQM62   0                        EOM
-  00001C02  0760FF80  21640258...00010060       33     600.0 MB   U GRD
-    211  00  YUQM60   0                        EOM
-  00001C02  0760FF80  2164023F...00010060       33     575.0 MB   U GRD
-    211  00  YUQM57   0                        EOM
-  00001C02  0760FF80  21640226...00010060       33     550.0 MB   U GRD
-    211  00  YUQM55   0                        EOM
-  00001C02  0760FF80  2164020D...00010060       33     525.0 MB   U GRD
-    211  00  YUQM52   0                        EOM
-  00001C02  0760FF80  216401F4...00010060       33     500.0 MB   U GRD         
-    211  00  YUQM50   0                        EOM
-  00001C02  0760FF80  216401C2...00010060       33     450.0 MB   U GRD
-    211  00  YUQM45   0                        EOM
-  00001C02  0760FF80  21640190...00010060       33     400.0 MB   U GRD         
-    211  00  YUQM40   0                        EOM
-  00001C02  0760FF80  2164015E...00010060       33     350.0 MB   U GRD
-    211  00  YUQM35   0                        EOM
-  00001C02  0760FF80  2164012C...00010060       33     300.0 MB   U GRD         
-    211  00  YUQM30   0                        EOM
-  00001C02  0760FF80  216400FA...00010060       33     250.0 MB   U GRD         
-    211  00  YUQM25   0                        EOM
-  00001C02  0760FF80  216400C8...00010060       33     200.0 MB   U GRD         
-    211  00  YUQM20   0                        EOM
-  00001C02  0760FF80  21640096...00010060       33     150.0 MB   U GRD         
-    211  00  YUQM15   0                        EOM
-  00001C02  0760FF80  21640064...00010060       33     100.0 MB   U GRD         
-    211  00  YUQM10   0                        EOM
-  00001C02  0760FF80  226403E8...00010060       34    1000.0 MB   V GRD
-    211  00  YVQM99   0                        EOM
-  00001C02  0760FF80  226403CF...00010060       34     975.0 MB   V GRD
-    211  00  YVQM93   0                        EOM
-  00001C02  0760FF80  226403B6...00010060       34     950.0 MB   V GRD
-    211  00  YVQM95   0                        EOM
-  00001C02  0760FF80  2264039D...00010060       34     925.0 MB   V GRD
-    211  00  YVQM92   0                        EOM
-  00001C02  0760FF80  22640384...00010060       34     900.0 MB   V GRD
-    211  00  YVQM90   0                        EOM
-  00001C02  0760FF80  2264036B...00010060       34     875.0 MB   V GRD
-    211  00  YVQM91   0                        EOM
-  00001C02  0760FF80  22640352...00010060       34     850.0 MB   V GRD         
-    211  00  YVQM85   0                        EOM
-  00001C02  0760FF80  22640339...00010060       34     825.0 MB   V GRD
-    211  00  YVQM82   0                        EOM
-  00001C02  0760FF80  22640320...00010060       34     800.0 MB   V GRD
-    211  00  YVQM80   0                        EOM
-  00001C02  0760FF80  22640307...00010060       34     775.0 MB   V GRD
-    211  00  YVQM77   0                        EOM
-  00001C02  0760FF80  226402EE...00010060       34     750.0 MB   V GRD
-    211  00  YVQM75   0                        EOM
-  00001C02  0760FF80  226402D5...00010060       34     725.0 MB   V GRD
-    211  00  YVQM72   0                        EOM
-  00001C02  0760FF80  226402BC...00010060       34     700.0 MB   V GRD         
-    211  00  YVQM70   0                        EOM
-  00001C02  0760FF80  226402A3...00010060       34     675.0 MB   V GRD
-    211  00  YVQM67   0                        EOM
-  00001C02  0760FF80  2264028A...00010060       34     650.0 MB   V GRD
-    211  00  YVQM65   0                        EOM
-  00001C02  0760FF80  22640271...00010060       34     625.0 MB   V GRD
-    211  00  YVQM62   0                        EOM
-  00001C02  0760FF80  22640258...00010060       34     600.0 MB   V GRD
-    211  00  YVQM60   0                        EOM
-  00001C02  0760FF80  2264023F...00010060       34     575.0 MB   V GRD
-    211  00  YVQM57   0                        EOM
-  00001C02  0760FF80  22640226...00010060       34     550.0 MB   V GRD
-    211  00  YVQM55   0                        EOM
-  00001C02  0760FF80  2264020D...00010060       34     525.0 MB   V GRD
-    211  00  YVQM52   0                        EOM
-  00001C02  0760FF80  226401F4...00010060       34     500.0 MB   V GRD         
-    211  00  YVQM50   0                        EOM
-  00001C02  0760FF80  226401C2...00010060       34     450.0 MB   V GRD
-    211  00  YVQM45   0                        EOM
-  00001C02  0760FF80  22640190...00010060       34     400.0 MB   V GRD         
-    211  00  YVQM40   0                        EOM
-  00001C02  0760FF80  2264015E...00010060       34     350.0 MB   V GRD
-    211  00  YVQM35   0                        EOM
-  00001C02  0760FF80  2264012C...00010060       34     300.0 MB   V GRD         
-    211  00  YVQM30   0                        EOM
-  00001C02  0760FF80  226400FA...00010060       34     250.0 MB   V GRD         
-    211  00  YVQM25   0                        EOM
-  00001C02  0760FF80  226400C8...00010060       34     200.0 MB   V GRD         
-    211  00  YVQM20   0                        EOM
-  00001C02  0760FF80  22640096...00010060       34     150.0 MB   V GRD         
-    211  00  YVQM15   0                        EOM
-  00001C02  0760FF80  22640064...00010060       34     100.0 MB   V GRD         
-    211  00  YVQM10   0                        EOM
-  00001C02  0760FF80  02660000...00010060       02           MSL  PRMSL         
-    211  00  YPQM89   0                        EOM
-  00001C02  0760FF80  346403E8...00010060       52    1000.0 MB   R H
-    211  00  YRQM99   0                        EOM
-  00001C02  0760FF80  346403CF...00010060       52     975.0 MB   R H
-    211  00  YRQM93   0                        EOM
-  00001C02  0760FF80  346403B6...00010060       52     950.0 MB   R H
-    211  00  YRQM95   0                        EOM
-  00001C02  0760FF80  3464039D...00010060       52     925.0 MB   R H
-    211  00  YRQM92   0                        EOM
-  00001C02  0760FF80  34640384...00010060       52     900.0 MB   R H
-    211  00  YRQM90   0                        EOM
-  00001C02  0760FF80  3464036B...00010060       52     875.0 MB   R H
-    211  00  YRQM91   0                        EOM
-  00001C02  0760FF80  34640352...00010060       52     850.0 MB   R H           
-    211  00  YRQM85   0                        EOM
-  00001C02  0760FF80  34640339...00010060       52     825.0 MB   R H
-    211  00  YRQM82   0                        EOM
-  00001C02  0760FF80  34640320...00010060       52     800.0 MB   R H
-    211  00  YRQM80   0                        EOM
-  00001C02  0760FF80  34640307...00010060       52     775.0 MB   R H
-    211  00  YRQM77   0                        EOM
-  00001C02  0760FF80  346402EE...00010060       52     750.0 MB   R H
-    211  00  YRQM75   0                        EOM
-  00001C02  0760FF80  346402D5...00010060       52     725.0 MB   R H
-    211  00  YRQM72   0                        EOM
-  00001C02  0760FF80  346402BC...00010060       52     700.0 MB   R H           
-    211  00  YRQM70   0                        EOM
-  00001C02  0760FF80  346402A3...00010060       52     675.0 MB   R H
-    211  00  YRQM67   0                        EOM
-  00001C02  0760FF80  3464028A...00010060       52     650.0 MB   R H
-    211  00  YRQM65   0                        EOM
-  00001C02  0760FF80  34640271...00010060       52     625.0 MB   R H
-    211  00  YRQM62   0                        EOM
-  00001C02  0760FF80  34640258...00010060       52     600.0 MB   R H
-    211  00  YRQM60   0                        EOM
-  00001C02  0760FF80  3464023F...00010060       52     575.0 MB   R H
-    211  00  YRQM57   0                        EOM
-  00001C02  0760FF80  34640226...00010060       52     550.0 MB   R H
-    211  00  YRQM55   0                        EOM
-  00001C02  0760FF80  3464020D...00010060       52     525.0 MB   R H
-    211  00  YRQM52   0                        EOM
-  00001C02  0760FF80  346401F4...00010060       52     500.0 MB   R H           
-    211  00  YRQM50   0                        EOM
-  00001C02  0760FF80  346401C2...00010060       52     450.0 MB   R H
-    211  00  YRQM45   0                        EOM
-  00001C02  0760FF80  34640190...00010060       52     400.0 MB   R H           
-    211  00  YRQM40   0                        EOM
-  00001C02  0760FF80  3464015E...00010060       52     350.0 MB   R H
-    211  00  YRQM35   0                        EOM
-  00001C02  0760FF80  3464012C...00010060       52     300.0 MB   R H           
-    211  00  YRQM30   0                        EOM
-  00001C02  0760FF80  346400FA...00010060       52     250.0 MB   R H
-    211  00  YRQM25   0                        EOM
-  00001C02  0760FF80  346400C8...00010060       52     200.0 MB   R H
-    211  00  YRQM20   0                        EOM
-  00001C02  0760FF80  34640096...00010060       52     150.0 MB   R H
-    211  00  YRQM15   0                        EOM
-  00001C02  0760FF80  34640064...00010060       52     100.0 MB   R H
-    211  00  YRQM10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010060       11    1000.0 MB   TMP
-    211  00  YTQM99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010060       11     975.0 MB   TMP
-    211  00  YTQM93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010060       11     950.0 MB   TMP
-    211  00  YTQM95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010060       11     925.0 MB   TMP
-    211  00  YTQM92   0                        EOM
-  00001C02  0760FF80  0B640384...00010060       11     900.0 MB   TMP
-    211  00  YTQM90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010060       11     875.0 MB   TMP
-    211  00  YTQM91   0                        EOM
-  00001C02  0760FF80  0B640352...00010060       11     850.0 MB   TMP           
-    211  00  YTQM85   0                        EOM
-  00001C02  0760FF80  0B640339...00010060       11     825.0 MB   TMP
-    211  00  YTQM82   0                        EOM
-  00001C02  0760FF80  0B640320...00010060       11     800.0 MB   TMP
-    211  00  YTQM80   0                        EOM
-  00001C02  0760FF80  0B640307...00010060       11     775.0 MB   TMP
-    211  00  YTQM77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010060       11     750.0 MB   TMP
-    211  00  YTQM75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010060       11     725.0 MB   TMP
-    211  00  YTQM72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010060       11     700.0 MB   TMP           
-    211  00  YTQM70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010060       11     675.0 MB   TMP
-    211  00  YTQM67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010060       11     650.0 MB   TMP
-    211  00  YTQM65   0                        EOM
-  00001C02  0760FF80  0B640271...00010060       11     625.0 MB   TMP
-    211  00  YTQM62   0                        EOM
-  00001C02  0760FF80  0B640258...00010060       11     600.0 MB   TMP
-    211  00  YTQM60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010060       11     575.0 MB   TMP
-    211  00  YTQM57   0                        EOM
-  00001C02  0760FF80  0B640226...00010060       11     550.0 MB   TMP
-    211  00  YTQM55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010060       11     525.0 MB   TMP
-    211  00  YTQM52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010060       11     500.0 MB   TMP           
-    211  00  YTQM50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010060       11     450.0 MB   TMP
-    211  00  YTQM45   0                        EOM
-  00001C02  0760FF80  0B640190...00010060       11     400.0 MB   TMP           
-    211  00  YTQM40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010060       11     350.0 MB   TMP
-    211  00  YTQM35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010060       11     300.0 MB   TMP           
-    211  00  YTQM30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010060       11     250.0 MB   TMP           
-    211  00  YTQM25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010060       11     200.0 MB   TMP           
-    211  00  YTQM20   0                        EOM
-  00001C02  0760FF80  0B640096...00010060       11     150.0 MB   TMP           
-    211  00  YTQM15   0                        EOM
-  00001C02  0760FF80  0B640064...00010060       11     100.0 MB   TMP           
-    211  00  YTQM10   0                        EOM
-  00001C02  0760FF80  28640352...00010060       40     850.0 MB  DZDT           
-    211  00  YOQM85   0                        EOM
-  00001C02  0760FF80  286402BC...00010060       40     700.0 MB  DZDT           
-    211  00  YOQM70   0                        EOM
-  00001C02  0760FF80  286401F4...00010060       40     500.0 MB  DZDT           
-    211  00  YOQM50   0                        EOM
-  00001C02  0760FF80  28640190...00010060       40     400.0 MB  DZDT           
-    211  00  YOQM40   0                        EOM
-  00001C02  0760FF80  2864012C...00010060       40     300.0 MB  DZDT           
-    211  00  YOQM30   0                        EOM
-  00001C02  0760FF80  286400FA...00010060       40     250.0 MB  DZDT           
-    211  00  YOQM25   0                        EOM
-  00001C02  0760FF80  286400C8...00010060       40     200.0 MB  DZDT           
-    211  00  YOQM20   0                        EOM
-  00001C02  0760FF80  28640096...00010060       40     150.0 MB  DZDT           
-    211  00  YOQM15   0                        EOM
-  00001C02  0760FF80  28640064...00010060       40     100.0 MB  DZDT           
-    211  00  YOQM10   0                        EOM
-  00001C02  0760FF80  01010000...00010060       01          SFC  PRES           
-    211  00  YPQM98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010060       52        44/100  R H           
-    211  00  YRQM00   0                        EOM
-  00001C02  0760FF80  36C80000...00010060       54          EATM  P WAT         
-    211  00  YFQM00   0                        EOM
-  00001C02  0760FF80  0B690002...00010060       11          2m/SFC TMP         
-    211  00  YTQM98   0                        EOM
-  00001C02  0760FF80  34741E00...00010060       52     BNDRY/SPD  R H           
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  0B070000...00010060       11            TRO TMP           
-    211  00  YTQM97   0                        EOM
-  00001C02  0760FF80  01070000...00010060       01            TRO PRES          
-    211  00  YPQM97   0                        EOM
-  00001C02  0760FF80  21741E00...00010060       33           SPD  U GRD         
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  22741E00...00010060       34           SPD  V GRD         
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  21070000...00010060       33            TRO U GRD         
-    211  00  YUQM97   0                        EOM
-  00001C02  0760FF80  22070000...00010060       34            TRO V GRD         
-    211  00  YVQM97   0                        EOM
-  00001C02  0760FF80  88070000...00010060      136            TRO VW SH         
-    211  00  YBQM97   0                        EOM
-  00001C02  0760FF80  3D010000...00010060       61            SFC A PCP         
-    211  00  YEQM98   0                        EOM
-  00001C02  0760FF80  83010000...00010060      131            SFC LFT X         
-    211  00  YXQM98   0                        EOM
-  00001C02  0760FF80  29640352...00010060       41    850.0 MB    ABS V         
-    211  00  YCQM85   0                        EOM
-  00001C02  0760FF80  296402BC...00010060       41    700.0 MB    ABS V         
-    211  00  YCQM70   0                        EOM
-  00001C02  0760FF80  296401F4...00010060       41    500.0 MB    ABS V         
-    211  00  YCQM50   0                        EOM
-  00001C02  0760FF80  296400FA...00010060       41    250.0 MB    ABS V         
-    211  00  YCQM25   0                        EOM
-  00001C02  0760FF80  9D010000...00010060      157          SFC   CAPE
-    211  00  YWQM98   0                        EOM
-  00001C02  0760FF80  9C010000...00010060      156          SFC   CIN
-    211  00  YYQM98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010060      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQM86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010060      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQM86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010060       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010060       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010060       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010060       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  0B749678...00010060       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010060       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQM86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010060       52   60 SPDY  30 SPDY  R H
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010060       52   90 SPDY  60 SPDY  R H
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  3474785A...00010060       52  120 SPDY  90 SPDY  R H
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  34749678...00010060       52  150 SPDY 120 SPDY  R H
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  3474B496...00010060       52  180 SPDY 150 SPDY  R H
-    211  00  YRQM86   0                        EOM
-  00001C02  0760FF80  21741E00...00010060       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010060       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010060       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  2174785A...00010060       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  21749678...00010060       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  2174B496...00010060       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQM86   0                        EOM
-  00001C02  0760FF80  22741E00...00010060       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010060       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010060       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  2274785A...00010060       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  22749678...00010060       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  2274B496...00010060       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQM86   0                        EOM
-  00001C02  0760FF80  0B690002...00010060       11    2  HTGL     TMP
-    211  00  YTQM98   0                        EOM
-  00001C02  0760FF80  34690002...00010060       52    2  HTGL     R H
-    211  00  YRQM98   0                        EOM
-  00001C02  0760FF80  2169000A...00010060       33   10  HTGL     U GRD
-    211  00  YUQM98   0                        EOM
-  00001C02  0760FF80  2269000A...00010060       34   10  HTGL     V GRD
-    211  00  YVQM98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs102.211 b/parm/wmo/grib_awpgfs102.211
deleted file mode 100755
index 02032ed8ae..0000000000
--- a/parm/wmo/grib_awpgfs102.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010066       07    1000.0 MB   HGT           
-    211  00  ZHQV99   0                        EOM
-  00001C02  0760FF80  076403CF...00010066       07     975.0 MB   HGT
-    211  00  ZHQV93   0                        EOM
-  00001C02  0760FF80  076403B6...00010066       07     950.0 MB   HGT
-    211  00  ZHQV95   0                        EOM
-  00001C02  0760FF80  0764039D...00010066       07     925.0 MB   HGT
-    211  00  ZHQV92   0                        EOM
-  00001C02  0760FF80  07640384...00010066       07     900.0 MB   HGT
-    211  00  ZHQV90   0                        EOM
-  00001C02  0760FF80  0764036B...00010066       07     875.0 MB   HGT
-    211  00  ZHQV91   0                        EOM
-  00001C02  0760FF80  07640352...00010066       07     850.0 MB   HGT           
-    211  00  ZHQV85   0                        EOM
-  00001C02  0760FF80  07640339...00010066       07     825.0 MB   HGT
-    211  00  ZHQV82   0                        EOM
-  00001C02  0760FF80  07640320...00010066       07     800.0 MB   HGT
-    211  00  ZHQV80   0                        EOM
-  00001C02  0760FF80  07640307...00010066       07     775.0 MB   HGT
-    211  00  ZHQV77   0                        EOM
-  00001C02  0760FF80  076402EE...00010066       07     750.0 MB   HGT
-    211  00  ZHQV75   0                        EOM
-  00001C02  0760FF80  076402D5...00010066       07     725.0 MB   HGT
-    211  00  ZHQV72   0                        EOM
-  00001C02  0760FF80  076402BC...00010066       07     700.0 MB   HGT           
-    211  00  ZHQV70   0                        EOM
-  00001C02  0760FF80  076402A3...00010066       07     675.0 MB   HGT
-    211  00  ZHQV67   0                        EOM
-  00001C02  0760FF80  0764028A...00010066       07     650.0 MB   HGT
-    211  00  ZHQV65   0                        EOM
-  00001C02  0760FF80  07640271...00010066       07     625.0 MB   HGT
-    211  00  ZHQV62   0                        EOM
-  00001C02  0760FF80  07640258...00010066       07     600.0 MB   HGT
-    211  00  ZHQV60   0                        EOM
-  00001C02  0760FF80  0764023F...00010066       07     575.0 MB   HGT
-    211  00  ZHQV57   0                        EOM
-  00001C02  0760FF80  07640226...00010066       07     550.0 MB   HGT
-    211  00  ZHQV55   0                        EOM
-  00001C02  0760FF80  0764020D...00010066       07     525.0 MB   HGT
-    211  00  ZHQV52   0                        EOM
-  00001C02  0760FF80  076401F4...00010066       07     500.0 MB   HGT           
-    211  00  ZHQV50   0                        EOM
-  00001C02  0760FF80  076401C2...00010066       07     450.0 MB   HGT
-    211  00  ZHQV45   0                        EOM
-  00001C02  0760FF80  07640190...00010066       07     400.0 MB   HGT           
-    211  00  ZHQV40   0                        EOM
-  00001C02  0760FF80  0764015E...00010066       07     350.0 MB   HGT
-    211  00  ZHQV35   0                        EOM
-  00001C02  0760FF80  0764012C...00010066       07     300.0 MB   HGT           
-    211  00  ZHQV30   0                        EOM
-  00001C02  0760FF80  076400FA...00010066       07     250.0 MB   HGT           
-    211  00  ZHQV25   0                        EOM
-  00001C02  0760FF80  076400C8...00010066       07     200.0 MB   HGT           
-    211  00  ZHQV20   0                        EOM
-  00001C02  0760FF80  07640096...00010066       07     150.0 MB   HGT           
-    211  00  ZHQV15   0                        EOM
-  00001C02  0760FF80  07640064...00010066       07     100.0 MB   HGT           
-    211  00  ZHQV10   0                        EOM
-  00001C02  0760FF80  216403E8...00010066       33    1000.0 MB   U GRD
-    211  00  ZUQV99   0                        EOM
-  00001C02  0760FF80  216403CF...00010066       33     975.0 MB   U GRD
-    211  00  ZUQV93   0                        EOM
-  00001C02  0760FF80  216403B6...00010066       33     950.0 MB   U GRD
-    211  00  ZUQV95   0                        EOM
-  00001C02  0760FF80  2164039D...00010066       33     925.0 MB   U GRD
-    211  00  ZUQV92   0                        EOM
-  00001C02  0760FF80  21640384...00010066       33     900.0 MB   U GRD
-    211  00  ZUQV90   0                        EOM
-  00001C02  0760FF80  2164036B...00010066       33     875.0 MB   U GRD
-    211  00  ZUQV91   0                        EOM
-  00001C02  0760FF80  21640352...00010066       33     850.0 MB   U GRD         
-    211  00  ZUQV85   0                        EOM
-  00001C02  0760FF80  21640339...00010066       33     825.0 MB   U GRD
-    211  00  ZUQV82   0                        EOM
-  00001C02  0760FF80  21640320...00010066       33     800.0 MB   U GRD
-    211  00  ZUQV80   0                        EOM
-  00001C02  0760FF80  21640307...00010066       33     775.0 MB   U GRD
-    211  00  ZUQV77   0                        EOM
-  00001C02  0760FF80  216402EE...00010066       33     750.0 MB   U GRD
-    211  00  ZUQV75   0                        EOM
-  00001C02  0760FF80  216402D5...00010066       33     725.0 MB   U GRD
-    211  00  ZUQV72   0                        EOM
-  00001C02  0760FF80  216402BC...00010066       33     700.0 MB   U GRD         
-    211  00  ZUQV70   0                        EOM
-  00001C02  0760FF80  216402A3...00010066       33     675.0 MB   U GRD
-    211  00  ZUQV67   0                        EOM
-  00001C02  0760FF80  2164028A...00010066       33     650.0 MB   U GRD
-    211  00  ZUQV65   0                        EOM
-  00001C02  0760FF80  21640271...00010066       33     625.0 MB   U GRD
-    211  00  ZUQV62   0                        EOM
-  00001C02  0760FF80  21640258...00010066       33     600.0 MB   U GRD
-    211  00  ZUQV60   0                        EOM
-  00001C02  0760FF80  2164023F...00010066       33     575.0 MB   U GRD
-    211  00  ZUQV57   0                        EOM
-  00001C02  0760FF80  21640226...00010066       33     550.0 MB   U GRD
-    211  00  ZUQV55   0                        EOM
-  00001C02  0760FF80  2164020D...00010066       33     525.0 MB   U GRD
-    211  00  ZUQV52   0                        EOM
-  00001C02  0760FF80  216401F4...00010066       33     500.0 MB   U GRD         
-    211  00  ZUQV50   0                        EOM
-  00001C02  0760FF80  216401C2...00010066       33     450.0 MB   U GRD
-    211  00  ZUQV45   0                        EOM
-  00001C02  0760FF80  21640190...00010066       33     400.0 MB   U GRD         
-    211  00  ZUQV40   0                        EOM
-  00001C02  0760FF80  2164015E...00010066       33     350.0 MB   U GRD
-    211  00  ZUQV35   0                        EOM
-  00001C02  0760FF80  2164012C...00010066       33     300.0 MB   U GRD         
-    211  00  ZUQV30   0                        EOM
-  00001C02  0760FF80  216400FA...00010066       33     250.0 MB   U GRD         
-    211  00  ZUQV25   0                        EOM
-  00001C02  0760FF80  216400C8...00010066       33     200.0 MB   U GRD         
-    211  00  ZUQV20   0                        EOM
-  00001C02  0760FF80  21640096...00010066       33     150.0 MB   U GRD         
-    211  00  ZUQV15   0                        EOM
-  00001C02  0760FF80  21640064...00010066       33     100.0 MB   U GRD         
-    211  00  ZUQV10   0                        EOM
-  00001C02  0760FF80  226403E8...00010066       34    1000.0 MB   V GRD
-    211  00  ZVQV99   0                        EOM
-  00001C02  0760FF80  226403CF...00010066       34     975.0 MB   V GRD
-    211  00  ZVQV93   0                        EOM
-  00001C02  0760FF80  226403B6...00010066       34     950.0 MB   V GRD
-    211  00  ZVQV95   0                        EOM
-  00001C02  0760FF80  2264039D...00010066       34     925.0 MB   V GRD
-    211  00  ZVQV92   0                        EOM
-  00001C02  0760FF80  22640384...00010066       34     900.0 MB   V GRD
-    211  00  ZVQV90   0                        EOM
-  00001C02  0760FF80  2264036B...00010066       34     875.0 MB   V GRD
-    211  00  ZVQV91   0                        EOM
-  00001C02  0760FF80  22640352...00010066       34     850.0 MB   V GRD         
-    211  00  ZVQV85   0                        EOM
-  00001C02  0760FF80  22640339...00010066       34     825.0 MB   V GRD
-    211  00  ZVQV82   0                        EOM
-  00001C02  0760FF80  22640320...00010066       34     800.0 MB   V GRD
-    211  00  ZVQV80   0                        EOM
-  00001C02  0760FF80  22640307...00010066       34     775.0 MB   V GRD
-    211  00  ZVQV77   0                        EOM
-  00001C02  0760FF80  226402EE...00010066       34     750.0 MB   V GRD
-    211  00  ZVQV75   0                        EOM
-  00001C02  0760FF80  226402D5...00010066       34     725.0 MB   V GRD
-    211  00  ZVQV72   0                        EOM
-  00001C02  0760FF80  226402BC...00010066       34     700.0 MB   V GRD         
-    211  00  ZVQV70   0                        EOM
-  00001C02  0760FF80  226402A3...00010066       34     675.0 MB   V GRD
-    211  00  ZVQV67   0                        EOM
-  00001C02  0760FF80  2264028A...00010066       34     650.0 MB   V GRD
-    211  00  ZVQV65   0                        EOM
-  00001C02  0760FF80  22640271...00010066       34     625.0 MB   V GRD
-    211  00  ZVQV62   0                        EOM
-  00001C02  0760FF80  22640258...00010066       34     600.0 MB   V GRD
-    211  00  ZVQV60   0                        EOM
-  00001C02  0760FF80  2264023F...00010066       34     575.0 MB   V GRD
-    211  00  ZVQV57   0                        EOM
-  00001C02  0760FF80  22640226...00010066       34     550.0 MB   V GRD
-    211  00  ZVQV55   0                        EOM
-  00001C02  0760FF80  2264020D...00010066       34     525.0 MB   V GRD
-    211  00  ZVQV52   0                        EOM
-  00001C02  0760FF80  226401F4...00010066       34     500.0 MB   V GRD         
-    211  00  ZVQV50   0                        EOM
-  00001C02  0760FF80  226401C2...00010066       34     450.0 MB   V GRD
-    211  00  ZVQV45   0                        EOM
-  00001C02  0760FF80  22640190...00010066       34     400.0 MB   V GRD         
-    211  00  ZVQV40   0                        EOM
-  00001C02  0760FF80  2264015E...00010066       34     350.0 MB   V GRD
-    211  00  ZVQV35   0                        EOM
-  00001C02  0760FF80  2264012C...00010066       34     300.0 MB   V GRD         
-    211  00  ZVQV30   0                        EOM
-  00001C02  0760FF80  226400FA...00010066       34     250.0 MB   V GRD         
-    211  00  ZVQV25   0                        EOM
-  00001C02  0760FF80  226400C8...00010066       34     200.0 MB   V GRD         
-    211  00  ZVQV20   0                        EOM
-  00001C02  0760FF80  22640096...00010066       34     150.0 MB   V GRD         
-    211  00  ZVQV15   0                        EOM
-  00001C02  0760FF80  22640064...00010066       34     100.0 MB   V GRD         
-    211  00  ZVQV10   0                        EOM
-  00001C02  0760FF80  02660000...00010066       02           MSL  PRMSL         
-    211  00  ZPQV89   0                        EOM
-  00001C02  0760FF80  346403E8...00010066       52    1000.0 MB   R H
-    211  00  ZRQV99   0                        EOM
-  00001C02  0760FF80  346403CF...00010066       52     975.0 MB   R H
-    211  00  ZRQV93   0                        EOM
-  00001C02  0760FF80  346403B6...00010066       52     950.0 MB   R H
-    211  00  ZRQV95   0                        EOM
-  00001C02  0760FF80  3464039D...00010066       52     925.0 MB   R H
-    211  00  ZRQV92   0                        EOM
-  00001C02  0760FF80  34640384...00010066       52     900.0 MB   R H
-    211  00  ZRQV90   0                        EOM
-  00001C02  0760FF80  3464036B...00010066       52     875.0 MB   R H
-    211  00  ZRQV91   0                        EOM
-  00001C02  0760FF80  34640352...00010066       52     850.0 MB   R H           
-    211  00  ZRQV85   0                        EOM
-  00001C02  0760FF80  34640339...00010066       52     825.0 MB   R H
-    211  00  ZRQV82   0                        EOM
-  00001C02  0760FF80  34640320...00010066       52     800.0 MB   R H
-    211  00  ZRQV80   0                        EOM
-  00001C02  0760FF80  34640307...00010066       52     775.0 MB   R H
-    211  00  ZRQV77   0                        EOM
-  00001C02  0760FF80  346402EE...00010066       52     750.0 MB   R H
-    211  00  ZRQV75   0                        EOM
-  00001C02  0760FF80  346402D5...00010066       52     725.0 MB   R H
-    211  00  ZRQV72   0                        EOM
-  00001C02  0760FF80  346402BC...00010066       52     700.0 MB   R H           
-    211  00  ZRQV70   0                        EOM
-  00001C02  0760FF80  346402A3...00010066       52     675.0 MB   R H
-    211  00  ZRQV67   0                        EOM
-  00001C02  0760FF80  3464028A...00010066       52     650.0 MB   R H
-    211  00  ZRQV65   0                        EOM
-  00001C02  0760FF80  34640271...00010066       52     625.0 MB   R H
-    211  00  ZRQV62   0                        EOM
-  00001C02  0760FF80  34640258...00010066       52     600.0 MB   R H
-    211  00  ZRQV60   0                        EOM
-  00001C02  0760FF80  3464023F...00010066       52     575.0 MB   R H
-    211  00  ZRQV57   0                        EOM
-  00001C02  0760FF80  34640226...00010066       52     550.0 MB   R H
-    211  00  ZRQV55   0                        EOM
-  00001C02  0760FF80  3464020D...00010066       52     525.0 MB   R H
-    211  00  ZRQV52   0                        EOM
-  00001C02  0760FF80  346401F4...00010066       52     500.0 MB   R H           
-    211  00  ZRQV50   0                        EOM
-  00001C02  0760FF80  346401C2...00010066       52     450.0 MB   R H
-    211  00  ZRQV45   0                        EOM
-  00001C02  0760FF80  34640190...00010066       52     400.0 MB   R H           
-    211  00  ZRQV40   0                        EOM
-  00001C02  0760FF80  3464015E...00010066       52     350.0 MB   R H
-    211  00  ZRQV35   0                        EOM
-  00001C02  0760FF80  3464012C...00010066       52     300.0 MB   R H           
-    211  00  ZRQV30   0                        EOM
-  00001C02  0760FF80  346400FA...00010066       52     250.0 MB   R H
-    211  00  ZRQV25   0                        EOM
-  00001C02  0760FF80  346400C8...00010066       52     200.0 MB   R H
-    211  00  ZRQV20   0                        EOM
-  00001C02  0760FF80  34640096...00010066       52     150.0 MB   R H
-    211  00  ZRQV15   0                        EOM
-  00001C02  0760FF80  34640064...00010066       52     100.0 MB   R H
-    211  00  ZRQV10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010066       11    1000.0 MB   TMP
-    211  00  ZTQV99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010066       11     975.0 MB   TMP
-    211  00  ZTQV93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010066       11     950.0 MB   TMP
-    211  00  ZTQV95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010066       11     925.0 MB   TMP
-    211  00  ZTQV92   0                        EOM
-  00001C02  0760FF80  0B640384...00010066       11     900.0 MB   TMP
-    211  00  ZTQV90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010066       11     875.0 MB   TMP
-    211  00  ZTQV91   0                        EOM
-  00001C02  0760FF80  0B640352...00010066       11     850.0 MB   TMP           
-    211  00  ZTQV85   0                        EOM
-  00001C02  0760FF80  0B640339...00010066       11     825.0 MB   TMP
-    211  00  ZTQV82   0                        EOM
-  00001C02  0760FF80  0B640320...00010066       11     800.0 MB   TMP
-    211  00  ZTQV80   0                        EOM
-  00001C02  0760FF80  0B640307...00010066       11     775.0 MB   TMP
-    211  00  ZTQV77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010066       11     750.0 MB   TMP
-    211  00  ZTQV75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010066       11     725.0 MB   TMP
-    211  00  ZTQV72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010066       11     700.0 MB   TMP           
-    211  00  ZTQV70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010066       11     675.0 MB   TMP
-    211  00  ZTQV67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010066       11     650.0 MB   TMP
-    211  00  ZTQV65   0                        EOM
-  00001C02  0760FF80  0B640271...00010066       11     625.0 MB   TMP
-    211  00  ZTQV62   0                        EOM
-  00001C02  0760FF80  0B640258...00010066       11     600.0 MB   TMP
-    211  00  ZTQV60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010066       11     575.0 MB   TMP
-    211  00  ZTQV57   0                        EOM
-  00001C02  0760FF80  0B640226...00010066       11     550.0 MB   TMP
-    211  00  ZTQV55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010066       11     525.0 MB   TMP
-    211  00  ZTQV52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010066       11     500.0 MB   TMP           
-    211  00  ZTQV50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010066       11     450.0 MB   TMP
-    211  00  ZTQV45   0                        EOM
-  00001C02  0760FF80  0B640190...00010066       11     400.0 MB   TMP           
-    211  00  ZTQV40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010066       11     350.0 MB   TMP
-    211  00  ZTQV35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010066       11     300.0 MB   TMP           
-    211  00  ZTQV30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010066       11     250.0 MB   TMP           
-    211  00  ZTQV25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010066       11     200.0 MB   TMP           
-    211  00  ZTQV20   0                        EOM
-  00001C02  0760FF80  0B640096...00010066       11     150.0 MB   TMP           
-    211  00  ZTQV15   0                        EOM
-  00001C02  0760FF80  0B640064...00010066       11     100.0 MB   TMP           
-    211  00  ZTQV10   0                        EOM
-  00001C02  0760FF80  28640352...00010066       40     850.0 MB  DZDT           
-    211  00  ZOQV85   0                        EOM
-  00001C02  0760FF80  286402BC...00010066       40     700.0 MB  DZDT           
-    211  00  ZOQV70   0                        EOM
-  00001C02  0760FF80  286401F4...00010066       40     500.0 MB  DZDT           
-    211  00  ZOQV50   0                        EOM
-  00001C02  0760FF80  28640190...00010066       40     400.0 MB  DZDT           
-    211  00  ZOQV40   0                        EOM
-  00001C02  0760FF80  2864012C...00010066       40     300.0 MB  DZDT           
-    211  00  ZOQV30   0                        EOM
-  00001C02  0760FF80  286400FA...00010066       40     250.0 MB  DZDT           
-    211  00  ZOQV25   0                        EOM
-  00001C02  0760FF80  286400C8...00010066       40     200.0 MB  DZDT           
-    211  00  ZOQV20   0                        EOM
-  00001C02  0760FF80  28640096...00010066       40     150.0 MB  DZDT           
-    211  00  ZOQV15   0                        EOM
-  00001C02  0760FF80  28640064...00010066       40     100.0 MB  DZDT           
-    211  00  ZOQV10   0                        EOM
-  00001C02  0760FF80  01010000...00010066       01          SFC  PRES           
-    211  00  ZPQV98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010066       52        44/100  R H           
-    211  00  ZRQV00   0                        EOM
-  00001C02  0760FF80  36C80000...00010066       54          EATM  P WAT         
-    211  00  ZFQV00   0                        EOM
-  00001C02  0760FF80  0B690002...00010066       11          2m/SFC TMP         
-    211  00  ZTQV98   0                        EOM
-  00001C02  0760FF80  34741E00...00010066       52      BNDRY/SPD  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  0B070000...00010066       11            TRO TMP           
-    211  00  ZTQV97   0                        EOM
-  00001C02  0760FF80  01070000...00010066       01            TRO PRES          
-    211  00  ZPQV97   0                        EOM
-  00001C02  0760FF80  21741E00...00010066       33           SPD  U GRD         
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  22741E00...00010066       34           SPD  V GRD         
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  21070000...00010066       33            TRO U GRD         
-    211  00  ZUQV97   0                        EOM
-  00001C02  0760FF80  22070000...00010066       34            TRO V GRD         
-    211  00  ZVQV97   0                        EOM
-  00001C02  0760FF80  88070000...00010066      136            TRO VW SH         
-    211  00  ZBQV97   0                        EOM
-  00001C02  0760FF80  3D010000...00010066       61            SFC A PCP         
-    211  00  ZEQV98   0                        EOM
-  00001C02  0760FF80  83010000...00010066      131            SFC LFT X         
-    211  00  ZXQV98   0                        EOM
-  00001C02  0760FF80  29640352...00010066       41    850.0 MB    ABS V         
-    211  00  ZCQV85   0                        EOM
-  00001C02  0760FF80  296402BC...00010066       41    700.0 MB    ABS V         
-    211  00  ZCQV70   0                        EOM
-  00001C02  0760FF80  296401F4...00010066       41    500.0 MB    ABS V         
-    211  00  ZCQV50   0                        EOM
-  00001C02  0760FF80  296400FA...00010066       41    250.0 MB    ABS V         
-    211  00  ZCQV25   0                        EOM
-  00001C02  0760FF80  9D010000...00010066      157          SFC   CAPE
-    211  00  ZWQV98   0                        EOM
-  00001C02  0760FF80  9C010000...00010066      156          SFC   CIN
-    211  00  ZYQV98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010066      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQV86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010066      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQV86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010066       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010066       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010066       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010066       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  0B749678...00010066       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010066       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQV86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010066       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010066       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  3474785A...00010066       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  34749678...00010066       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  3474B496...00010066       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQV86   0                        EOM
-  00001C02  0760FF80  21741E00...00010066       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010066       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010066       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  2174785A...00010066       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  21749678...00010066       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  2174B496...00010066       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQV86   0                        EOM
-  00001C02  0760FF80  22741E00...00010066       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010066       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010066       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  2274785A...00010066       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  22749678...00010066       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  2274B496...00010066       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQV86   0                        EOM
-  00001C02  0760FF80  0B690002...00010066       11    2  HTGL     TMP
-    211  00  ZTQV98   0                        EOM
-  00001C02  0760FF80  34690002...00010066       52    2  HTGL     R H
-    211  00  ZRQV98   0                        EOM
-  00001C02  0760FF80  2169000A...00010066       33   10  HTGL     U GRD
-    211  00  ZUQV98   0                        EOM
-  00001C02  0760FF80  2269000A...00010066       34   10  HTGL     V GRD
-    211  00  ZVQV98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs108.211 b/parm/wmo/grib_awpgfs108.211
deleted file mode 100755
index 6da0d5d8d4..0000000000
--- a/parm/wmo/grib_awpgfs108.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...0001006C       07    1000.0 MB   HGT           
-    211  00  YHQN99   0                        EOM
-  00001C02  0760FF80  076403CF...0001006C       07     975.0 MB   HGT
-    211  00  YHQN93   0                        EOM
-  00001C02  0760FF80  076403B6...0001006C       07     950.0 MB   HGT
-    211  00  YHQN95   0                        EOM
-  00001C02  0760FF80  0764039D...0001006C       07     925.0 MB   HGT
-    211  00  YHQN92   0                        EOM
-  00001C02  0760FF80  07640384...0001006C       07     900.0 MB   HGT
-    211  00  YHQN90   0                        EOM
-  00001C02  0760FF80  0764036B...0001006C       07     875.0 MB   HGT
-    211  00  YHQN91   0                        EOM
-  00001C02  0760FF80  07640352...0001006C       07     850.0 MB   HGT           
-    211  00  YHQN85   0                        EOM
-  00001C02  0760FF80  07640339...0001006C       07     825.0 MB   HGT
-    211  00  YHQN82   0                        EOM
-  00001C02  0760FF80  07640320...0001006C       07     800.0 MB   HGT
-    211  00  YHQN80   0                        EOM
-  00001C02  0760FF80  07640307...0001006C       07     775.0 MB   HGT
-    211  00  YHQN77   0                        EOM
-  00001C02  0760FF80  076402EE...0001006C       07     750.0 MB   HGT
-    211  00  YHQN75   0                        EOM
-  00001C02  0760FF80  076402D5...0001006C       07     725.0 MB   HGT
-    211  00  YHQN72   0                        EOM
-  00001C02  0760FF80  076402BC...0001006C       07     700.0 MB   HGT           
-    211  00  YHQN70   0                        EOM
-  00001C02  0760FF80  076402A3...0001006C       07     675.0 MB   HGT
-    211  00  YHQN67   0                        EOM
-  00001C02  0760FF80  0764028A...0001006C       07     650.0 MB   HGT
-    211  00  YHQN65   0                        EOM
-  00001C02  0760FF80  07640271...0001006C       07     625.0 MB   HGT
-    211  00  YHQN62   0                        EOM
-  00001C02  0760FF80  07640258...0001006C       07     600.0 MB   HGT
-    211  00  YHQN60   0                        EOM
-  00001C02  0760FF80  0764023F...0001006C       07     575.0 MB   HGT
-    211  00  YHQN57   0                        EOM
-  00001C02  0760FF80  07640226...0001006C       07     550.0 MB   HGT
-    211  00  YHQN55   0                        EOM
-  00001C02  0760FF80  0764020D...0001006C       07     525.0 MB   HGT
-    211  00  YHQN52   0                        EOM
-  00001C02  0760FF80  076401F4...0001006C       07     500.0 MB   HGT           
-    211  00  YHQN50   0                        EOM
-  00001C02  0760FF80  076401C2...0001006C       07     450.0 MB   HGT
-    211  00  YHQN45   0                        EOM
-  00001C02  0760FF80  07640190...0001006C       07     400.0 MB   HGT           
-    211  00  YHQN40   0                        EOM
-  00001C02  0760FF80  0764015E...0001006C       07     350.0 MB   HGT
-    211  00  YHQN35   0                        EOM
-  00001C02  0760FF80  0764012C...0001006C       07     300.0 MB   HGT           
-    211  00  YHQN30   0                        EOM
-  00001C02  0760FF80  076400FA...0001006C       07     250.0 MB   HGT           
-    211  00  YHQN25   0                        EOM
-  00001C02  0760FF80  076400C8...0001006C       07     200.0 MB   HGT           
-    211  00  YHQN20   0                        EOM
-  00001C02  0760FF80  07640096...0001006C       07     150.0 MB   HGT           
-    211  00  YHQN15   0                        EOM
-  00001C02  0760FF80  07640064...0001006C       07     100.0 MB   HGT           
-    211  00  YHQN10   0                        EOM
-  00001C02  0760FF80  216403E8...0001006C       33    1000.0 MB   U GRD
-    211  00  YUQN99   0                        EOM
-  00001C02  0760FF80  216403CF...0001006C       33     975.0 MB   U GRD
-    211  00  YUQN93   0                        EOM
-  00001C02  0760FF80  216403B6...0001006C       33     950.0 MB   U GRD
-    211  00  YUQN95   0                        EOM
-  00001C02  0760FF80  2164039D...0001006C       33     925.0 MB   U GRD
-    211  00  YUQN92   0                        EOM
-  00001C02  0760FF80  21640384...0001006C       33     900.0 MB   U GRD
-    211  00  YUQN90   0                        EOM
-  00001C02  0760FF80  2164036B...0001006C       33     875.0 MB   U GRD
-    211  00  YUQN91   0                        EOM
-  00001C02  0760FF80  21640352...0001006C       33     850.0 MB   U GRD         
-    211  00  YUQN85   0                        EOM
-  00001C02  0760FF80  21640339...0001006C       33     825.0 MB   U GRD
-    211  00  YUQN82   0                        EOM
-  00001C02  0760FF80  21640320...0001006C       33     800.0 MB   U GRD
-    211  00  YUQN80   0                        EOM
-  00001C02  0760FF80  21640307...0001006C       33     775.0 MB   U GRD
-    211  00  YUQN77   0                        EOM
-  00001C02  0760FF80  216402EE...0001006C       33     750.0 MB   U GRD
-    211  00  YUQN75   0                        EOM
-  00001C02  0760FF80  216402D5...0001006C       33     725.0 MB   U GRD
-    211  00  YUQN72   0                        EOM
-  00001C02  0760FF80  216402BC...0001006C       33     700.0 MB   U GRD         
-    211  00  YUQN70   0                        EOM
-  00001C02  0760FF80  216402A3...0001006C       33     675.0 MB   U GRD
-    211  00  YUQN67   0                        EOM
-  00001C02  0760FF80  2164028A...0001006C       33     650.0 MB   U GRD
-    211  00  YUQN65   0                        EOM
-  00001C02  0760FF80  21640271...0001006C       33     625.0 MB   U GRD
-    211  00  YUQN62   0                        EOM
-  00001C02  0760FF80  21640258...0001006C       33     600.0 MB   U GRD
-    211  00  YUQN60   0                        EOM
-  00001C02  0760FF80  2164023F...0001006C       33     575.0 MB   U GRD
-    211  00  YUQN57   0                        EOM
-  00001C02  0760FF80  21640226...0001006C       33     550.0 MB   U GRD
-    211  00  YUQN55   0                        EOM
-  00001C02  0760FF80  2164020D...0001006C       33     525.0 MB   U GRD
-    211  00  YUQN52   0                        EOM
-  00001C02  0760FF80  216401F4...0001006C       33     500.0 MB   U GRD         
-    211  00  YUQN50   0                        EOM
-  00001C02  0760FF80  216401C2...0001006C       33     450.0 MB   U GRD
-    211  00  YUQN45   0                        EOM
-  00001C02  0760FF80  21640190...0001006C       33     400.0 MB   U GRD         
-    211  00  YUQN40   0                        EOM
-  00001C02  0760FF80  2164015E...0001006C       33     350.0 MB   U GRD
-    211  00  YUQN35   0                        EOM
-  00001C02  0760FF80  2164012C...0001006C       33     300.0 MB   U GRD         
-    211  00  YUQN30   0                        EOM
-  00001C02  0760FF80  216400FA...0001006C       33     250.0 MB   U GRD         
-    211  00  YUQN25   0                        EOM
-  00001C02  0760FF80  216400C8...0001006C       33     200.0 MB   U GRD         
-    211  00  YUQN20   0                        EOM
-  00001C02  0760FF80  21640096...0001006C       33     150.0 MB   U GRD         
-    211  00  YUQN15   0                        EOM
-  00001C02  0760FF80  21640064...0001006C       33     100.0 MB   U GRD         
-    211  00  YUQN10   0                        EOM
-  00001C02  0760FF80  226403E8...0001006C       34    1000.0 MB   V GRD
-    211  00  YVQN99   0                        EOM
-  00001C02  0760FF80  226403CF...0001006C       34     975.0 MB   V GRD
-    211  00  YVQN93   0                        EOM
-  00001C02  0760FF80  226403B6...0001006C       34     950.0 MB   V GRD
-    211  00  YVQN95   0                        EOM
-  00001C02  0760FF80  2264039D...0001006C       34     925.0 MB   V GRD
-    211  00  YVQN92   0                        EOM
-  00001C02  0760FF80  22640384...0001006C       34     900.0 MB   V GRD
-    211  00  YVQN90   0                        EOM
-  00001C02  0760FF80  2264036B...0001006C       34     875.0 MB   V GRD
-    211  00  YVQN91   0                        EOM
-  00001C02  0760FF80  22640352...0001006C       34     850.0 MB   V GRD         
-    211  00  YVQN85   0                        EOM
-  00001C02  0760FF80  22640339...0001006C       34     825.0 MB   V GRD
-    211  00  YVQN82   0                        EOM
-  00001C02  0760FF80  22640320...0001006C       34     800.0 MB   V GRD
-    211  00  YVQN80   0                        EOM
-  00001C02  0760FF80  22640307...0001006C       34     775.0 MB   V GRD
-    211  00  YVQN77   0                        EOM
-  00001C02  0760FF80  226402EE...0001006C       34     750.0 MB   V GRD
-    211  00  YVQN75   0                        EOM
-  00001C02  0760FF80  226402D5...0001006C       34     725.0 MB   V GRD
-    211  00  YVQN72   0                        EOM
-  00001C02  0760FF80  226402BC...0001006C       34     700.0 MB   V GRD         
-    211  00  YVQN70   0                        EOM
-  00001C02  0760FF80  226402A3...0001006C       34     675.0 MB   V GRD
-    211  00  YVQN67   0                        EOM
-  00001C02  0760FF80  2264028A...0001006C       34     650.0 MB   V GRD
-    211  00  YVQN65   0                        EOM
-  00001C02  0760FF80  22640271...0001006C       34     625.0 MB   V GRD
-    211  00  YVQN62   0                        EOM
-  00001C02  0760FF80  22640258...0001006C       34     600.0 MB   V GRD
-    211  00  YVQN60   0                        EOM
-  00001C02  0760FF80  2264023F...0001006C       34     575.0 MB   V GRD
-    211  00  YVQN57   0                        EOM
-  00001C02  0760FF80  22640226...0001006C       34     550.0 MB   V GRD
-    211  00  YVQN55   0                        EOM
-  00001C02  0760FF80  2264020D...0001006C       34     525.0 MB   V GRD
-    211  00  YVQN52   0                        EOM
-  00001C02  0760FF80  226401F4...0001006C       34     500.0 MB   V GRD         
-    211  00  YVQN50   0                        EOM
-  00001C02  0760FF80  226401C2...0001006C       34     450.0 MB   V GRD
-    211  00  YVQN45   0                        EOM
-  00001C02  0760FF80  22640190...0001006C       34     400.0 MB   V GRD         
-    211  00  YVQN40   0                        EOM
-  00001C02  0760FF80  2264015E...0001006C       34     350.0 MB   V GRD
-    211  00  YVQN35   0                        EOM
-  00001C02  0760FF80  2264012C...0001006C       34     300.0 MB   V GRD         
-    211  00  YVQN30   0                        EOM
-  00001C02  0760FF80  226400FA...0001006C       34     250.0 MB   V GRD         
-    211  00  YVQN25   0                        EOM
-  00001C02  0760FF80  226400C8...0001006C       34     200.0 MB   V GRD         
-    211  00  YVQN20   0                        EOM
-  00001C02  0760FF80  22640096...0001006C       34     150.0 MB   V GRD         
-    211  00  YVQN15   0                        EOM
-  00001C02  0760FF80  22640064...0001006C       34     100.0 MB   V GRD         
-    211  00  YVQN10   0                        EOM
-  00001C02  0760FF80  02660000...0001006C       02           MSL  PRMSL         
-    211  00  YPQN89   0                        EOM
-  00001C02  0760FF80  346403E8...0001006C       52    1000.0 MB   R H
-    211  00  YRQN99   0                        EOM
-  00001C02  0760FF80  346403CF...0001006C       52     975.0 MB   R H
-    211  00  YRQN93   0                        EOM
-  00001C02  0760FF80  346403B6...0001006C       52     950.0 MB   R H
-    211  00  YRQN95   0                        EOM
-  00001C02  0760FF80  3464039D...0001006C       52     925.0 MB   R H
-    211  00  YRQN92   0                        EOM
-  00001C02  0760FF80  34640384...0001006C       52     900.0 MB   R H
-    211  00  YRQN90   0                        EOM
-  00001C02  0760FF80  3464036B...0001006C       52     875.0 MB   R H
-    211  00  YRQN91   0                        EOM
-  00001C02  0760FF80  34640352...0001006C       52     850.0 MB   R H           
-    211  00  YRQN85   0                        EOM
-  00001C02  0760FF80  34640339...0001006C       52     825.0 MB   R H
-    211  00  YRQN82   0                        EOM
-  00001C02  0760FF80  34640320...0001006C       52     800.0 MB   R H
-    211  00  YRQN80   0                        EOM
-  00001C02  0760FF80  34640307...0001006C       52     775.0 MB   R H
-    211  00  YRQN77   0                        EOM
-  00001C02  0760FF80  346402EE...0001006C       52     750.0 MB   R H
-    211  00  YRQN75   0                        EOM
-  00001C02  0760FF80  346402D5...0001006C       52     725.0 MB   R H
-    211  00  YRQN72   0                        EOM
-  00001C02  0760FF80  346402BC...0001006C       52     700.0 MB   R H           
-    211  00  YRQN70   0                        EOM
-  00001C02  0760FF80  346402A3...0001006C       52     675.0 MB   R H
-    211  00  YRQN67   0                        EOM
-  00001C02  0760FF80  3464028A...0001006C       52     650.0 MB   R H
-    211  00  YRQN65   0                        EOM
-  00001C02  0760FF80  34640271...0001006C       52     625.0 MB   R H
-    211  00  YRQN62   0                        EOM
-  00001C02  0760FF80  34640258...0001006C       52     600.0 MB   R H
-    211  00  YRQN60   0                        EOM
-  00001C02  0760FF80  3464023F...0001006C       52     575.0 MB   R H
-    211  00  YRQN57   0                        EOM
-  00001C02  0760FF80  34640226...0001006C       52     550.0 MB   R H
-    211  00  YRQN55   0                        EOM
-  00001C02  0760FF80  3464020D...0001006C       52     525.0 MB   R H
-    211  00  YRQN52   0                        EOM
-  00001C02  0760FF80  346401F4...0001006C       52     500.0 MB   R H           
-    211  00  YRQN50   0                        EOM
-  00001C02  0760FF80  346401C2...0001006C       52     450.0 MB   R H
-    211  00  YRQN45   0                        EOM
-  00001C02  0760FF80  34640190...0001006C       52     400.0 MB   R H           
-    211  00  YRQN40   0                        EOM
-  00001C02  0760FF80  3464015E...0001006C       52     350.0 MB   R H
-    211  00  YRQN35   0                        EOM
-  00001C02  0760FF80  3464012C...0001006C       52     300.0 MB   R H           
-    211  00  YRQN30   0                        EOM
-  00001C02  0760FF80  346400FA...0001006C       52     250.0 MB   R H
-    211  00  YRQN25   0                        EOM
-  00001C02  0760FF80  346400C8...0001006C       52     200.0 MB   R H
-    211  00  YRQN20   0                        EOM
-  00001C02  0760FF80  34640096...0001006C       52     150.0 MB   R H
-    211  00  YRQN15   0                        EOM
-  00001C02  0760FF80  34640064...0001006C       52     100.0 MB   R H
-    211  00  YRQN10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001006C       11    1000.0 MB   TMP
-    211  00  YTQN99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001006C       11     975.0 MB   TMP
-    211  00  YTQN93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001006C       11     950.0 MB   TMP
-    211  00  YTQN95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001006C       11     925.0 MB   TMP
-    211  00  YTQN92   0                        EOM
-  00001C02  0760FF80  0B640384...0001006C       11     900.0 MB   TMP
-    211  00  YTQN90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001006C       11     875.0 MB   TMP
-    211  00  YTQN91   0                        EOM
-  00001C02  0760FF80  0B640352...0001006C       11     850.0 MB   TMP           
-    211  00  YTQN85   0                        EOM
-  00001C02  0760FF80  0B640339...0001006C       11     825.0 MB   TMP
-    211  00  YTQN82   0                        EOM
-  00001C02  0760FF80  0B640320...0001006C       11     800.0 MB   TMP
-    211  00  YTQN80   0                        EOM
-  00001C02  0760FF80  0B640307...0001006C       11     775.0 MB   TMP
-    211  00  YTQN77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001006C       11     750.0 MB   TMP
-    211  00  YTQN75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001006C       11     725.0 MB   TMP
-    211  00  YTQN72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001006C       11     700.0 MB   TMP           
-    211  00  YTQN70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001006C       11     675.0 MB   TMP
-    211  00  YTQN67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001006C       11     650.0 MB   TMP
-    211  00  YTQN65   0                        EOM
-  00001C02  0760FF80  0B640271...0001006C       11     625.0 MB   TMP
-    211  00  YTQN62   0                        EOM
-  00001C02  0760FF80  0B640258...0001006C       11     600.0 MB   TMP
-    211  00  YTQN60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001006C       11     575.0 MB   TMP
-    211  00  YTQN57   0                        EOM
-  00001C02  0760FF80  0B640226...0001006C       11     550.0 MB   TMP
-    211  00  YTQN55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001006C       11     525.0 MB   TMP
-    211  00  YTQN52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001006C       11     500.0 MB   TMP           
-    211  00  YTQN50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001006C       11     450.0 MB   TMP
-    211  00  YTQN45   0                        EOM
-  00001C02  0760FF80  0B640190...0001006C       11     400.0 MB   TMP           
-    211  00  YTQN40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001006C       11     350.0 MB   TMP
-    211  00  YTQN35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001006C       11     300.0 MB   TMP           
-    211  00  YTQN30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001006C       11     250.0 MB   TMP           
-    211  00  YTQN25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001006C       11     200.0 MB   TMP           
-    211  00  YTQN20   0                        EOM
-  00001C02  0760FF80  0B640096...0001006C       11     150.0 MB   TMP           
-    211  00  YTQN15   0                        EOM
-  00001C02  0760FF80  0B640064...0001006C       11     100.0 MB   TMP           
-    211  00  YTQN10   0                        EOM
-  00001C02  0760FF80  28640352...0001006C       40     850.0 MB  DZDT           
-    211  00  YOQN85   0                        EOM
-  00001C02  0760FF80  286402BC...0001006C       40     700.0 MB  DZDT           
-    211  00  YOQN70   0                        EOM
-  00001C02  0760FF80  286401F4...0001006C       40     500.0 MB  DZDT           
-    211  00  YOQN50   0                        EOM
-  00001C02  0760FF80  28640190...0001006C       40     400.0 MB  DZDT           
-    211  00  YOQN40   0                        EOM
-  00001C02  0760FF80  2864012C...0001006C       40     300.0 MB  DZDT           
-    211  00  YOQN30   0                        EOM
-  00001C02  0760FF80  286400FA...0001006C       40     250.0 MB  DZDT           
-    211  00  YOQN25   0                        EOM
-  00001C02  0760FF80  286400C8...0001006C       40     200.0 MB  DZDT           
-    211  00  YOQN20   0                        EOM
-  00001C02  0760FF80  28640096...0001006C       40     150.0 MB  DZDT           
-    211  00  YOQN15   0                        EOM
-  00001C02  0760FF80  28640064...0001006C       40     100.0 MB  DZDT           
-    211  00  YOQN10   0                        EOM
-  00001C02  0760FF80  01010000...0001006C       01          SFC  PRES           
-    211  00  YPQN98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001006C       52        44/100  R H           
-    211  00  YRQN00   0                        EOM
-  00001C02  0760FF80  36C80000...0001006C       54          EATM  P WAT         
-    211  00  YFQN00   0                        EOM
-  00001C02  0760FF80  0B690002...0001006C       11          2m/SFC TMP         
-    211  00  YTQN98   0                        EOM
-  00001C02  0760FF80  34741E00...0001006C       52      BNDRY/SPD  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  0B070000...0001006C       11            TRO TMP           
-    211  00  YTQN97   0                        EOM
-  00001C02  0760FF80  01070000...0001006C       01            TRO PRES          
-    211  00  YPQN97   0                        EOM
-  00001C02  0760FF80  21741E00...0001006C       33           SPD  U GRD         
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  22741E00...0001006C       34           SPD  V GRD         
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  21070000...0001006C       33            TRO U GRD         
-    211  00  YUQN97   0                        EOM
-  00001C02  0760FF80  22070000...0001006C       34            TRO V GRD         
-    211  00  YVQN97   0                        EOM
-  00001C02  0760FF80  88070000...0001006C      136            TRO VW SH         
-    211  00  YBQN97   0                        EOM
-  00001C02  0760FF80  3D010000...0001006C       61            SFC A PCP         
-    211  00  YEQN98   0                        EOM
-  00001C02  0760FF80  83010000...0001006C      131            SFC LFT X         
-    211  00  YXQN98   0                        EOM
-  00001C02  0760FF80  29640352...0001006C       41    850.0 MB    ABS V         
-    211  00  YCQN85   0                        EOM
-  00001C02  0760FF80  296402BC...0001006C       41    700.0 MB    ABS V         
-    211  00  YCQN70   0                        EOM
-  00001C02  0760FF80  296401F4...0001006C       41    500.0 MB    ABS V         
-    211  00  YCQN50   0                        EOM
-  00001C02  0760FF80  296400FA...0001006C       41    250.0 MB    ABS V         
-    211  00  YCQN25   0                        EOM
-  00001C02  0760FF80  9D010000...0001006C      157          SFC   CAPE
-    211  00  YWQN98   0                        EOM
-  00001C02  0760FF80  9C010000...0001006C      156          SFC   CIN
-    211  00  YYQN98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001006C      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQN86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001006C      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQN86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001006C       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001006C       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001006C       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001006C       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  0B749678...0001006C       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001006C       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQN86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001006C       52   60 SPDY  30 SPDY  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001006C       52   90 SPDY  60 SPDY  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  3474785A...0001006C       52  120 SPDY  90 SPDY  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  34749678...0001006C       52  150 SPDY 120 SPDY  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  3474B496...0001006C       52  180 SPDY 150 SPDY  R H
-    211  00  YRQN86   0                        EOM
-  00001C02  0760FF80  21741E00...0001006C       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001006C       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001006C       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  2174785A...0001006C       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  21749678...0001006C       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  2174B496...0001006C       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQN86   0                        EOM
-  00001C02  0760FF80  22741E00...0001006C       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001006C       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001006C       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  2274785A...0001006C       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  22749678...0001006C       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  2274B496...0001006C       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQN86   0                        EOM
-  00001C02  0760FF80  0B690002...0001006C       11    2  HTGL     TMP
-    211  00  YTQN98   0                        EOM
-  00001C02  0760FF80  34690002...0001006C       52    2  HTGL     R H
-    211  00  YRQN98   0                        EOM
-  00001C02  0760FF80  2169000A...0001006C       33   10  HTGL     U GRD
-    211  00  YUQN98   0                        EOM
-  00001C02  0760FF80  2269000A...0001006C       34   10  HTGL     V GRD
-    211  00  YVQN98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs114.211 b/parm/wmo/grib_awpgfs114.211
deleted file mode 100755
index 4e4de7b561..0000000000
--- a/parm/wmo/grib_awpgfs114.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010072       07    1000.0 MB   HGT           
-    211  00  ZHQW99   0                        EOM
-  00001C02  0760FF80  076403CF...00010072       07     975.0 MB   HGT
-    211  00  ZHQW93   0                        EOM
-  00001C02  0760FF80  076403B6...00010072       07     950.0 MB   HGT
-    211  00  ZHQW95   0                        EOM
-  00001C02  0760FF80  0764039D...00010072       07     925.0 MB   HGT
-    211  00  ZHQW92   0                        EOM
-  00001C02  0760FF80  07640384...00010072       07     900.0 MB   HGT
-    211  00  ZHQW90   0                        EOM
-  00001C02  0760FF80  0764036B...00010072       07     875.0 MB   HGT
-    211  00  ZHQW91   0                        EOM
-  00001C02  0760FF80  07640352...00010072       07     850.0 MB   HGT           
-    211  00  ZHQW85   0                        EOM
-  00001C02  0760FF80  07640339...00010072       07     825.0 MB   HGT
-    211  00  ZHQW82   0                        EOM
-  00001C02  0760FF80  07640320...00010072       07     800.0 MB   HGT
-    211  00  ZHQW80   0                        EOM
-  00001C02  0760FF80  07640307...00010072       07     775.0 MB   HGT
-    211  00  ZHQW77   0                        EOM
-  00001C02  0760FF80  076402EE...00010072       07     750.0 MB   HGT
-    211  00  ZHQW75   0                        EOM
-  00001C02  0760FF80  076402D5...00010072       07     725.0 MB   HGT
-    211  00  ZHQW72   0                        EOM
-  00001C02  0760FF80  076402BC...00010072       07     700.0 MB   HGT           
-    211  00  ZHQW70   0                        EOM
-  00001C02  0760FF80  076402A3...00010072       07     675.0 MB   HGT
-    211  00  ZHQW67   0                        EOM
-  00001C02  0760FF80  0764028A...00010072       07     650.0 MB   HGT
-    211  00  ZHQW65   0                        EOM
-  00001C02  0760FF80  07640271...00010072       07     625.0 MB   HGT
-    211  00  ZHQW62   0                        EOM
-  00001C02  0760FF80  07640258...00010072       07     600.0 MB   HGT
-    211  00  ZHQW60   0                        EOM
-  00001C02  0760FF80  0764023F...00010072       07     575.0 MB   HGT
-    211  00  ZHQW57   0                        EOM
-  00001C02  0760FF80  07640226...00010072       07     550.0 MB   HGT
-    211  00  ZHQW55   0                        EOM
-  00001C02  0760FF80  0764020D...00010072       07     525.0 MB   HGT
-    211  00  ZHQW52   0                        EOM
-  00001C02  0760FF80  076401F4...00010072       07     500.0 MB   HGT           
-    211  00  ZHQW50   0                        EOM
-  00001C02  0760FF80  076401C2...00010072       07     450.0 MB   HGT
-    211  00  ZHQW45   0                        EOM
-  00001C02  0760FF80  0764015E...00010072       07     350.0 MB   HGT
-    211  00  ZHQW35   0                        EOM
-  00001C02  0760FF80  07640190...00010072       07     400.0 MB   HGT           
-    211  00  ZHQW40   0                        EOM
-  00001C02  0760FF80  0764012C...00010072       07     300.0 MB   HGT           
-    211  00  ZHQW30   0                        EOM
-  00001C02  0760FF80  076400FA...00010072       07     250.0 MB   HGT           
-    211  00  ZHQW25   0                        EOM
-  00001C02  0760FF80  076400C8...00010072       07     200.0 MB   HGT           
-    211  00  ZHQW20   0                        EOM
-  00001C02  0760FF80  07640096...00010072       07     150.0 MB   HGT           
-    211  00  ZHQW15   0                        EOM
-  00001C02  0760FF80  07640064...00010072       07     100.0 MB   HGT           
-    211  00  ZHQW10   0                        EOM
-  00001C02  0760FF80  216403E8...00010072       33    1000.0 MB   U GRD
-    211  00  ZUQW99   0                        EOM
-  00001C02  0760FF80  216403CF...00010072       33     975.0 MB   U GRD
-    211  00  ZUQW93   0                        EOM
-  00001C02  0760FF80  216403B6...00010072       33     950.0 MB   U GRD
-    211  00  ZUQW95   0                        EOM
-  00001C02  0760FF80  2164039D...00010072       33     925.0 MB   U GRD
-    211  00  ZUQW92   0                        EOM
-  00001C02  0760FF80  21640384...00010072       33     900.0 MB   U GRD
-    211  00  ZUQW90   0                        EOM
-  00001C02  0760FF80  2164036B...00010072       33     875.0 MB   U GRD
-    211  00  ZUQW91   0                        EOM
-  00001C02  0760FF80  21640352...00010072       33     850.0 MB   U GRD         
-    211  00  ZUQW85   0                        EOM
-  00001C02  0760FF80  21640339...00010072       33     825.0 MB   U GRD
-    211  00  ZUQW82   0                        EOM
-  00001C02  0760FF80  21640320...00010072       33     800.0 MB   U GRD
-    211  00  ZUQW80   0                        EOM
-  00001C02  0760FF80  21640307...00010072       33     775.0 MB   U GRD
-    211  00  ZUQW77   0                        EOM
-  00001C02  0760FF80  216402EE...00010072       33     750.0 MB   U GRD
-    211  00  ZUQW75   0                        EOM
-  00001C02  0760FF80  216402D5...00010072       33     725.0 MB   U GRD
-    211  00  ZUQW72   0                        EOM
-  00001C02  0760FF80  216402BC...00010072       33     700.0 MB   U GRD         
-    211  00  ZUQW70   0                        EOM
-  00001C02  0760FF80  216402A3...00010072       33     675.0 MB   U GRD
-    211  00  ZUQW67   0                        EOM
-  00001C02  0760FF80  2164028A...00010072       33     650.0 MB   U GRD
-    211  00  ZUQW65   0                        EOM
-  00001C02  0760FF80  21640271...00010072       33     625.0 MB   U GRD
-    211  00  ZUQW62   0                        EOM
-  00001C02  0760FF80  21640258...00010072       33     600.0 MB   U GRD
-    211  00  ZUQW60   0                        EOM
-  00001C02  0760FF80  2164023F...00010072       33     575.0 MB   U GRD
-    211  00  ZUQW57   0                        EOM
-  00001C02  0760FF80  21640226...00010072       33     550.0 MB   U GRD
-    211  00  ZUQW55   0                        EOM
-  00001C02  0760FF80  2164020D...00010072       33     525.0 MB   U GRD
-    211  00  ZUQW52   0                        EOM
-  00001C02  0760FF80  216401F4...00010072       33     500.0 MB   U GRD         
-    211  00  ZUQW50   0                        EOM
-  00001C02  0760FF80  216401C2...00010072       33     450.0 MB   U GRD
-    211  00  ZUQW45   0                        EOM
-  00001C02  0760FF80  21640190...00010072       33     400.0 MB   U GRD         
-    211  00  ZUQW40   0                        EOM
-  00001C02  0760FF80  2164015E...00010072       33     350.0 MB   U GRD
-    211  00  ZUQW35   0                        EOM
-  00001C02  0760FF80  2164012C...00010072       33     300.0 MB   U GRD         
-    211  00  ZUQW30   0                        EOM
-  00001C02  0760FF80  216400FA...00010072       33     250.0 MB   U GRD         
-    211  00  ZUQW25   0                        EOM
-  00001C02  0760FF80  216400C8...00010072       33     200.0 MB   U GRD         
-    211  00  ZUQW20   0                        EOM
-  00001C02  0760FF80  21640096...00010072       33     150.0 MB   U GRD         
-    211  00  ZUQW15   0                        EOM
-  00001C02  0760FF80  21640064...00010072       33     100.0 MB   U GRD         
-    211  00  ZUQW10   0                        EOM
-  00001C02  0760FF80  226403E8...00010072       34    1000.0 MB   V GRD
-    211  00  ZVQW99   0                        EOM
-  00001C02  0760FF80  226403CF...00010072       34     975.0 MB   V GRD
-    211  00  ZVQW93   0                        EOM
-  00001C02  0760FF80  226403B6...00010072       34     950.0 MB   V GRD
-    211  00  ZVQW95   0                        EOM
-  00001C02  0760FF80  2264039D...00010072       34     925.0 MB   V GRD
-    211  00  ZVQW92   0                        EOM
-  00001C02  0760FF80  22640384...00010072       34     900.0 MB   V GRD
-    211  00  ZVQW90   0                        EOM
-  00001C02  0760FF80  2264036B...00010072       34     875.0 MB   V GRD
-    211  00  ZVQW91   0                        EOM
-  00001C02  0760FF80  22640352...00010072       34     850.0 MB   V GRD         
-    211  00  ZVQW85   0                        EOM
-  00001C02  0760FF80  22640339...00010072       34     825.0 MB   V GRD
-    211  00  ZVQW82   0                        EOM
-  00001C02  0760FF80  22640320...00010072       34     800.0 MB   V GRD
-    211  00  ZVQW80   0                        EOM
-  00001C02  0760FF80  22640307...00010072       34     775.0 MB   V GRD
-    211  00  ZVQW77   0                        EOM
-  00001C02  0760FF80  226402EE...00010072       34     750.0 MB   V GRD
-    211  00  ZVQW75   0                        EOM
-  00001C02  0760FF80  226402D5...00010072       34     725.0 MB   V GRD
-    211  00  ZVQW72   0                        EOM
-  00001C02  0760FF80  226402BC...00010072       34     700.0 MB   V GRD         
-    211  00  ZVQW70   0                        EOM
-  00001C02  0760FF80  226402A3...00010072       34     675.0 MB   V GRD
-    211  00  ZVQW67   0                        EOM
-  00001C02  0760FF80  2264028A...00010072       34     650.0 MB   V GRD
-    211  00  ZVQW65   0                        EOM
-  00001C02  0760FF80  22640271...00010072       34     625.0 MB   V GRD
-    211  00  ZVQW62   0                        EOM
-  00001C02  0760FF80  22640258...00010072       34     600.0 MB   V GRD
-    211  00  ZVQW60   0                        EOM
-  00001C02  0760FF80  2264023F...00010072       34     575.0 MB   V GRD
-    211  00  ZVQW57   0                        EOM
-  00001C02  0760FF80  22640226...00010072       34     550.0 MB   V GRD
-    211  00  ZVQW55   0                        EOM
-  00001C02  0760FF80  2264020D...00010072       34     525.0 MB   V GRD
-    211  00  ZVQW52   0                        EOM
-  00001C02  0760FF80  226401F4...00010072       34     500.0 MB   V GRD         
-    211  00  ZVQW50   0                        EOM
-  00001C02  0760FF80  226401C2...00010072       34     450.0 MB   V GRD
-    211  00  ZVQW45   0                        EOM
-  00001C02  0760FF80  22640190...00010072       34     400.0 MB   V GRD         
-    211  00  ZVQW40   0                        EOM
-  00001C02  0760FF80  2264015E...00010072       34     350.0 MB   V GRD
-    211  00  ZVQW35   0                        EOM
-  00001C02  0760FF80  2264012C...00010072       34     300.0 MB   V GRD         
-    211  00  ZVQW30   0                        EOM
-  00001C02  0760FF80  226400FA...00010072       34     250.0 MB   V GRD         
-    211  00  ZVQW25   0                        EOM
-  00001C02  0760FF80  226400C8...00010072       34     200.0 MB   V GRD         
-    211  00  ZVQW20   0                        EOM
-  00001C02  0760FF80  22640096...00010072       34     150.0 MB   V GRD         
-    211  00  ZVQW15   0                        EOM
-  00001C02  0760FF80  22640064...00010072       34     100.0 MB   V GRD         
-    211  00  ZVQW10   0                        EOM
-  00001C02  0760FF80  02660000...00010072       02           MSL  PRMSL         
-    211  00  ZPQW89   0                        EOM
-  00001C02  0760FF80  346403E8...00010072       52    1000.0 MB   R H
-    211  00  ZRQW99   0                        EOM
-  00001C02  0760FF80  346403CF...00010072       52     975.0 MB   R H
-    211  00  ZRQW93   0                        EOM
-  00001C02  0760FF80  346403B6...00010072       52     950.0 MB   R H
-    211  00  ZRQW95   0                        EOM
-  00001C02  0760FF80  3464039D...00010072       52     925.0 MB   R H
-    211  00  ZRQW92   0                        EOM
-  00001C02  0760FF80  34640384...00010072       52     900.0 MB   R H
-    211  00  ZRQW90   0                        EOM
-  00001C02  0760FF80  3464036B...00010072       52     875.0 MB   R H
-    211  00  ZRQW91   0                        EOM
-  00001C02  0760FF80  34640352...00010072       52     850.0 MB   R H           
-    211  00  ZRQW85   0                        EOM
-  00001C02  0760FF80  34640339...00010072       52     825.0 MB   R H
-    211  00  ZRQW82   0                        EOM
-  00001C02  0760FF80  34640320...00010072       52     800.0 MB   R H
-    211  00  ZRQW80   0                        EOM
-  00001C02  0760FF80  34640307...00010072       52     775.0 MB   R H
-    211  00  ZRQW77   0                        EOM
-  00001C02  0760FF80  346402EE...00010072       52     750.0 MB   R H
-    211  00  ZRQW75   0                        EOM
-  00001C02  0760FF80  346402D5...00010072       52     725.0 MB   R H
-    211  00  ZRQW72   0                        EOM
-  00001C02  0760FF80  346402BC...00010072       52     700.0 MB   R H           
-    211  00  ZRQW70   0                        EOM
-  00001C02  0760FF80  346402A3...00010072       52     675.0 MB   R H
-    211  00  ZRQW67   0                        EOM
-  00001C02  0760FF80  3464028A...00010072       52     650.0 MB   R H
-    211  00  ZRQW65   0                        EOM
-  00001C02  0760FF80  34640271...00010072       52     625.0 MB   R H
-    211  00  ZRQW62   0                        EOM
-  00001C02  0760FF80  34640258...00010072       52     600.0 MB   R H
-    211  00  ZRQW60   0                        EOM
-  00001C02  0760FF80  3464023F...00010072       52     575.0 MB   R H
-    211  00  ZRQW57   0                        EOM
-  00001C02  0760FF80  34640226...00010072       52     550.0 MB   R H
-    211  00  ZRQW55   0                        EOM
-  00001C02  0760FF80  3464020D...00010072       52     525.0 MB   R H
-    211  00  ZRQW52   0                        EOM
-  00001C02  0760FF80  346401F4...00010072       52     500.0 MB   R H           
-    211  00  ZRQW50   0                        EOM
-  00001C02  0760FF80  346401C2...00010072       52     450.0 MB   R H
-    211  00  ZRQW45   0                        EOM
-  00001C02  0760FF80  34640190...00010072       52     400.0 MB   R H           
-    211  00  ZRQW40   0                        EOM
-  00001C02  0760FF80  3464015E...00010072       52     350.0 MB   R H
-    211  00  ZRQW35   0                        EOM
-  00001C02  0760FF80  3464012C...00010072       52     300.0 MB   R H           
-    211  00  ZRQW30   0                        EOM
-  00001C02  0760FF80  346400FA...00010072       52     250.0 MB   R H
-    211  00  ZRQW25   0                        EOM
-  00001C02  0760FF80  346400C8...00010072       52     200.0 MB   R H
-    211  00  ZRQW20   0                        EOM
-  00001C02  0760FF80  34640096...00010072       52     150.0 MB   R H
-    211  00  ZRQW15   0                        EOM
-  00001C02  0760FF80  34640064...00010072       52     100.0 MB   R H
-    211  00  ZRQW10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010072       11    1000.0 MB   TMP
-    211  00  ZTQW99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010072       11     975.0 MB   TMP
-    211  00  ZTQW93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010072       11     950.0 MB   TMP
-    211  00  ZTQW95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010072       11     925.0 MB   TMP
-    211  00  ZTQW92   0                        EOM
-  00001C02  0760FF80  0B640384...00010072       11     900.0 MB   TMP
-    211  00  ZTQW90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010072       11     875.0 MB   TMP
-    211  00  ZTQW91   0                        EOM
-  00001C02  0760FF80  0B640352...00010072       11     850.0 MB   TMP           
-    211  00  ZTQW85   0                        EOM
-  00001C02  0760FF80  0B640339...00010072       11     825.0 MB   TMP
-    211  00  ZTQW82   0                        EOM
-  00001C02  0760FF80  0B640320...00010072       11     800.0 MB   TMP
-    211  00  ZTQW80   0                        EOM
-  00001C02  0760FF80  0B640307...00010072       11     775.0 MB   TMP
-    211  00  ZTQW77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010072       11     750.0 MB   TMP
-    211  00  ZTQW75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010072       11     725.0 MB   TMP
-    211  00  ZTQW72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010072       11     700.0 MB   TMP           
-    211  00  ZTQW70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010072       11     675.0 MB   TMP
-    211  00  ZTQW67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010072       11     650.0 MB   TMP
-    211  00  ZTQW65   0                        EOM
-  00001C02  0760FF80  0B640271...00010072       11     625.0 MB   TMP
-    211  00  ZTQW62   0                        EOM
-  00001C02  0760FF80  0B640258...00010072       11     600.0 MB   TMP
-    211  00  ZTQW60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010072       11     575.0 MB   TMP
-    211  00  ZTQW57   0                        EOM
-  00001C02  0760FF80  0B640226...00010072       11     550.0 MB   TMP
-    211  00  ZTQW55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010072       11     525.0 MB   TMP
-    211  00  ZTQW52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010072       11     500.0 MB   TMP           
-    211  00  ZTQW50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010072       11     450.0 MB   TMP
-    211  00  ZTQW45   0                        EOM
-  00001C02  0760FF80  0B640190...00010072       11     400.0 MB   TMP           
-    211  00  ZTQW40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010072       11     350.0 MB   TMP
-    211  00  ZTQW35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010072       11     300.0 MB   TMP           
-    211  00  ZTQW30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010072       11     250.0 MB   TMP           
-    211  00  ZTQW25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010072       11     200.0 MB   TMP           
-    211  00  ZTQW20   0                        EOM
-  00001C02  0760FF80  0B640096...00010072       11     150.0 MB   TMP           
-    211  00  ZTQW15   0                        EOM
-  00001C02  0760FF80  0B640064...00010072       11     100.0 MB   TMP           
-    211  00  ZTQW10   0                        EOM
-  00001C02  0760FF80  28640352...00010072       40     850.0 MB  DZDT           
-    211  00  ZOQW85   0                        EOM
-  00001C02  0760FF80  286402BC...00010072       40     700.0 MB  DZDT           
-    211  00  ZOQW70   0                        EOM
-  00001C02  0760FF80  286401F4...00010072       40     500.0 MB  DZDT           
-    211  00  ZOQW50   0                        EOM
-  00001C02  0760FF80  28640190...00010072       40     400.0 MB  DZDT           
-    211  00  ZOQW40   0                        EOM
-  00001C02  0760FF80  2864012C...00010072       40     300.0 MB  DZDT           
-    211  00  ZOQW30   0                        EOM
-  00001C02  0760FF80  286400FA...00010072       40     250.0 MB  DZDT           
-    211  00  ZOQW25   0                        EOM
-  00001C02  0760FF80  286400C8...00010072       40     200.0 MB  DZDT           
-    211  00  ZOQW20   0                        EOM
-  00001C02  0760FF80  28640096...00010072       40     150.0 MB  DZDT           
-    211  00  ZOQW15   0                        EOM
-  00001C02  0760FF80  28640064...00010072       40     100.0 MB  DZDT           
-    211  00  ZOQW10   0                        EOM
-  00001C02  0760FF80  01010000...00010072       01          SFC  PRES           
-    211  00  ZPQW98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010072       52        44/100  R H           
-    211  00  ZRQW00   0                        EOM
-  00001C02  0760FF80  36C80000...00010072       54          EATM  P WAT         
-    211  00  ZFQW00   0                        EOM
-  00001C02  0760FF80  0B690002...00010072       11          2m/SFC TMP         
-    211  00  ZTQW98   0                        EOM
-  00001C02  0760FF80  34741E00...00010072       52      BNDRY/SPD  R H          
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  0B070000...00010072       11            TRO TMP           
-    211  00  ZTQW97   0                        EOM
-  00001C02  0760FF80  01070000...00010072       01            TRO PRES          
-    211  00  ZPQW97   0                        EOM
-  00001C02  0760FF80  21741E00...00010072       33           SPD  U GRD         
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  22741E00...00010072       34           SPD  V GRD         
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  21070000...00010072       33            TRO U GRD         
-    211  00  ZUQW97   0                        EOM
-  00001C02  0760FF80  22070000...00010072       34            TRO V GRD         
-    211  00  ZVQW97   0                        EOM
-  00001C02  0760FF80  88070000...00010072      136            TRO VW SH         
-    211  00  ZBQW97   0                        EOM
-  00001C02  0760FF80  3D010000...00010072       61            SFC A PCP         
-    211  00  ZEQW98   0                        EOM
-  00001C02  0760FF80  83010000...00010072      131            SFC LFT X         
-    211  00  ZXQW98   0                        EOM
-  00001C02  0760FF80  29640352...00010072       41    850.0 MB    ABS V         
-    211  00  ZCQW85   0                        EOM
-  00001C02  0760FF80  296402BC...00010072       41    700.0 MB    ABS V         
-    211  00  ZCQW70   0                        EOM
-  00001C02  0760FF80  296401F4...00010072       41    500.0 MB    ABS V         
-    211  00  ZCQW50   0                        EOM
-  00001C02  0760FF80  296400FA...00010072       41    250.0 MB    ABS V         
-    211  00  ZCQW25   0                        EOM
-  00001C02  0760FF80  9D010000...00010072      157          SFC   CAPE
-    211  00  ZWQW98   0                        EOM
-  00001C02  0760FF80  9C010000...00010072      156          SFC   CIN
-    211  00  ZYQW98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010072      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQW86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010072      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQW86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010072       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010072       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010072       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010072       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  0B749678...00010072       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010072       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQW86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010072       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010072       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  3474785A...00010072       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  34749678...00010072       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  3474B496...00010072       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQW86   0                        EOM
-  00001C02  0760FF80  21741E00...00010072       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010072       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010072       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  2174785A...00010072       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  21749678...00010072       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  2174B496...00010072       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQW86   0                        EOM
-  00001C02  0760FF80  22741E00...00010072       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010072       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010072       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  2274785A...00010072       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  22749678...00010072       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  2274B496...00010072       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQW86   0                        EOM
-  00001C02  0760FF80  0B690002...00010072       11    2  HTGL     TMP
-    211  00  ZTQW98   0                        EOM
-  00001C02  0760FF80  34690002...00010072       52    2  HTGL     R H
-    211  00  ZRQW98   0                        EOM
-  00001C02  0760FF80  2169000A...00010072       33   10  HTGL     U GRD
-    211  00  ZUQW98   0                        EOM
-  00001C02  0760FF80  2269000A...00010072       34   10  HTGL     V GRD
-    211  00  ZVQW98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs120.211 b/parm/wmo/grib_awpgfs120.211
deleted file mode 100755
index c77c293d91..0000000000
--- a/parm/wmo/grib_awpgfs120.211
+++ /dev/null
@@ -1,409 +0,0 @@
-  00001C02  0760FF80  076403E8...00010078       07    1000.0 MB   HGT           
-    211  00  YHQO99   0                        EOM
-  00001C02  0760FF80  076403CF...00010078       07     975.0 MB   HGT
-    211  00  YHQO93   0                        EOM
-  00001C02  0760FF80  076403B6...00010078       07     950.0 MB   HGT
-    211  00  YHQO95   0                        EOM
-  00001C02  0760FF80  0764039D...00010078       07     925.0 MB   HGT
-    211  00  YHQO92   0                        EOM
-  00001C02  0760FF80  07640384...00010078       07     900.0 MB   HGT
-    211  00  YHQO90   0                        EOM
-  00001C02  0760FF80  0764036B...00010078       07     875.0 MB   HGT
-    211  00  YHQO91   0                        EOM
-  00001C02  0760FF80  07640352...00010078       07     850.0 MB   HGT           
-    211  00  YHQO85   0                        EOM
-  00001C02  0760FF80  07640339...00010078       07     825.0 MB   HGT
-    211  00  YHQO82   0                        EOM
-  00001C02  0760FF80  07640320...00010078       07     800.0 MB   HGT
-    211  00  YHQO80   0                        EOM
-  00001C02  0760FF80  07640307...00010078       07     775.0 MB   HGT
-    211  00  YHQO77   0                        EOM
-  00001C02  0760FF80  076402EE...00010078       07     750.0 MB   HGT
-    211  00  YHQO75   0                        EOM
-  00001C02  0760FF80  076402D5...00010078       07     725.0 MB   HGT
-    211  00  YHQO72   0                        EOM
-  00001C02  0760FF80  076402BC...00010078       07     700.0 MB   HGT           
-    211  00  YHQO70   0                        EOM
-  00001C02  0760FF80  076402A3...00010078       07     675.0 MB   HGT
-    211  00  YHQO67   0                        EOM
-  00001C02  0760FF80  0764028A...00010078       07     650.0 MB   HGT
-    211  00  YHQO65   0                        EOM
-  00001C02  0760FF80  07640271...00010078       07     625.0 MB   HGT
-    211  00  YHQO62   0                        EOM
-  00001C02  0760FF80  07640258...00010078       07     600.0 MB   HGT
-    211  00  YHQO60   0                        EOM
-  00001C02  0760FF80  0764023F...00010078       07     575.0 MB   HGT
-    211  00  YHQO57   0                        EOM
-  00001C02  0760FF80  07640226...00010078       07     550.0 MB   HGT
-    211  00  YHQO55   0                        EOM
-  00001C02  0760FF80  0764020D...00010078       07     525.0 MB   HGT
-    211  00  YHQO52   0                        EOM
-  00001C02  0760FF80  076401F4...00010078       07     500.0 MB   HGT           
-    211  00  YHQO50   0                        EOM
-  00001C02  0760FF80  076401C2...00010078       07     450.0 MB   HGT
-    211  00  YHQO45   0                        EOM
-  00001C02  0760FF80  07640190...00010078       07     400.0 MB   HGT           
-    211  00  YHQO40   0                        EOM
-  00001C02  0760FF80  0764015E...00010078       07     350.0 MB   HGT
-    211  00  YHQO35   0                        EOM
-  00001C02  0760FF80  0764012C...00010078       07     300.0 MB   HGT           
-    211  00  YHQO30   0                        EOM
-  00001C02  0760FF80  076400FA...00010078       07     250.0 MB   HGT           
-    211  00  YHQO25   0                        EOM
-  00001C02  0760FF80  076400C8...00010078       07     200.0 MB   HGT           
-    211  00  YHQO20   0                        EOM
-  00001C02  0760FF80  07640096...00010078       07     150.0 MB   HGT           
-    211  00  YHQO15   0                        EOM
-  00001C02  0760FF80  07640064...00010078       07     100.0 MB   HGT           
-    211  00  YHQO10   0                        EOM
-  00001C02  0760FF80  216403E8...00010078       33    1000.0 MB   U GRD
-    211  00  YUQO99   0                        EOM
-  00001C02  0760FF80  216403CF...00010078       33     975.0 MB   U GRD
-    211  00  YUQO93   0                        EOM
-  00001C02  0760FF80  216403B6...00010078       33     950.0 MB   U GRD
-    211  00  YUQO95   0                        EOM
-  00001C02  0760FF80  2164039D...00010078       33     925.0 MB   U GRD
-    211  00  YUQO92   0                        EOM
-  00001C02  0760FF80  21640384...00010078       33     900.0 MB   U GRD
-    211  00  YUQO90   0                        EOM
-  00001C02  0760FF80  2164036B...00010078       33     875.0 MB   U GRD
-    211  00  YUQO91   0                        EOM
-  00001C02  0760FF80  21640352...00010078       33     850.0 MB   U GRD         
-    211  00  YUQO85   0                        EOM
-  00001C02  0760FF80  21640339...00010078       33     825.0 MB   U GRD
-    211  00  YUQO82   0                        EOM
-  00001C02  0760FF80  21640320...00010078       33     800.0 MB   U GRD
-    211  00  YUQO80   0                        EOM
-  00001C02  0760FF80  21640307...00010078       33     775.0 MB   U GRD
-    211  00  YUQO77   0                        EOM
-  00001C02  0760FF80  216402EE...00010078       33     750.0 MB   U GRD
-    211  00  YUQO75   0                        EOM
-  00001C02  0760FF80  216402D5...00010078       33     725.0 MB   U GRD
-    211  00  YUQO72   0                        EOM
-  00001C02  0760FF80  216402BC...00010078       33     700.0 MB   U GRD         
-    211  00  YUQO70   0                        EOM
-  00001C02  0760FF80  216402A3...00010078       33     675.0 MB   U GRD
-    211  00  YUQO67   0                        EOM
-  00001C02  0760FF80  2164028A...00010078       33     650.0 MB   U GRD
-    211  00  YUQO65   0                        EOM
-  00001C02  0760FF80  21640271...00010078       33     625.0 MB   U GRD
-    211  00  YUQO62   0                        EOM
-  00001C02  0760FF80  21640258...00010078       33     600.0 MB   U GRD
-    211  00  YUQO60   0                        EOM
-  00001C02  0760FF80  2164023F...00010078       33     575.0 MB   U GRD
-    211  00  YUQO57   0                        EOM
-  00001C02  0760FF80  21640226...00010078       33     550.0 MB   U GRD
-    211  00  YUQO55   0                        EOM
-  00001C02  0760FF80  2164020D...00010078       33     525.0 MB   U GRD
-    211  00  YUQO52   0                        EOM
-  00001C02  0760FF80  216401F4...00010078       33     500.0 MB   U GRD         
-    211  00  YUQO50   0                        EOM
-  00001C02  0760FF80  216401C2...00010078       33     450.0 MB   U GRD
-    211  00  YUQO45   0                        EOM
-  00001C02  0760FF80  21640190...00010078       33     400.0 MB   U GRD         
-    211  00  YUQO40   0                        EOM
-  00001C02  0760FF80  2164015E...00010078       33     350.0 MB   U GRD
-    211  00  YUQO35   0                        EOM
-  00001C02  0760FF80  2164012C...00010078       33     300.0 MB   U GRD         
-    211  00  YUQO30   0                        EOM
-  00001C02  0760FF80  216400FA...00010078       33     250.0 MB   U GRD         
-    211  00  YUQO25   0                        EOM
-  00001C02  0760FF80  216400C8...00010078       33     200.0 MB   U GRD         
-    211  00  YUQO20   0                        EOM
-  00001C02  0760FF80  21640096...00010078       33     150.0 MB   U GRD         
-    211  00  YUQO15   0                        EOM
-  00001C02  0760FF80  21640064...00010078       33     100.0 MB   U GRD         
-    211  00  YUQO10   0                        EOM
-  00001C02  0760FF80  226403E8...00010078       34    1000.0 MB   V GRD
-    211  00  YVQO99   0                        EOM
-  00001C02  0760FF80  226403CF...00010078       34     975.0 MB   V GRD
-    211  00  YVQO93   0                        EOM
-  00001C02  0760FF80  226403B6...00010078       34     950.0 MB   V GRD
-    211  00  YVQO95   0                        EOM
-  00001C02  0760FF80  2264039D...00010078       34     925.0 MB   V GRD
-    211  00  YVQO92   0                        EOM
-  00001C02  0760FF80  22640384...00010078       34     900.0 MB   V GRD
-    211  00  YVQO90   0                        EOM
-  00001C02  0760FF80  2264036B...00010078       34     875.0 MB   V GRD
-    211  00  YVQO91   0                        EOM
-  00001C02  0760FF80  22640352...00010078       34     850.0 MB   V GRD         
-    211  00  YVQO85   0                        EOM
-  00001C02  0760FF80  22640339...00010078       34     825.0 MB   V GRD
-    211  00  YVQO82   0                        EOM
-  00001C02  0760FF80  22640320...00010078       34     800.0 MB   V GRD
-    211  00  YVQO80   0                        EOM
-  00001C02  0760FF80  22640307...00010078       34     775.0 MB   V GRD
-    211  00  YVQO77   0                        EOM
-  00001C02  0760FF80  226402EE...00010078       34     750.0 MB   V GRD
-    211  00  YVQO75   0                        EOM
-  00001C02  0760FF80  226402D5...00010078       34     725.0 MB   V GRD
-    211  00  YVQO72   0                        EOM
-  00001C02  0760FF80  226402BC...00010078       34     700.0 MB   V GRD         
-    211  00  YVQO70   0                        EOM
-  00001C02  0760FF80  226402A3...00010078       34     675.0 MB   V GRD
-    211  00  YVQO67   0                        EOM
-  00001C02  0760FF80  2264028A...00010078       34     650.0 MB   V GRD
-    211  00  YVQO65   0                        EOM
-  00001C02  0760FF80  22640271...00010078       34     625.0 MB   V GRD
-    211  00  YVQO62   0                        EOM
-  00001C02  0760FF80  22640258...00010078       34     600.0 MB   V GRD
-    211  00  YVQO60   0                        EOM
-  00001C02  0760FF80  2264023F...00010078       34     575.0 MB   V GRD
-    211  00  YVQO57   0                        EOM
-  00001C02  0760FF80  22640226...00010078       34     550.0 MB   V GRD
-    211  00  YVQO55   0                        EOM
-  00001C02  0760FF80  2264020D...00010078       34     525.0 MB   V GRD
-    211  00  YVQO52   0                        EOM
-  00001C02  0760FF80  226401F4...00010078       34     500.0 MB   V GRD         
-    211  00  YVQO50   0                        EOM
-  00001C02  0760FF80  226401C2...00010078       34     450.0 MB   V GRD
-    211  00  YVQO45   0                        EOM
-  00001C02  0760FF80  22640190...00010078       34     400.0 MB   V GRD         
-    211  00  YVQO40   0                        EOM
-  00001C02  0760FF80  2264015E...00010078       34     350.0 MB   V GRD
-    211  00  YVQO35   0                        EOM
-  00001C02  0760FF80  2264012C...00010078       34     300.0 MB   V GRD         
-    211  00  YVQO30   0                        EOM
-  00001C02  0760FF80  226400FA...00010078       34     250.0 MB   V GRD         
-    211  00  YVQO25   0                        EOM
-  00001C02  0760FF80  226400C8...00010078       34     200.0 MB   V GRD         
-    211  00  YVQO20   0                        EOM
-  00001C02  0760FF80  22640096...00010078       34     150.0 MB   V GRD         
-    211  00  YVQO15   0                        EOM
-  00001C02  0760FF80  22640064...00010078       34     100.0 MB   V GRD         
-    211  00  YVQO10   0                        EOM
-  00001C02  0760FF80  02660000...00010078       02           MSL  PRMSL         
-    211  00  YPQO89   0                        EOM
-  00001C02  0760FF80  346403E8...00010078       52    1000.0 MB   R H
-    211  00  YRQO99   0                        EOM
-  00001C02  0760FF80  346403CF...00010078       52     975.0 MB   R H
-    211  00  YRQO93   0                        EOM
-  00001C02  0760FF80  346403B6...00010078       52     950.0 MB   R H
-    211  00  YRQO95   0                        EOM
-  00001C02  0760FF80  3464039D...00010078       52     925.0 MB   R H
-    211  00  YRQO92   0                        EOM
-  00001C02  0760FF80  34640384...00010078       52     900.0 MB   R H
-    211  00  YRQO90   0                        EOM
-  00001C02  0760FF80  3464036B...00010078       52     875.0 MB   R H
-    211  00  YRQO91   0                        EOM
-  00001C02  0760FF80  34640352...00010078       52     850.0 MB   R H           
-    211  00  YRQO85   0                        EOM
-  00001C02  0760FF80  34640339...00010078       52     825.0 MB   R H
-    211  00  YRQO82   0                        EOM
-  00001C02  0760FF80  34640320...00010078       52     800.0 MB   R H
-    211  00  YRQO80   0                        EOM
-  00001C02  0760FF80  34640307...00010078       52     775.0 MB   R H
-    211  00  YRQO77   0                        EOM
-  00001C02  0760FF80  346402EE...00010078       52     750.0 MB   R H
-    211  00  YRQO75   0                        EOM
-  00001C02  0760FF80  346402D5...00010078       52     725.0 MB   R H
-    211  00  YRQO72   0                        EOM
-  00001C02  0760FF80  346402BC...00010078       52     700.0 MB   R H           
-    211  00  YRQO70   0                        EOM
-  00001C02  0760FF80  346402A3...00010078       52     675.0 MB   R H
-    211  00  YRQO67   0                        EOM
-  00001C02  0760FF80  3464028A...00010078       52     650.0 MB   R H
-    211  00  YRQO65   0                        EOM
-  00001C02  0760FF80  34640271...00010078       52     625.0 MB   R H
-    211  00  YRQO62   0                        EOM
-  00001C02  0760FF80  34640258...00010078       52     600.0 MB   R H
-    211  00  YRQO60   0                        EOM
-  00001C02  0760FF80  3464023F...00010078       52     575.0 MB   R H
-    211  00  YRQO57   0                        EOM
-  00001C02  0760FF80  34640226...00010078       52     550.0 MB   R H
-    211  00  YRQO55   0                        EOM
-  00001C02  0760FF80  3464020D...00010078       52     525.0 MB   R H
-    211  00  YRQO52   0                        EOM
-  00001C02  0760FF80  346401F4...00010078       52     500.0 MB   R H           
-    211  00  YRQO50   0                        EOM
-  00001C02  0760FF80  346401C2...00010078       52     450.0 MB   R H
-    211  00  YRQO45   0                        EOM
-  00001C02  0760FF80  34640190...00010078       52     400.0 MB   R H           
-    211  00  YRQO40   0                        EOM
-  00001C02  0760FF80  3464015E...00010078       52     350.0 MB   R H
-    211  00  YRQO35   0                        EOM
-  00001C02  0760FF80  3464012C...00010078       52     300.0 MB   R H           
-    211  00  YRQO30   0                        EOM
-  00001C02  0760FF80  346400FA...00010078       52     250.0 MB   R H
-    211  00  YRQO25   0                        EOM
-  00001C02  0760FF80  346400C8...00010078       52     200.0 MB   R H
-    211  00  YRQO20   0                        EOM
-  00001C02  0760FF80  34640096...00010078       52     150.0 MB   R H
-    211  00  YRQO15   0                        EOM
-  00001C02  0760FF80  34640064...00010078       52     100.0 MB   R H
-    211  00  YRQO10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010078       11    1000.0 MB   TMP
-    211  00  YTQO99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010078       11     975.0 MB   TMP
-    211  00  YTQO93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010078       11     950.0 MB   TMP
-    211  00  YTQO95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010078       11     925.0 MB   TMP
-    211  00  YTQO92   0                        EOM
-  00001C02  0760FF80  0B640384...00010078       11     900.0 MB   TMP
-    211  00  YTQO90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010078       11     875.0 MB   TMP
-    211  00  YTQO91   0                        EOM
-  00001C02  0760FF80  0B640352...00010078       11     850.0 MB   TMP           
-    211  00  YTQO85   0                        EOM
-  00001C02  0760FF80  0B640339...00010078       11     825.0 MB   TMP
-    211  00  YTQO82   0                        EOM
-  00001C02  0760FF80  0B640320...00010078       11     800.0 MB   TMP
-    211  00  YTQO80   0                        EOM
-  00001C02  0760FF80  0B640307...00010078       11     775.0 MB   TMP
-    211  00  YTQO77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010078       11     750.0 MB   TMP
-    211  00  YTQO75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010078       11     725.0 MB   TMP
-    211  00  YTQO72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010078       11     700.0 MB   TMP           
-    211  00  YTQO70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010078       11     675.0 MB   TMP
-    211  00  YTQO67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010078       11     650.0 MB   TMP
-    211  00  YTQO65   0                        EOM
-  00001C02  0760FF80  0B640271...00010078       11     625.0 MB   TMP
-    211  00  YTQO62   0                        EOM
-  00001C02  0760FF80  0B640258...00010078       11     600.0 MB   TMP
-    211  00  YTQO60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010078       11     575.0 MB   TMP
-    211  00  YTQO57   0                        EOM
-  00001C02  0760FF80  0B640226...00010078       11     550.0 MB   TMP
-    211  00  YTQO55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010078       11     525.0 MB   TMP
-    211  00  YTQO52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010078       11     500.0 MB   TMP           
-    211  00  YTQO50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010078       11     450.0 MB   TMP
-    211  00  YTQO45   0                        EOM
-  00001C02  0760FF80  0B640190...00010078       11     400.0 MB   TMP           
-    211  00  YTQO40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010078       11     350.0 MB   TMP
-    211  00  YTQO35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010078       11     300.0 MB   TMP           
-    211  00  YTQO30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010078       11     250.0 MB   TMP           
-    211  00  YTQO25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010078       11     200.0 MB   TMP           
-    211  00  YTQO20   0                        EOM
-  00001C02  0760FF80  0B640096...00010078       11     150.0 MB   TMP           
-    211  00  YTQO15   0                        EOM
-  00001C02  0760FF80  0B640064...00010078       11     100.0 MB   TMP           
-    211  00  YTQO10   0                        EOM
-  00001C02  0760FF80  28640352...00010078       40     850.0 MB  DZDT           
-    211  00  YOQO85   0                        EOM
-  00001C02  0760FF80  286402BC...00010078       40     700.0 MB  DZDT           
-    211  00  YOQO70   0                        EOM
-  00001C02  0760FF80  286401F4...00010078       40     500.0 MB  DZDT           
-    211  00  YOQO50   0                        EOM
-  00001C02  0760FF80  28640190...00010078       40     400.0 MB  DZDT           
-    211  00  YOQO40   0                        EOM
-  00001C02  0760FF80  2864012C...00010078       40     300.0 MB  DZDT           
-    211  00  YOQO30   0                        EOM
-  00001C02  0760FF80  286400FA...00010078       40     250.0 MB  DZDT           
-    211  00  YOQO25   0                        EOM
-  00001C02  0760FF80  286400C8...00010078       40     200.0 MB  DZDT           
-    211  00  YOQO20   0                        EOM
-  00001C02  0760FF80  28640096...00010078       40     150.0 MB  DZDT           
-    211  00  YOQO15   0                        EOM
-  00001C02  0760FF80  28640064...00010078       40     100.0 MB  DZDT           
-    211  00  YOQO10   0                        EOM
-  00001C02  0760FF80  01010000...00010078       01          SFC  PRES           
-    211  00  YPQO98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010078       52        44/100  R H           
-    211  00  YRQO00   0                        EOM
-  00001C02  0760FF80  36C80000...00010078       54          EATM  P WAT         
-    211  00  YFQO00   0                        EOM
-  00001C02  0760FF80  0B690002...00010078       11          2m/SFC TMP         
-    211  00  YTQO98   0                        EOM
-  00001C02  0760FF80  34741E00...00010078       52     BNDRY/SPD  R H           
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  0B070000...00010078       11            TRO TMP           
-    211  00  YTQO97   0                        EOM
-  00001C02  0760FF80  01070000...00010078       01            TRO PRES          
-    211  00  YPQO97   0                        EOM
-  00001C02  0760FF80  21741E00...00010078       33           SPD  U GRD         
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  22741E00...00010078       34           SPD  V GRD         
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  21070000...00010078       33            TRO U GRD         
-    211  00  YUQO97   0                        EOM
-  00001C02  0760FF80  22070000...00010078       34            TRO V GRD         
-    211  00  YVQO97   0                        EOM
-  00001C02  0760FF80  88070000...00010078      136            TRO VW SH         
-    211  00  YBQO97   0                        EOM
-  00001C02  0760FF80  3D010000...00010078       61            SFC A PCP         
-    211  00  YEQO98   0                        EOM
-  00001C02  0760FF80  83010000...00010078      131            SFC LFT X         
-    211  00  YXQO98   0                        EOM
-  00001C02  0760FF80  29640352...00010078       41    850.0 MB    ABS V         
-    211  00  YCQO85   0                        EOM
-  00001C02  0760FF80  296402BC...00010078       41    700.0 MB    ABS V         
-    211  00  YCQO70   0                        EOM
-  00001C02  0760FF80  296401F4...00010078       41    500.0 MB    ABS V         
-    211  00  YCQO50   0                        EOM
-  00001C02  0760FF80  296400FA...00010078       41    250.0 MB    ABS V         
-    211  00  YCQO25   0                        EOM
-  00001C02  0760FF80  9D010000...00010078      157          SFC   CAPE
-    211  00  YWQO98   0                        EOM
-  00001C02  0760FF80  9C010000...00010078      156          SFC   CIN
-    211  00  YYQO98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010078      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQO86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010078      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQO86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010078       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010078       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010078       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010078       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  0B749678...00010078       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010078       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQO86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010078       52   60 SPDY  30 SPDY  R H
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010078       52   90 SPDY  60 SPDY  R H
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  3474785A...00010078       52  120 SPDY  90 SPDY  R H
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  34749678...00010078       52  150 SPDY 120 SPDY  R H
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  3474B496...00010078       52  180 SPDY 150 SPDY  R H
-    211  00  YRQO86   0                        EOM
-  00001C02  0760FF80  21741E00...00010078       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010078       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010078       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  2174785A...00010078       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  21749678...00010078       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  2174B496...00010078       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQO86   0                        EOM
-  00001C02  0760FF80  22741E00...00010078       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010078       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010078       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  2274785A...00010078       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  22749678...00010078       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  2274B496...00010078       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQO86   0                        EOM
-  00001C02  0760FF80  0B690002...00010078       11    2  HTGL     TMP
-    211  00  YTQO98   0                        EOM
-  00001C02  0760FF80  34690002...00010078       52    2  HTGL     R H
-    211  00  YRQO98   0                        EOM
-  00001C02  0760FF80  2169000A...00010078       33   10  HTGL     U GRD
-    211  00  YUQO98   0                        EOM
-  00001C02  0760FF80  2269000A...00010078       34   10  HTGL     V GRD
-    211  00  YVQO98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs126.211 b/parm/wmo/grib_awpgfs126.211
deleted file mode 100755
index b509bf9033..0000000000
--- a/parm/wmo/grib_awpgfs126.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...0001007E       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...0001007E       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...0001007E       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...0001007E       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...0001007E       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...0001007E       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...0001007E       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...0001007E       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...0001007E       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...0001007E       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...0001007E       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...0001007E       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...0001007E       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...0001007E       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...0001007E       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...0001007E       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...0001007E       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...0001007E       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...0001007E       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...0001007E       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...0001007E       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...0001007E       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...0001007E       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...0001007E       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...0001007E       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...0001007E       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...0001007E       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...0001007E       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...0001007E       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...0001007E       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...0001007E       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...0001007E       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...0001007E       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...0001007E       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...0001007E       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...0001007E       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...0001007E       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...0001007E       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...0001007E       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...0001007E       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...0001007E       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...0001007E       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...0001007E       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...0001007E       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...0001007E       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...0001007E       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...0001007E       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...0001007E       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...0001007E       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...0001007E       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...0001007E       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...0001007E       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...0001007E       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...0001007E       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...0001007E       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...0001007E       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...0001007E       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...0001007E       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...0001007E       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...0001007E       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...0001007E       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...0001007E       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...0001007E       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...0001007E       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...0001007E       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...0001007E       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...0001007E       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...0001007E       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...0001007E       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...0001007E       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...0001007E       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...0001007E       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...0001007E       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...0001007E       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...0001007E       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...0001007E       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...0001007E       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...0001007E       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...0001007E       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...0001007E       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...0001007E       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...0001007E       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...0001007E       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...0001007E       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...0001007E       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...0001007E       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...0001007E       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...0001007E       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...0001007E       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...0001007E       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...0001007E       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...0001007E       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...0001007E       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...0001007E       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...0001007E       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...0001007E       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...0001007E       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...0001007E       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...0001007E       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...0001007E       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...0001007E       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...0001007E       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...0001007E       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...0001007E       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...0001007E       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...0001007E       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...0001007E       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...0001007E       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...0001007E       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...0001007E       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...0001007E       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...0001007E       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...0001007E       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...0001007E       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...0001007E       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...0001007E       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...0001007E       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...0001007E       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001007E       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001007E       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001007E       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001007E       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...0001007E       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001007E       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...0001007E       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...0001007E       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...0001007E       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...0001007E       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001007E       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001007E       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001007E       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001007E       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001007E       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...0001007E       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...0001007E       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001007E       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...0001007E       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001007E       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001007E       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001007E       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...0001007E       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001007E       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001007E       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001007E       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001007E       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...0001007E       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...0001007E       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...0001007E       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...0001007E       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...0001007E       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...0001007E       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001007E       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...0001007E       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...0001007E      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...0001007E      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001007E      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001007E      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001007E       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001007E       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001007E       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001007E       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...0001007E       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001007E       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...0001007E       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001007E       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001007E       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...0001007E       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...0001007E       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...0001007E       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...0001007E       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001007E       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001007E       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...0001007E       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...0001007E       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...0001007E       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...0001007E       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001007E       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001007E       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...0001007E       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...0001007E       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...0001007E       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...0001007E       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...0001007E       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...0001007E       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...0001007E       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs132.211 b/parm/wmo/grib_awpgfs132.211
deleted file mode 100755
index f9a8c87ccf..0000000000
--- a/parm/wmo/grib_awpgfs132.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...00010084       07    1000.0 MB   HGT
-    211  00  YHQP99   0                        EOM
-  00001C02  0760FF80  076403CF...00010084       07     975.0 MB   HGT
-    211  00  YHQP93   0                        EOM
-  00001C02  0760FF80  076403B6...00010084       07     950.0 MB   HGT
-    211  00  YHQP95   0                        EOM
-  00001C02  0760FF80  0764039D...00010084       07     925.0 MB   HGT
-    211  00  YHQP92   0                        EOM
-  00001C02  0760FF80  07640384...00010084       07     900.0 MB   HGT
-    211  00  YHQP90   0                        EOM
-  00001C02  0760FF80  0764036B...00010084       07     875.0 MB   HGT
-    211  00  YHQP91   0                        EOM
-  00001C02  0760FF80  07640352...00010084       07     850.0 MB   HGT
-    211  00  YHQP85   0                        EOM
-  00001C02  0760FF80  07640339...00010084       07     825.0 MB   HGT
-    211  00  YHQP82   0                        EOM
-  00001C02  0760FF80  07640320...00010084       07     800.0 MB   HGT
-    211  00  YHQP80   0                        EOM
-  00001C02  0760FF80  07640307...00010084       07     775.0 MB   HGT
-    211  00  YHQP77   0                        EOM
-  00001C02  0760FF80  076402EE...00010084       07     750.0 MB   HGT
-    211  00  YHQP75   0                        EOM
-  00001C02  0760FF80  076402D5...00010084       07     725.0 MB   HGT
-    211  00  YHQP72   0                        EOM
-  00001C02  0760FF80  076402BC...00010084       07     700.0 MB   HGT
-    211  00  YHQP70   0                        EOM
-  00001C02  0760FF80  076402A3...00010084       07     675.0 MB   HGT
-    211  00  YHQP67   0                        EOM
-  00001C02  0760FF80  0764028A...00010084       07     650.0 MB   HGT
-    211  00  YHQP65   0                        EOM
-  00001C02  0760FF80  07640271...00010084       07     625.0 MB   HGT
-    211  00  YHQP62   0                        EOM
-  00001C02  0760FF80  07640258...00010084       07     600.0 MB   HGT
-    211  00  YHQP60   0                        EOM
-  00001C02  0760FF80  0764023F...00010084       07     575.0 MB   HGT
-    211  00  YHQP57   0                        EOM
-  00001C02  0760FF80  07640226...00010084       07     550.0 MB   HGT
-    211  00  YHQP55   0                        EOM
-  00001C02  0760FF80  0764020D...00010084       07     525.0 MB   HGT
-    211  00  YHQP52   0                        EOM
-  00001C02  0760FF80  076401F4...00010084       07     500.0 MB   HGT
-    211  00  YHQP50   0                        EOM
-  00001C02  0760FF80  07640190...00010084       07     400.0 MB   HGT
-    211  00  YHQP40   0                        EOM
-  00001C02  0760FF80  076401C2...00010084       07     450.0 MB   HGT
-    211  00  YHQP45   0                        EOM
-  00001C02  0760FF80  0764015E...00010084       07     350.0 MB   HGT
-    211  00  YHQP35   0                        EOM
-  00001C02  0760FF80  0764012C...00010084       07     300.0 MB   HGT
-    211  00  YHQP30   0                        EOM
-  00001C02  0760FF80  076400FA...00010084       07     250.0 MB   HGT
-    211  00  YHQP25   0                        EOM
-  00001C02  0760FF80  076400C8...00010084       07     200.0 MB   HGT
-    211  00  YHQP20   0                        EOM
-  00001C02  0760FF80  07640096...00010084       07     150.0 MB   HGT
-    211  00  YHQP15   0                        EOM
-  00001C02  0760FF80  07640064...00010084       07     100.0 MB   HGT
-    211  00  YHQP10   0                        EOM
-  00001C02  0760FF80  216403E8...00010084       33    1000.0 MB   U GRD 
-    211  00  YUQP99   0                        EOM 
-  00001C02  0760FF80  216403CF...00010084       33     975.0 MB   U GRD
-    211  00  YUQP93   0                        EOM 
-  00001C02  0760FF80  216403B6...00010084       33     950.0 MB   U GRD
-    211  00  YUQP95   0                        EOM
-  00001C02  0760FF80  2164039D...00010084       33     925.0 MB   U GRD
-    211  00  YUQP92   0                        EOM
-  00001C02  0760FF80  21640384...00010084       33     900.0 MB   U GRD
-    211  00  YUQP90   0                        EOM
-  00001C02  0760FF80  2164036B...00010084       33     875.0 MB   U GRD
-    211  00  YUQP91   0                        EOM
-  00001C02  0760FF80  21640352...00010084       33     850.0 MB   U GRD
-    211  00  YUQP85   0                        EOM
-  00001C02  0760FF80  21640339...00010084       33     825.0 MB   U GRD
-    211  00  YUQP82   0                        EOM
-  00001C02  0760FF80  21640320...00010084       33     800.0 MB   U GRD
-    211  00  YUQP80   0                        EOM
-  00001C02  0760FF80  21640307...00010084       33     775.0 MB   U GRD
-    211  00  YUQP77   0                        EOM
-  00001C02  0760FF80  216402EE...00010084       33     750.0 MB   U GRD
-    211  00  YUQP75   0                        EOM
-  00001C02  0760FF80  216402D5...00010084       33     725.0 MB   U GRD
-    211  00  YUQP72   0                        EOM
-  00001C02  0760FF80  216402BC...00010084       33     700.0 MB   U GRD
-    211  00  YUQP70   0                        EOM
-  00001C02  0760FF80  216402A3...00010084       33     675.0 MB   U GRD
-    211  00  YUQP67   0                        EOM
-  00001C02  0760FF80  2164028A...00010084       33     650.0 MB   U GRD
-    211  00  YUQP65   0                        EOM
-  00001C02  0760FF80  21640271...00010084       33     625.0 MB   U GRD
-    211  00  YUQP62   0                        EOM
-  00001C02  0760FF80  21640258...00010084       33     600.0 MB   U GRD
-    211  00  YUQP60   0                        EOM
-  00001C02  0760FF80  2164023F...00010084       33     575.0 MB   U GRD
-    211  00  YUQP57   0                        EOM
-  00001C02  0760FF80  21640226...00010084       33     550.0 MB   U GRD
-    211  00  YUQP55   0                        EOM
-  00001C02  0760FF80  2164020D...00010084       33     525.0 MB   U GRD
-    211  00  YUQP52   0                        EOM
-  00001C02  0760FF80  216401F4...00010084       33     500.0 MB   U GRD
-    211  00  YUQP50   0                        EOM
-  00001C02  0760FF80  216401C2...00010084       33     450.0 MB   U GRD
-    211  00  YUQP45   0                        EOM
-  00001C02  0760FF80  21640190...00010084       33     400.0 MB   U GRD
-    211  00  YUQP40   0                        EOM
-  00001C02  0760FF80  2164015E...00010084       33     350.0 MB   U GRD
-    211  00  YUQP35   0                        EOM
-  00001C02  0760FF80  2164012C...00010084       33     300.0 MB   U GRD
-    211  00  YUQP30   0                        EOM
-  00001C02  0760FF80  216400FA...00010084       33     250.0 MB   U GRD
-    211  00  YUQP25   0                        EOM
-  00001C02  0760FF80  216400C8...00010084       33     200.0 MB   U GRD
-    211  00  YUQP20   0                        EOM
-  00001C02  0760FF80  21640096...00010084       33     150.0 MB   U GRD
-    211  00  YUQP15   0                        EOM
-  00001C02  0760FF80  21640064...00010084       33     100.0 MB   U GRD
-    211  00  YUQP10   0                        EOM
-  00001C02  0760FF80  226403E8...00010084       34    1000.0 MB   V GRD
-    211  00  YVQP99   0                        EOM
-  00001C02  0760FF80  226403CF...00010084       34     975.0 MB   V GRD
-    211  00  YVQP93   0                        EOM
-  00001C02  0760FF80  226403B6...00010084       34     950.0 MB   V GRD
-    211  00  YVQP95   0                        EOM
-  00001C02  0760FF80  2264039D...00010084       34     925.0 MB   V GRD
-    211  00  YVQP92   0                        EOM
-  00001C02  0760FF80  22640384...00010084       34     900.0 MB   V GRD
-    211  00  YVQP90   0                        EOM
-  00001C02  0760FF80  2264036B...00010084       34     875.0 MB   V GRD
-    211  00  YVQP91   0                        EOM
-  00001C02  0760FF80  22640352...00010084       34     850.0 MB   V GRD
-    211  00  YVQP85   0                        EOM
-  00001C02  0760FF80  22640339...00010084       34     825.0 MB   V GRD
-    211  00  YVQP82   0                        EOM
-  00001C02  0760FF80  22640320...00010084       34     800.0 MB   V GRD
-    211  00  YVQP80   0                        EOM
-  00001C02  0760FF80  22640307...00010084       34     775.0 MB   V GRD
-    211  00  YVQP77   0                        EOM
-  00001C02  0760FF80  226402EE...00010084       34     750.0 MB   V GRD
-    211  00  YVQP75   0                        EOM
-  00001C02  0760FF80  226402D5...00010084       34     725.0 MB   V GRD
-    211  00  YVQP72   0                        EOM
-  00001C02  0760FF80  226402BC...00010084       34     700.0 MB   V GRD
-    211  00  YVQP70   0                        EOM
-  00001C02  0760FF80  226402A3...00010084       34     675.0 MB   V GRD
-    211  00  YVQP67   0                        EOM
-  00001C02  0760FF80  2264028A...00010084       34     650.0 MB   V GRD
-    211  00  YVQP65   0                        EOM
-  00001C02  0760FF80  22640271...00010084       34     625.0 MB   V GRD
-    211  00  YVQP62   0                        EOM
-  00001C02  0760FF80  22640258...00010084       34     600.0 MB   V GRD
-    211  00  YVQP60   0                        EOM
-  00001C02  0760FF80  2264023F...00010084       34     575.0 MB   V GRD
-    211  00  YVQP57   0                        EOM
-  00001C02  0760FF80  22640226...00010084       34     550.0 MB   V GRD
-    211  00  YVQP55   0                        EOM
-  00001C02  0760FF80  2264020D...00010084       34     525.0 MB   V GRD
-    211  00  YVQP52   0                        EOM
-  00001C02  0760FF80  226401F4...00010084       34     500.0 MB   V GRD
-    211  00  YVQP50   0                        EOM
-  00001C02  0760FF80  226401C2...00010084       34     450.0 MB   V GRD
-    211  00  YVQP45   0                        EOM
-  00001C02  0760FF80  22640190...00010084       34     400.0 MB   V GRD
-    211  00  YVQP40   0                        EOM
-  00001C02  0760FF80  2264015E...00010084       34     350.0 MB   V GRD
-    211  00  YVQP35   0                        EOM
-  00001C02  0760FF80  2264012C...00010084       34     300.0 MB   V GRD
-    211  00  YVQP30   0                        EOM
-  00001C02  0760FF80  226400FA...00010084       34     250.0 MB   V GRD
-    211  00  YVQP25   0                        EOM
-  00001C02  0760FF80  226400C8...00010084       34     200.0 MB   V GRD
-    211  00  YVQP20   0                        EOM
-  00001C02  0760FF80  22640096...00010084       34     150.0 MB   V GRD
-    211  00  YVQP15   0                        EOM
-  00001C02  0760FF80  22640064...00010084       34     100.0 MB   V GRD
-    211  00  YVQP10   0                        EOM
-  00001C02  0760FF80  02660000...00010084       02           MSL  PRMSL
-    211  00  YPQP89   0                        EOM
-  00001C02  0760FF80  3D010000...00010084       61           SFC  A PCP
-    211  00  YEQP98   0                        EOM
-  00001C02  0760FF80  346403E8...00010084       52    1000.0 MB   R H
-    211  00  YRQP99   0                        EOM
-  00001C02  0760FF80  346403CF...00010084       52     975.0 MB   R H
-    211  00  YRQP93   0                        EOM
-  00001C02  0760FF80  346403B6...00010084       52     950.0 MB   R H
-    211  00  YRQP95   0                        EOM
-  00001C02  0760FF80  3464039D...00010084       52     925.0 MB   R H
-    211  00  YRQP92   0                        EOM
-  00001C02  0760FF80  34640384...00010084       52     900.0 MB   R H
-    211  00  YRQP90   0                        EOM
-  00001C02  0760FF80  3464036B...00010084       52     875.0 MB   R H
-    211  00  YRQP91   0                        EOM
-  00001C02  0760FF80  34640352...00010084       52     850.0 MB   R H
-    211  00  YRQP85   0                        EOM
-  00001C02  0760FF80  34640339...00010084       52     825.0 MB   R H
-    211  00  YRQP82   0                        EOM
-  00001C02  0760FF80  34640320...00010084       52     800.0 MB   R H
-    211  00  YRQP80   0                        EOM
-  00001C02  0760FF80  34640307...00010084       52     775.0 MB   R H
-    211  00  YRQP77   0                        EOM
-  00001C02  0760FF80  346402EE...00010084       52     750.0 MB   R H
-    211  00  YRQP75   0                        EOM
-  00001C02  0760FF80  346402D5...00010084       52     725.0 MB   R H
-    211  00  YRQP72   0                        EOM
-  00001C02  0760FF80  346402BC...00010084       52     700.0 MB   R H
-    211  00  YRQP70   0                        EOM
-  00001C02  0760FF80  346402A3...00010084       52     675.0 MB   R H
-    211  00  YRQP67   0                        EOM
-  00001C02  0760FF80  3464028A...00010084       52     650.0 MB   R H
-    211  00  YRQP65   0                        EOM
-  00001C02  0760FF80  34640271...00010084       52     625.0 MB   R H
-    211  00  YRQP62   0                        EOM
-  00001C02  0760FF80  34640258...00010084       52     600.0 MB   R H
-    211  00  YRQP60   0                        EOM
-  00001C02  0760FF80  3464023F...00010084       52     575.0 MB   R H
-    211  00  YRQP57   0                        EOM
-  00001C02  0760FF80  34640226...00010084       52     550.0 MB   R H
-    211  00  YRQP55   0                        EOM
-  00001C02  0760FF80  3464020D...00010084       52     525.0 MB   R H
-    211  00  YRQP52   0                        EOM
-  00001C02  0760FF80  346401F4...00010084       52     500.0 MB   R H
-    211  00  YRQP50   0                        EOM
-  00001C02  0760FF80  346401C2...00010084       52     450.0 MB   R H
-    211  00  YRQP45   0                        EOM
-  00001C02  0760FF80  34640190...00010084       52     400.0 MB   R H
-    211  00  YRQP40   0                        EOM
-  00001C02  0760FF80  3464015E...00010084       52     350.0 MB   R H
-    211  00  YRQP35   0                        EOM
-  00001C02  0760FF80  3464012C...00010084       52     300.0 MB   R H
-    211  00  YRQP30   0                        EOM
-  00001C02  0760FF80  346400FA...00010084       52     250.0 MB   R H
-    211  00  YRQP25   0                        EOM
-  00001C02  0760FF80  346400C8...00010084       52     200.0 MB   R H
-    211  00  YRQP20   0                        EOM
-  00001C02  0760FF80  34640096...00010084       52     150.0 MB   R H
-    211  00  YRQP15   0                        EOM
-  00001C02  0760FF80  34640064...00010084       52     100.0 MB   R H
-    211  00  YRQP10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010084       11    1000.0 MB   TMP
-    211  00  YTQP99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010084       11     975.0 MB   TMP
-    211  00  YTQP93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010084       11     950.0 MB   TMP
-    211  00  YTQP95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010084       11     925.0 MB   TMP
-    211  00  YTQP92   0                        EOM
-  00001C02  0760FF80  0B640384...00010084       11     900.0 MB   TMP
-    211  00  YTQP90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010084       11     875.0 MB   TMP
-    211  00  YTQP91   0                        EOM
-  00001C02  0760FF80  0B640352...00010084       11     850.0 MB   TMP
-    211  00  YTQP85   0                        EOM
-  00001C02  0760FF80  0B640339...00010084       11     825.0 MB   TMP
-    211  00  YTQP82   0                        EOM
-  00001C02  0760FF80  0B640320...00010084       11     800.0 MB   TMP
-    211  00  YTQP80   0                        EOM
-  00001C02  0760FF80  0B640307...00010084       11     775.0 MB   TMP
-    211  00  YTQP77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010084       11     750.0 MB   TMP
-    211  00  YTQP75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010084       11     725.0 MB   TMP
-    211  00  YTQP72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010084       11     700.0 MB   TMP
-    211  00  YTQP70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010084       11     675.0 MB   TMP
-    211  00  YTQP67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010084       11     650.0 MB   TMP
-    211  00  YTQP65   0                        EOM
-  00001C02  0760FF80  0B640271...00010084       11     625.0 MB   TMP
-    211  00  YTQP62   0                        EOM
-  00001C02  0760FF80  0B640258...00010084       11     600.0 MB   TMP
-    211  00  YTQP60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010084       11     575.0 MB   TMP
-    211  00  YTQP57   0                        EOM
-  00001C02  0760FF80  0B640226...00010084       11     550.0 MB   TMP
-    211  00  YTQP55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010084       11     525.0 MB   TMP
-    211  00  YTQP52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010084       11     500.0 MB   TMP
-    211  00  YTQP50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010084       11     450.0 MB   TMP
-    211  00  YTQP45   0                        EOM
-  00001C02  0760FF80  0B640190...00010084       11     400.0 MB   TMP
-    211  00  YTQP40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010084       11     350.0 MB   TMP
-    211  00  YTQP35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010084       11     300.0 MB   TMP
-    211  00  YTQP30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010084       11     250.0 MB   TMP
-    211  00  YTQP25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010084       11     200.0 MB   TMP
-    211  00  YTQP20   0                        EOM
-  00001C02  0760FF80  0B640096...00010084       11     150.0 MB   TMP
-    211  00  YTQP15   0                        EOM
-  00001C02  0760FF80  0B640064...00010084       11     100.0 MB   TMP
-    211  00  YTQP10   0                        EOM
-  00001C02  0760FF80  28640352...00010084       40     850.0 MB  DZDT
-    211  00  YOQP85   0                        EOM
-  00001C02  0760FF80  286402BC...00010084       40     700.0 MB  DZDT
-    211  00  YOQP70   0                        EOM
-  00001C02  0760FF80  286401F4...00010084       40     500.0 MB  DZDT
-    211  00  YOQP50   0                        EOM
-  00001C02  0760FF80  01010000...00010084       01          SFC  PRES
-    211  00  YPQP98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010084       52        44/100  R H
-    211  00  YRQP00   0                        EOM
-  00001C02  0760FF80  296401F4...00010084       41     500.0 MB ABS V
-    211  00  YCQP50   0                        EOM
-  00001C02  0760FF80  9D010000...00010084      157          SFC   CAPE
-    211  00  YWQP98   0                        EOM
-  00001C02  0760FF80  9C010000...00010084      156          SFC   CIN
-    211  00  YYQP98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010084      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQP86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010084      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQP86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010084       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010084       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010084       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010084       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  0B749678...00010084       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010084       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQP86   0                        EOM
-  00001C02  0760FF80  34741E00...00010084       52   30 SPDY   0 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010084       52   60 SPDY  30 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010084       52   90 SPDY  60 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  3474785A...00010084       52  120 SPDY  90 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  34749678...00010084       52  150 SPDY 120 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  3474B496...00010084       52  180 SPDY 150 SPDY  R H
-    211  00  YRQP86   0                        EOM
-  00001C02  0760FF80  21741E00...00010084       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010084       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010084       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  2174785A...00010084       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  21749678...00010084       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  2174B496...00010084       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQP86   0                        EOM
-  00001C02  0760FF80  22741E00...00010084       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010084       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010084       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  2274785A...00010084       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  22749678...00010084       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  2274B496...00010084       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQP86   0                        EOM
-  00001C02  0760FF80  0B690002...00010084       11    2  HTGL     TMP
-    211  00  YTQP98   0                        EOM
-  00001C02  0760FF80  34690002...00010084       52    2  HTGL     R H
-    211  00  YRQP98   0                        EOM
-  00001C02  0760FF80  2169000A...00010084       33   10  HTGL     U GRD
-    211  00  YUQP98   0                        EOM
-  00001C02  0760FF80  2269000A...00010084       34   10  HTGL     V GRD
-    211  00  YVQP98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs138.211 b/parm/wmo/grib_awpgfs138.211
deleted file mode 100755
index 6b1bf437e8..0000000000
--- a/parm/wmo/grib_awpgfs138.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...0001008A       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...0001008A       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...0001008A       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...0001008A       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...0001008A       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...0001008A       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...0001008A       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...0001008A       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...0001008A       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...0001008A       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...0001008A       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...0001008A       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...0001008A       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...0001008A       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...0001008A       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...0001008A       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...0001008A       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...0001008A       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...0001008A       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...0001008A       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...0001008A       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...0001008A       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...0001008A       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...0001008A       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...0001008A       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...0001008A       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...0001008A       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...0001008A       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...0001008A       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...0001008A       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...0001008A       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...0001008A       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...0001008A       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...0001008A       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...0001008A       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...0001008A       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...0001008A       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...0001008A       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...0001008A       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...0001008A       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...0001008A       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...0001008A       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...0001008A       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...0001008A       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...0001008A       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...0001008A       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...0001008A       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...0001008A       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...0001008A       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...0001008A       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...0001008A       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...0001008A       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...0001008A       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...0001008A       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...0001008A       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...0001008A       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...0001008A       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...0001008A       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...0001008A       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...0001008A       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...0001008A       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...0001008A       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...0001008A       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...0001008A       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...0001008A       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...0001008A       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...0001008A       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...0001008A       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...0001008A       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...0001008A       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...0001008A       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...0001008A       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...0001008A       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...0001008A       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...0001008A       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...0001008A       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...0001008A       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...0001008A       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...0001008A       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...0001008A       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...0001008A       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...0001008A       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...0001008A       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...0001008A       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...0001008A       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...0001008A       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...0001008A       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...0001008A       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...0001008A       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...0001008A       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...0001008A       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...0001008A       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...0001008A       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...0001008A       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...0001008A       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...0001008A       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...0001008A       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...0001008A       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...0001008A       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...0001008A       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...0001008A       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...0001008A       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...0001008A       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...0001008A       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...0001008A       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...0001008A       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...0001008A       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...0001008A       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...0001008A       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...0001008A       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...0001008A       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...0001008A       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...0001008A       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...0001008A       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...0001008A       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...0001008A       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...0001008A       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...0001008A       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001008A       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001008A       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001008A       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001008A       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...0001008A       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001008A       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...0001008A       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...0001008A       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...0001008A       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...0001008A       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001008A       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001008A       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001008A       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001008A       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001008A       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...0001008A       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...0001008A       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001008A       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...0001008A       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001008A       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001008A       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001008A       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...0001008A       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001008A       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001008A       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001008A       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001008A       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...0001008A       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...0001008A       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...0001008A       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...0001008A       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...0001008A       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...0001008A       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001008A       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...0001008A       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...0001008A      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...0001008A      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001008A      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001008A      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001008A       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001008A       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001008A       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001008A       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...0001008A       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001008A       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...0001008A       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001008A       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001008A       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...0001008A       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...0001008A       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...0001008A       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...0001008A       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001008A       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001008A       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...0001008A       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...0001008A       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...0001008A       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...0001008A       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001008A       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001008A       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...0001008A       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...0001008A       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...0001008A       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...0001008A       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...0001008A       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...0001008A       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...0001008A       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs144.211 b/parm/wmo/grib_awpgfs144.211
deleted file mode 100755
index 032cf422aa..0000000000
--- a/parm/wmo/grib_awpgfs144.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...00010090       07    1000.0 MB   HGT
-    211  00  YHQQ99   0                        EOM
-  00001C02  0760FF80  076403CF...00010090       07     975.0 MB   HGT
-    211  00  YHQQ93   0                        EOM
-  00001C02  0760FF80  076403B6...00010090       07     950.0 MB   HGT
-    211  00  YHQQ95   0                        EOM
-  00001C02  0760FF80  0764039D...00010090       07     925.0 MB   HGT
-    211  00  YHQQ92   0                        EOM
-  00001C02  0760FF80  07640384...00010090       07     900.0 MB   HGT
-    211  00  YHQQ90   0                        EOM
-  00001C02  0760FF80  0764036B...00010090       07     875.0 MB   HGT
-    211  00  YHQQ91   0                        EOM
-  00001C02  0760FF80  07640352...00010090       07     850.0 MB   HGT
-    211  00  YHQQ85   0                        EOM
-  00001C02  0760FF80  07640339...00010090       07     825.0 MB   HGT
-    211  00  YHQQ82   0                        EOM
-  00001C02  0760FF80  07640320...00010090       07     800.0 MB   HGT
-    211  00  YHQQ80   0                        EOM
-  00001C02  0760FF80  07640307...00010090       07     775.0 MB   HGT
-    211  00  YHQQ77   0                        EOM
-  00001C02  0760FF80  076402EE...00010090       07     750.0 MB   HGT
-    211  00  YHQQ75   0                        EOM
-  00001C02  0760FF80  076402D5...00010090       07     725.0 MB   HGT
-    211  00  YHQQ72   0                        EOM
-  00001C02  0760FF80  076402BC...00010090       07     700.0 MB   HGT
-    211  00  YHQQ70   0                        EOM
-  00001C02  0760FF80  076402A3...00010090       07     675.0 MB   HGT
-    211  00  YHQQ67   0                        EOM
-  00001C02  0760FF80  0764028A...00010090       07     650.0 MB   HGT
-    211  00  YHQQ65   0                        EOM
-  00001C02  0760FF80  07640271...00010090       07     625.0 MB   HGT
-    211  00  YHQQ62   0                        EOM
-  00001C02  0760FF80  07640258...00010090       07     600.0 MB   HGT
-    211  00  YHQQ60   0                        EOM
-  00001C02  0760FF80  0764023F...00010090       07     575.0 MB   HGT
-    211  00  YHQQ57   0                        EOM
-  00001C02  0760FF80  07640226...00010090       07     550.0 MB   HGT
-    211  00  YHQQ55   0                        EOM
-  00001C02  0760FF80  0764020D...00010090       07     525.0 MB   HGT
-    211  00  YHQQ52   0                        EOM
-  00001C02  0760FF80  076401F4...00010090       07     500.0 MB   HGT
-    211  00  YHQQ50   0                        EOM
-  00001C02  0760FF80  076401C2...00010090       07     450.0 MB   HGT
-    211  00  YHQQ45   0                        EOM
-  00001C02  0760FF80  07640190...00010090       07     400.0 MB   HGT
-    211  00  YHQQ40   0                        EOM
-  00001C02  0760FF80  0764015E...00010090       07     350.0 MB   HGT
-    211  00  YHQQ35   0                        EOM
-  00001C02  0760FF80  0764012C...00010090       07     300.0 MB   HGT
-    211  00  YHQQ30   0                        EOM
-  00001C02  0760FF80  076400FA...00010090       07     250.0 MB   HGT
-    211  00  YHQQ25   0                        EOM
-  00001C02  0760FF80  076400C8...00010090       07     200.0 MB   HGT
-    211  00  YHQQ20   0                        EOM
-  00001C02  0760FF80  07640096...00010090       07     150.0 MB   HGT
-    211  00  YHQQ15   0                        EOM
-  00001C02  0760FF80  07640064...00010090       07     100.0 MB   HGT
-    211  00  YHQQ10   0                        EOM
-  00001C02  0760FF80  216403E8...00010090       33    1000.0 MB   U GRD 
-    211  00  YUQQ99   0                        EOM 
-  00001C02  0760FF80  216403CF...00010090       33     975.0 MB   U GRD
-    211  00  YUQQ93   0                        EOM 
-  00001C02  0760FF80  216403B6...00010090       33     950.0 MB   U GRD
-    211  00  YUQQ95   0                        EOM
-  00001C02  0760FF80  2164039D...00010090       33     925.0 MB   U GRD
-    211  00  YUQQ92   0                        EOM
-  00001C02  0760FF80  21640384...00010090       33     900.0 MB   U GRD
-    211  00  YUQQ90   0                        EOM
-  00001C02  0760FF80  2164036B...00010090       33     875.0 MB   U GRD
-    211  00  YUQQ91   0                        EOM
-  00001C02  0760FF80  21640352...00010090       33     850.0 MB   U GRD
-    211  00  YUQQ85   0                        EOM
-  00001C02  0760FF80  21640339...00010090       33     825.0 MB   U GRD
-    211  00  YUQQ82   0                        EOM
-  00001C02  0760FF80  21640320...00010090       33     800.0 MB   U GRD
-    211  00  YUQQ80   0                        EOM
-  00001C02  0760FF80  21640307...00010090       33     775.0 MB   U GRD
-    211  00  YUQQ77   0                        EOM
-  00001C02  0760FF80  216402EE...00010090       33     750.0 MB   U GRD
-    211  00  YUQQ75   0                        EOM
-  00001C02  0760FF80  216402D5...00010090       33     725.0 MB   U GRD
-    211  00  YUQQ72   0                        EOM
-  00001C02  0760FF80  216402BC...00010090       33     700.0 MB   U GRD
-    211  00  YUQQ70   0                        EOM
-  00001C02  0760FF80  216402A3...00010090       33     675.0 MB   U GRD
-    211  00  YUQQ67   0                        EOM
-  00001C02  0760FF80  2164028A...00010090       33     650.0 MB   U GRD
-    211  00  YUQQ65   0                        EOM
-  00001C02  0760FF80  21640271...00010090       33     625.0 MB   U GRD
-    211  00  YUQQ62   0                        EOM
-  00001C02  0760FF80  21640258...00010090       33     600.0 MB   U GRD
-    211  00  YUQQ60   0                        EOM
-  00001C02  0760FF80  2164023F...00010090       33     575.0 MB   U GRD
-    211  00  YUQQ57   0                        EOM
-  00001C02  0760FF80  21640226...00010090       33     550.0 MB   U GRD
-    211  00  YUQQ55   0                        EOM
-  00001C02  0760FF80  2164020D...00010090       33     525.0 MB   U GRD
-    211  00  YUQQ52   0                        EOM
-  00001C02  0760FF80  216401F4...00010090       33     500.0 MB   U GRD
-    211  00  YUQQ50   0                        EOM
-  00001C02  0760FF80  216401C2...00010090       33     450.0 MB   U GRD
-    211  00  YUQQ45   0                        EOM
-  00001C02  0760FF80  21640190...00010090       33     400.0 MB   U GRD
-    211  00  YUQQ40   0                        EOM
-  00001C02  0760FF80  2164015E...00010090       33     350.0 MB   U GRD
-    211  00  YUQQ35   0                        EOM
-  00001C02  0760FF80  2164012C...00010090       33     300.0 MB   U GRD
-    211  00  YUQQ30   0                        EOM
-  00001C02  0760FF80  216400FA...00010090       33     250.0 MB   U GRD
-    211  00  YUQQ25   0                        EOM
-  00001C02  0760FF80  216400C8...00010090       33     200.0 MB   U GRD
-    211  00  YUQQ20   0                        EOM
-  00001C02  0760FF80  21640096...00010090       33     150.0 MB   U GRD
-    211  00  YUQQ15   0                        EOM
-  00001C02  0760FF80  21640064...00010090       33     100.0 MB   U GRD
-    211  00  YUQQ10   0                        EOM
-  00001C02  0760FF80  226403E8...00010090       34    1000.0 MB   V GRD
-    211  00  YVQQ99   0                        EOM
-  00001C02  0760FF80  226403CF...00010090       34     975.0 MB   V GRD
-    211  00  YVQQ93   0                        EOM
-  00001C02  0760FF80  226403B6...00010090       34     950.0 MB   V GRD
-    211  00  YVQQ95   0                        EOM
-  00001C02  0760FF80  2264039D...00010090       34     925.0 MB   V GRD
-    211  00  YVQQ92   0                        EOM
-  00001C02  0760FF80  22640384...00010090       34     900.0 MB   V GRD
-    211  00  YVQQ90   0                        EOM
-  00001C02  0760FF80  2264036B...00010090       34     875.0 MB   V GRD
-    211  00  YVQQ91   0                        EOM
-  00001C02  0760FF80  22640352...00010090       34     850.0 MB   V GRD
-    211  00  YVQQ85   0                        EOM
-  00001C02  0760FF80  22640339...00010090       34     825.0 MB   V GRD
-    211  00  YVQQ82   0                        EOM
-  00001C02  0760FF80  22640320...00010090       34     800.0 MB   V GRD
-    211  00  YVQQ80   0                        EOM
-  00001C02  0760FF80  22640307...00010090       34     775.0 MB   V GRD
-    211  00  YVQQ77   0                        EOM
-  00001C02  0760FF80  226402EE...00010090       34     750.0 MB   V GRD
-    211  00  YVQQ75   0                        EOM
-  00001C02  0760FF80  226402D5...00010090       34     725.0 MB   V GRD
-    211  00  YVQQ72   0                        EOM
-  00001C02  0760FF80  226402BC...00010090       34     700.0 MB   V GRD
-    211  00  YVQQ70   0                        EOM
-  00001C02  0760FF80  226402A3...00010090       34     675.0 MB   V GRD
-    211  00  YVQQ67   0                        EOM
-  00001C02  0760FF80  2264028A...00010090       34     650.0 MB   V GRD
-    211  00  YVQQ65   0                        EOM
-  00001C02  0760FF80  22640271...00010090       34     625.0 MB   V GRD
-    211  00  YVQQ62   0                        EOM
-  00001C02  0760FF80  22640258...00010090       34     600.0 MB   V GRD
-    211  00  YVQQ60   0                        EOM
-  00001C02  0760FF80  2264023F...00010090       34     575.0 MB   V GRD
-    211  00  YVQQ57   0                        EOM
-  00001C02  0760FF80  22640226...00010090       34     550.0 MB   V GRD
-    211  00  YVQQ55   0                        EOM
-  00001C02  0760FF80  2264020D...00010090       34     525.0 MB   V GRD
-    211  00  YVQQ52   0                        EOM
-  00001C02  0760FF80  226401F4...00010090       34     500.0 MB   V GRD
-    211  00  YVQQ50   0                        EOM
-  00001C02  0760FF80  226401C2...00010090       34     450.0 MB   V GRD
-    211  00  YVQQ45   0                        EOM
-  00001C02  0760FF80  22640190...00010090       34     400.0 MB   V GRD
-    211  00  YVQQ40   0                        EOM
-  00001C02  0760FF80  2264015E...00010090       34     350.0 MB   V GRD
-    211  00  YVQQ35   0                        EOM
-  00001C02  0760FF80  2264012C...00010090       34     300.0 MB   V GRD
-    211  00  YVQQ30   0                        EOM
-  00001C02  0760FF80  226400FA...00010090       34     250.0 MB   V GRD
-    211  00  YVQQ25   0                        EOM
-  00001C02  0760FF80  226400C8...00010090       34     200.0 MB   V GRD
-    211  00  YVQQ20   0                        EOM
-  00001C02  0760FF80  22640096...00010090       34     150.0 MB   V GRD
-    211  00  YVQQ15   0                        EOM
-  00001C02  0760FF80  22640064...00010090       34     100.0 MB   V GRD
-    211  00  YVQQ10   0                        EOM
-  00001C02  0760FF80  02660000...00010090       02           MSL  PRMSL
-    211  00  YPQQ89   0                        EOM
-  00001C02  0760FF80  3D010000...00010090       61           SFC  A PCP
-    211  00  YEQQ98   0                        EOM
-  00001C02  0760FF80  346403E8...00010090       52    1000.0 MB   R H
-    211  00  YRQQ99   0                        EOM
-  00001C02  0760FF80  346403CF...00010090       52     975.0 MB   R H
-    211  00  YRQQ93   0                        EOM
-  00001C02  0760FF80  346403B6...00010090       52     950.0 MB   R H
-    211  00  YRQQ95   0                        EOM
-  00001C02  0760FF80  3464039D...00010090       52     925.0 MB   R H
-    211  00  YRQQ92   0                        EOM
-  00001C02  0760FF80  34640384...00010090       52     900.0 MB   R H
-    211  00  YRQQ90   0                        EOM
-  00001C02  0760FF80  3464036B...00010090       52     875.0 MB   R H
-    211  00  YRQQ91   0                        EOM
-  00001C02  0760FF80  34640352...00010090       52     850.0 MB   R H
-    211  00  YRQQ85   0                        EOM
-  00001C02  0760FF80  34640339...00010090       52     825.0 MB   R H
-    211  00  YRQQ82   0                        EOM
-  00001C02  0760FF80  34640320...00010090       52     800.0 MB   R H
-    211  00  YRQQ80   0                        EOM
-  00001C02  0760FF80  34640307...00010090       52     775.0 MB   R H
-    211  00  YRQQ77   0                        EOM
-  00001C02  0760FF80  346402EE...00010090       52     750.0 MB   R H
-    211  00  YRQQ75   0                        EOM
-  00001C02  0760FF80  346402D5...00010090       52     725.0 MB   R H
-    211  00  YRQQ72   0                        EOM
-  00001C02  0760FF80  346402BC...00010090       52     700.0 MB   R H
-    211  00  YRQQ70   0                        EOM
-  00001C02  0760FF80  346402A3...00010090       52     675.0 MB   R H
-    211  00  YRQQ67   0                        EOM
-  00001C02  0760FF80  3464028A...00010090       52     650.0 MB   R H
-    211  00  YRQQ65   0                        EOM
-  00001C02  0760FF80  34640271...00010090       52     625.0 MB   R H
-    211  00  YRQQ62   0                        EOM
-  00001C02  0760FF80  34640258...00010090       52     600.0 MB   R H
-    211  00  YRQQ60   0                        EOM
-  00001C02  0760FF80  3464023F...00010090       52     575.0 MB   R H
-    211  00  YRQQ57   0                        EOM
-  00001C02  0760FF80  34640226...00010090       52     550.0 MB   R H
-    211  00  YRQQ55   0                        EOM
-  00001C02  0760FF80  3464020D...00010090       52     525.0 MB   R H
-    211  00  YRQQ52   0                        EOM
-  00001C02  0760FF80  346401F4...00010090       52     500.0 MB   R H
-    211  00  YRQQ50   0                        EOM
-  00001C02  0760FF80  346401C2...00010090       52     450.0 MB   R H
-    211  00  YRQQ45   0                        EOM
-  00001C02  0760FF80  34640190...00010090       52     400.0 MB   R H
-    211  00  YRQQ40   0                        EOM
-  00001C02  0760FF80  3464015E...00010090       52     350.0 MB   R H
-    211  00  YRQQ35   0                        EOM
-  00001C02  0760FF80  3464012C...00010090       52     300.0 MB   R H
-    211  00  YRQQ30   0                        EOM
-  00001C02  0760FF80  346400FA...00010090       52     250.0 MB   R H
-    211  00  YRQQ25   0                        EOM
-  00001C02  0760FF80  346400C8...00010090       52     200.0 MB   R H
-    211  00  YRQQ20   0                        EOM
-  00001C02  0760FF80  34640096...00010090       52     150.0 MB   R H
-    211  00  YRQQ15   0                        EOM
-  00001C02  0760FF80  34640064...00010090       52     100.0 MB   R H
-    211  00  YRQQ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010090       11    1000.0 MB   TMP
-    211  00  YTQQ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010090       11     975.0 MB   TMP
-    211  00  YTQQ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010090       11     950.0 MB   TMP
-    211  00  YTQQ95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010090       11     925.0 MB   TMP
-    211  00  YTQQ92   0                        EOM
-  00001C02  0760FF80  0B640384...00010090       11     900.0 MB   TMP
-    211  00  YTQQ90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010090       11     875.0 MB   TMP
-    211  00  YTQQ91   0                        EOM
-  00001C02  0760FF80  0B640352...00010090       11     850.0 MB   TMP
-    211  00  YTQQ85   0                        EOM
-  00001C02  0760FF80  0B640339...00010090       11     825.0 MB   TMP
-    211  00  YTQQ82   0                        EOM
-  00001C02  0760FF80  0B640320...00010090       11     800.0 MB   TMP
-    211  00  YTQQ80   0                        EOM
-  00001C02  0760FF80  0B640307...00010090       11     775.0 MB   TMP
-    211  00  YTQQ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010090       11     750.0 MB   TMP
-    211  00  YTQQ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010090       11     725.0 MB   TMP
-    211  00  YTQQ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010090       11     700.0 MB   TMP
-    211  00  YTQQ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010090       11     675.0 MB   TMP
-    211  00  YTQQ67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010090       11     650.0 MB   TMP
-    211  00  YTQQ65   0                        EOM
-  00001C02  0760FF80  0B640271...00010090       11     625.0 MB   TMP
-    211  00  YTQQ62   0                        EOM
-  00001C02  0760FF80  0B640258...00010090       11     600.0 MB   TMP
-    211  00  YTQQ60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010090       11     575.0 MB   TMP
-    211  00  YTQQ57   0                        EOM
-  00001C02  0760FF80  0B640226...00010090       11     550.0 MB   TMP
-    211  00  YTQQ55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010090       11     525.0 MB   TMP
-    211  00  YTQQ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010090       11     500.0 MB   TMP
-    211  00  YTQQ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010090       11     450.0 MB   TMP
-    211  00  YTQQ45   0                        EOM
-  00001C02  0760FF80  0B640190...00010090       11     400.0 MB   TMP
-    211  00  YTQQ40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010090       11     350.0 MB   TMP
-    211  00  YTQQ35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010090       11     300.0 MB   TMP
-    211  00  YTQQ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010090       11     250.0 MB   TMP
-    211  00  YTQQ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010090       11     200.0 MB   TMP
-    211  00  YTQQ20   0                        EOM
-  00001C02  0760FF80  0B640096...00010090       11     150.0 MB   TMP
-    211  00  YTQQ15   0                        EOM
-  00001C02  0760FF80  0B640064...00010090       11     100.0 MB   TMP
-    211  00  YTQQ10   0                        EOM
-  00001C02  0760FF80  28640352...00010090       40     850.0 MB  DZDT
-    211  00  YOQQ85   0                        EOM
-  00001C02  0760FF80  286402BC...00010090       40     700.0 MB  DZDT
-    211  00  YOQQ70   0                        EOM
-  00001C02  0760FF80  286401F4...00010090       40     500.0 MB  DZDT
-    211  00  YOQQ50   0                        EOM
-  00001C02  0760FF80  01010000...00010090       01          SFC  PRES
-    211  00  YPQQ98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010090       52        44/100  R H
-    211  00  YRQQ00   0                        EOM
-  00001C02  0760FF80  296401F4...00010090       41     500.0 MB ABS V
-    211  00  YCQQ50   0                        EOM
-  00001C02  0760FF80  9D010000...00010090      157          SFC   CAPE
-    211  00  YWQQ98   0                        EOM
-  00001C02  0760FF80  9C010000...00010090      156          SFC   CIN
-    211  00  YYQQ98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010090      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQQ86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010090      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQQ86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010090       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010090       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010090       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010090       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  0B749678...00010090       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010090       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQQ86   0                        EOM
-  00001C02  0760FF80  34741E00...00010090       52   30 SPDY   0 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010090       52   60 SPDY  30 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010090       52   90 SPDY  60 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  3474785A...00010090       52  120 SPDY  90 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  34749678...00010090       52  150 SPDY 120 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  3474B496...00010090       52  180 SPDY 150 SPDY  R H
-    211  00  YRQQ86   0                        EOM
-  00001C02  0760FF80  21741E00...00010090       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010090       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010090       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  2174785A...00010090       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  21749678...00010090       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  2174B496...00010090       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQQ86   0                        EOM
-  00001C02  0760FF80  22741E00...00010090       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010090       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010090       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  2274785A...00010090       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  22749678...00010090       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  2274B496...00010090       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQQ86   0                        EOM
-  00001C02  0760FF80  0B690002...00010090       11    2  HTGL     TMP
-    211  00  YTQQ98   0                        EOM
-  00001C02  0760FF80  34690002...00010090       52    2  HTGL     R H
-    211  00  YRQQ98   0                        EOM
-  00001C02  0760FF80  2169000A...00010090       33   10  HTGL     U GRD
-    211  00  YUQQ98   0                        EOM
-  00001C02  0760FF80  2269000A...00010090       34   10  HTGL     V GRD
-    211  00  YVQQ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs150.211 b/parm/wmo/grib_awpgfs150.211
deleted file mode 100755
index 18598cd660..0000000000
--- a/parm/wmo/grib_awpgfs150.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...00010096       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...00010096       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...00010096       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...00010096       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...00010096       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...00010096       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...00010096       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...00010096       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...00010096       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...00010096       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...00010096       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...00010096       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...00010096       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...00010096       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...00010096       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...00010096       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...00010096       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...00010096       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...00010096       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...00010096       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...00010096       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...00010096       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...00010096       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...00010096       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...00010096       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...00010096       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...00010096       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...00010096       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...00010096       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...00010096       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...00010096       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...00010096       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...00010096       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...00010096       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...00010096       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...00010096       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...00010096       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...00010096       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...00010096       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...00010096       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...00010096       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...00010096       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...00010096       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...00010096       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...00010096       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...00010096       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...00010096       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...00010096       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...00010096       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...00010096       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...00010096       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...00010096       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...00010096       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...00010096       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...00010096       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...00010096       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...00010096       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...00010096       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...00010096       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...00010096       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...00010096       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...00010096       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...00010096       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...00010096       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...00010096       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...00010096       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...00010096       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...00010096       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...00010096       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...00010096       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...00010096       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...00010096       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...00010096       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...00010096       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...00010096       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...00010096       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...00010096       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...00010096       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...00010096       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...00010096       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...00010096       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...00010096       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...00010096       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...00010096       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...00010096       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...00010096       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...00010096       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...00010096       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...00010096       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...00010096       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...00010096       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...00010096       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...00010096       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...00010096       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...00010096       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...00010096       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...00010096       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...00010096       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...00010096       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...00010096       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...00010096       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...00010096       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...00010096       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...00010096       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...00010096       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...00010096       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...00010096       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...00010096       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...00010096       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...00010096       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...00010096       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...00010096       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...00010096       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...00010096       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...00010096       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...00010096       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...00010096       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...00010096       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...00010096       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...00010096       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...00010096       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...00010096       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...00010096       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...00010096       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...00010096       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...00010096       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...00010096       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...00010096       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...00010096       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...00010096       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...00010096       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...00010096       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...00010096       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...00010096       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...00010096       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...00010096       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...00010096       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...00010096       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...00010096       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...00010096       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...00010096       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...00010096       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...00010096       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...00010096       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...00010096       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...00010096       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...00010096       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...00010096       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...00010096       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...00010096       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...00010096       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...00010096       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...00010096       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...00010096      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...00010096      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...00010096      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...00010096      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...00010096       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...00010096       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...00010096       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...00010096       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...00010096       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...00010096       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...00010096       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...00010096       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...00010096       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...00010096       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...00010096       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...00010096       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...00010096       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...00010096       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...00010096       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...00010096       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...00010096       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...00010096       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...00010096       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...00010096       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...00010096       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...00010096       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...00010096       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...00010096       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...00010096       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...00010096       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...00010096       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...00010096       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs156.211 b/parm/wmo/grib_awpgfs156.211
deleted file mode 100755
index a283a51ce4..0000000000
--- a/parm/wmo/grib_awpgfs156.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...0001009C       07    1000.0 MB   HGT
-    211  00  YHQR99   0                        EOM
-  00001C02  0760FF80  076403CF...0001009C       07     975.0 MB   HGT
-    211  00  YHQR93   0                        EOM
-  00001C02  0760FF80  076403B6...0001009C       07     950.0 MB   HGT
-    211  00  YHQR95   0                        EOM
-  00001C02  0760FF80  0764039D...0001009C       07     925.0 MB   HGT
-    211  00  YHQR92   0                        EOM
-  00001C02  0760FF80  07640384...0001009C       07     900.0 MB   HGT
-    211  00  YHQR90   0                        EOM
-  00001C02  0760FF80  0764036B...0001009C       07     875.0 MB   HGT
-    211  00  YHQR91   0                        EOM
-  00001C02  0760FF80  07640352...0001009C       07     850.0 MB   HGT
-    211  00  YHQR85   0                        EOM
-  00001C02  0760FF80  07640339...0001009C       07     825.0 MB   HGT
-    211  00  YHQR82   0                        EOM
-  00001C02  0760FF80  07640320...0001009C       07     800.0 MB   HGT
-    211  00  YHQR80   0                        EOM
-  00001C02  0760FF80  07640307...0001009C       07     775.0 MB   HGT
-    211  00  YHQR77   0                        EOM
-  00001C02  0760FF80  076402EE...0001009C       07     750.0 MB   HGT
-    211  00  YHQR75   0                        EOM
-  00001C02  0760FF80  076402D5...0001009C       07     725.0 MB   HGT
-    211  00  YHQR72   0                        EOM
-  00001C02  0760FF80  076402BC...0001009C       07     700.0 MB   HGT
-    211  00  YHQR70   0                        EOM
-  00001C02  0760FF80  076402A3...0001009C       07     675.0 MB   HGT
-    211  00  YHQR67   0                        EOM
-  00001C02  0760FF80  0764028A...0001009C       07     650.0 MB   HGT
-    211  00  YHQR65   0                        EOM
-  00001C02  0760FF80  07640271...0001009C       07     625.0 MB   HGT
-    211  00  YHQR62   0                        EOM
-  00001C02  0760FF80  07640258...0001009C       07     600.0 MB   HGT
-    211  00  YHQR60   0                        EOM
-  00001C02  0760FF80  0764023F...0001009C       07     575.0 MB   HGT
-    211  00  YHQR57   0                        EOM
-  00001C02  0760FF80  07640226...0001009C       07     550.0 MB   HGT
-    211  00  YHQR55   0                        EOM
-  00001C02  0760FF80  0764020D...0001009C       07     525.0 MB   HGT
-    211  00  YHQR52   0                        EOM
-  00001C02  0760FF80  076401F4...0001009C       07     500.0 MB   HGT
-    211  00  YHQR50   0                        EOM
-  00001C02  0760FF80  07640190...0001009C       07     400.0 MB   HGT
-    211  00  YHQR40   0                        EOM
-  00001C02  0760FF80  076401C2...0001009C       07     450.0 MB   HGT
-    211  00  YHQR45   0                        EOM
-  00001C02  0760FF80  0764015E...0001009C       07     350.0 MB   HGT
-    211  00  YHQR35   0                        EOM
-  00001C02  0760FF80  0764012C...0001009C       07     300.0 MB   HGT
-    211  00  YHQR30   0                        EOM
-  00001C02  0760FF80  076400FA...0001009C       07     250.0 MB   HGT
-    211  00  YHQR25   0                        EOM
-  00001C02  0760FF80  076400C8...0001009C       07     200.0 MB   HGT
-    211  00  YHQR20   0                        EOM
-  00001C02  0760FF80  07640096...0001009C       07     150.0 MB   HGT
-    211  00  YHQR15   0                        EOM
-  00001C02  0760FF80  07640064...0001009C       07     100.0 MB   HGT
-    211  00  YHQR10   0                        EOM
-  00001C02  0760FF80  216403E8...0001009C       33    1000.0 MB   U GRD 
-    211  00  YUQR99   0                        EOM 
-  00001C02  0760FF80  216403CF...0001009C       33     975.0 MB   U GRD
-    211  00  YUQR93   0                        EOM 
-  00001C02  0760FF80  216403B6...0001009C       33     950.0 MB   U GRD
-    211  00  YUQR95   0                        EOM
-  00001C02  0760FF80  2164039D...0001009C       33     925.0 MB   U GRD
-    211  00  YUQR92   0                        EOM
-  00001C02  0760FF80  21640384...0001009C       33     900.0 MB   U GRD
-    211  00  YUQR90   0                        EOM
-  00001C02  0760FF80  2164036B...0001009C       33     875.0 MB   U GRD
-    211  00  YUQR91   0                        EOM
-  00001C02  0760FF80  21640352...0001009C       33     850.0 MB   U GRD
-    211  00  YUQR85   0                        EOM
-  00001C02  0760FF80  21640339...0001009C       33     825.0 MB   U GRD
-    211  00  YUQR82   0                        EOM
-  00001C02  0760FF80  21640320...0001009C       33     800.0 MB   U GRD
-    211  00  YUQR80   0                        EOM
-  00001C02  0760FF80  21640307...0001009C       33     775.0 MB   U GRD
-    211  00  YUQR77   0                        EOM
-  00001C02  0760FF80  216402EE...0001009C       33     750.0 MB   U GRD
-    211  00  YUQR75   0                        EOM
-  00001C02  0760FF80  216402D5...0001009C       33     725.0 MB   U GRD
-    211  00  YUQR72   0                        EOM
-  00001C02  0760FF80  216402BC...0001009C       33     700.0 MB   U GRD
-    211  00  YUQR70   0                        EOM
-  00001C02  0760FF80  216402A3...0001009C       33     675.0 MB   U GRD
-    211  00  YUQR67   0                        EOM
-  00001C02  0760FF80  2164028A...0001009C       33     650.0 MB   U GRD
-    211  00  YUQR65   0                        EOM
-  00001C02  0760FF80  21640271...0001009C       33     625.0 MB   U GRD
-    211  00  YUQR62   0                        EOM
-  00001C02  0760FF80  21640258...0001009C       33     600.0 MB   U GRD
-    211  00  YUQR60   0                        EOM
-  00001C02  0760FF80  2164023F...0001009C       33     575.0 MB   U GRD
-    211  00  YUQR57   0                        EOM
-  00001C02  0760FF80  21640226...0001009C       33     550.0 MB   U GRD
-    211  00  YUQR55   0                        EOM
-  00001C02  0760FF80  2164020D...0001009C       33     525.0 MB   U GRD
-    211  00  YUQR52   0                        EOM
-  00001C02  0760FF80  216401F4...0001009C       33     500.0 MB   U GRD
-    211  00  YUQR50   0                        EOM
-  00001C02  0760FF80  216401C2...0001009C       33     450.0 MB   U GRD
-    211  00  YUQR45   0                        EOM
-  00001C02  0760FF80  21640190...0001009C       33     400.0 MB   U GRD
-    211  00  YUQR40   0                        EOM
-  00001C02  0760FF80  2164015E...0001009C       33     350.0 MB   U GRD
-    211  00  YUQR35   0                        EOM
-  00001C02  0760FF80  2164012C...0001009C       33     300.0 MB   U GRD
-    211  00  YUQR30   0                        EOM
-  00001C02  0760FF80  216400FA...0001009C       33     250.0 MB   U GRD
-    211  00  YUQR25   0                        EOM
-  00001C02  0760FF80  216400C8...0001009C       33     200.0 MB   U GRD
-    211  00  YUQR20   0                        EOM
-  00001C02  0760FF80  21640096...0001009C       33     150.0 MB   U GRD
-    211  00  YUQR15   0                        EOM
-  00001C02  0760FF80  21640064...0001009C       33     100.0 MB   U GRD
-    211  00  YUQR10   0                        EOM
-  00001C02  0760FF80  226403E8...0001009C       34    1000.0 MB   V GRD
-    211  00  YVQR99   0                        EOM
-  00001C02  0760FF80  226403CF...0001009C       34     975.0 MB   V GRD
-    211  00  YVQR93   0                        EOM
-  00001C02  0760FF80  226403B6...0001009C       34     950.0 MB   V GRD
-    211  00  YVQR95   0                        EOM
-  00001C02  0760FF80  2264039D...0001009C       34     925.0 MB   V GRD
-    211  00  YVQR92   0                        EOM
-  00001C02  0760FF80  22640384...0001009C       34     900.0 MB   V GRD
-    211  00  YVQR90   0                        EOM
-  00001C02  0760FF80  2264036B...0001009C       34     875.0 MB   V GRD
-    211  00  YVQR91   0                        EOM
-  00001C02  0760FF80  22640352...0001009C       34     850.0 MB   V GRD
-    211  00  YVQR85   0                        EOM
-  00001C02  0760FF80  22640339...0001009C       34     825.0 MB   V GRD
-    211  00  YVQR82   0                        EOM
-  00001C02  0760FF80  22640320...0001009C       34     800.0 MB   V GRD
-    211  00  YVQR80   0                        EOM
-  00001C02  0760FF80  22640307...0001009C       34     775.0 MB   V GRD
-    211  00  YVQR77   0                        EOM
-  00001C02  0760FF80  226402EE...0001009C       34     750.0 MB   V GRD
-    211  00  YVQR75   0                        EOM
-  00001C02  0760FF80  226402D5...0001009C       34     725.0 MB   V GRD
-    211  00  YVQR72   0                        EOM
-  00001C02  0760FF80  226402BC...0001009C       34     700.0 MB   V GRD
-    211  00  YVQR70   0                        EOM
-  00001C02  0760FF80  226402A3...0001009C       34     675.0 MB   V GRD
-    211  00  YVQR67   0                        EOM
-  00001C02  0760FF80  2264028A...0001009C       34     650.0 MB   V GRD
-    211  00  YVQR65   0                        EOM
-  00001C02  0760FF80  22640271...0001009C       34     625.0 MB   V GRD
-    211  00  YVQR62   0                        EOM
-  00001C02  0760FF80  22640258...0001009C       34     600.0 MB   V GRD
-    211  00  YVQR60   0                        EOM
-  00001C02  0760FF80  2264023F...0001009C       34     575.0 MB   V GRD
-    211  00  YVQR57   0                        EOM
-  00001C02  0760FF80  22640226...0001009C       34     550.0 MB   V GRD
-    211  00  YVQR55   0                        EOM
-  00001C02  0760FF80  2264020D...0001009C       34     525.0 MB   V GRD
-    211  00  YVQR52   0                        EOM
-  00001C02  0760FF80  226401F4...0001009C       34     500.0 MB   V GRD
-    211  00  YVQR50   0                        EOM
-  00001C02  0760FF80  226401C2...0001009C       34     450.0 MB   V GRD
-    211  00  YVQR45   0                        EOM
-  00001C02  0760FF80  22640190...0001009C       34     400.0 MB   V GRD
-    211  00  YVQR40   0                        EOM
-  00001C02  0760FF80  2264015E...0001009C       34     350.0 MB   V GRD
-    211  00  YVQR35   0                        EOM
-  00001C02  0760FF80  2264012C...0001009C       34     300.0 MB   V GRD
-    211  00  YVQR30   0                        EOM
-  00001C02  0760FF80  226400FA...0001009C       34     250.0 MB   V GRD
-    211  00  YVQR25   0                        EOM
-  00001C02  0760FF80  226400C8...0001009C       34     200.0 MB   V GRD
-    211  00  YVQR20   0                        EOM
-  00001C02  0760FF80  22640096...0001009C       34     150.0 MB   V GRD
-    211  00  YVQR15   0                        EOM
-  00001C02  0760FF80  22640064...0001009C       34     100.0 MB   V GRD
-    211  00  YVQR10   0                        EOM
-  00001C02  0760FF80  02660000...0001009C       02           MSL  PRMSL
-    211  00  YPQR89   0                        EOM
-  00001C02  0760FF80  3D010000...0001009C       61           SFC  A PCP
-    211  00  YEQR98   0                        EOM
-  00001C02  0760FF80  346403E8...0001009C       52    1000.0 MB   R H
-    211  00  YRQR99   0                        EOM
-  00001C02  0760FF80  346403CF...0001009C       52     975.0 MB   R H
-    211  00  YRQR93   0                        EOM
-  00001C02  0760FF80  346403B6...0001009C       52     950.0 MB   R H
-    211  00  YRQR95   0                        EOM
-  00001C02  0760FF80  3464039D...0001009C       52     925.0 MB   R H
-    211  00  YRQR92   0                        EOM
-  00001C02  0760FF80  34640384...0001009C       52     900.0 MB   R H
-    211  00  YRQR90   0                        EOM
-  00001C02  0760FF80  3464036B...0001009C       52     875.0 MB   R H
-    211  00  YRQR91   0                        EOM
-  00001C02  0760FF80  34640352...0001009C       52     850.0 MB   R H
-    211  00  YRQR85   0                        EOM
-  00001C02  0760FF80  34640339...0001009C       52     825.0 MB   R H
-    211  00  YRQR82   0                        EOM
-  00001C02  0760FF80  34640320...0001009C       52     800.0 MB   R H
-    211  00  YRQR80   0                        EOM
-  00001C02  0760FF80  34640307...0001009C       52     775.0 MB   R H
-    211  00  YRQR77   0                        EOM
-  00001C02  0760FF80  346402EE...0001009C       52     750.0 MB   R H
-    211  00  YRQR75   0                        EOM
-  00001C02  0760FF80  346402D5...0001009C       52     725.0 MB   R H
-    211  00  YRQR72   0                        EOM
-  00001C02  0760FF80  346402BC...0001009C       52     700.0 MB   R H
-    211  00  YRQR70   0                        EOM
-  00001C02  0760FF80  346402A3...0001009C       52     675.0 MB   R H
-    211  00  YRQR67   0                        EOM
-  00001C02  0760FF80  3464028A...0001009C       52     650.0 MB   R H
-    211  00  YRQR65   0                        EOM
-  00001C02  0760FF80  34640271...0001009C       52     625.0 MB   R H
-    211  00  YRQR62   0                        EOM
-  00001C02  0760FF80  34640258...0001009C       52     600.0 MB   R H
-    211  00  YRQR60   0                        EOM
-  00001C02  0760FF80  3464023F...0001009C       52     575.0 MB   R H
-    211  00  YRQR57   0                        EOM
-  00001C02  0760FF80  34640226...0001009C       52     550.0 MB   R H
-    211  00  YRQR55   0                        EOM
-  00001C02  0760FF80  3464020D...0001009C       52     525.0 MB   R H
-    211  00  YRQR52   0                        EOM
-  00001C02  0760FF80  346401F4...0001009C       52     500.0 MB   R H
-    211  00  YRQR50   0                        EOM
-  00001C02  0760FF80  346401C2...0001009C       52     450.0 MB   R H
-    211  00  YRQR45   0                        EOM
-  00001C02  0760FF80  34640190...0001009C       52     400.0 MB   R H
-    211  00  YRQR40   0                        EOM
-  00001C02  0760FF80  3464015E...0001009C       52     350.0 MB   R H
-    211  00  YRQR35   0                        EOM
-  00001C02  0760FF80  3464012C...0001009C       52     300.0 MB   R H
-    211  00  YRQR30   0                        EOM
-  00001C02  0760FF80  346400FA...0001009C       52     250.0 MB   R H
-    211  00  YRQR25   0                        EOM
-  00001C02  0760FF80  346400C8...0001009C       52     200.0 MB   R H
-    211  00  YRQR20   0                        EOM
-  00001C02  0760FF80  34640096...0001009C       52     150.0 MB   R H
-    211  00  YRQR15   0                        EOM
-  00001C02  0760FF80  34640064...0001009C       52     100.0 MB   R H
-    211  00  YRQR10   0                        EOM
-  00001C02  0760FF80  0B6403E8...0001009C       11    1000.0 MB   TMP
-    211  00  YTQR99   0                        EOM
-  00001C02  0760FF80  0B6403CF...0001009C       11     975.0 MB   TMP
-    211  00  YTQR93   0                        EOM
-  00001C02  0760FF80  0B6403B6...0001009C       11     950.0 MB   TMP
-    211  00  YTQR95   0                        EOM
-  00001C02  0760FF80  0B64039D...0001009C       11     925.0 MB   TMP
-    211  00  YTQR92   0                        EOM
-  00001C02  0760FF80  0B640384...0001009C       11     900.0 MB   TMP
-    211  00  YTQR90   0                        EOM
-  00001C02  0760FF80  0B64036B...0001009C       11     875.0 MB   TMP
-    211  00  YTQR91   0                        EOM
-  00001C02  0760FF80  0B640352...0001009C       11     850.0 MB   TMP
-    211  00  YTQR85   0                        EOM
-  00001C02  0760FF80  0B640339...0001009C       11     825.0 MB   TMP
-    211  00  YTQR82   0                        EOM
-  00001C02  0760FF80  0B640320...0001009C       11     800.0 MB   TMP
-    211  00  YTQR80   0                        EOM
-  00001C02  0760FF80  0B640307...0001009C       11     775.0 MB   TMP
-    211  00  YTQR77   0                        EOM
-  00001C02  0760FF80  0B6402EE...0001009C       11     750.0 MB   TMP
-    211  00  YTQR75   0                        EOM
-  00001C02  0760FF80  0B6402D5...0001009C       11     725.0 MB   TMP
-    211  00  YTQR72   0                        EOM
-  00001C02  0760FF80  0B6402BC...0001009C       11     700.0 MB   TMP
-    211  00  YTQR70   0                        EOM
-  00001C02  0760FF80  0B6402A3...0001009C       11     675.0 MB   TMP
-    211  00  YTQR67   0                        EOM
-  00001C02  0760FF80  0B64028A...0001009C       11     650.0 MB   TMP
-    211  00  YTQR65   0                        EOM
-  00001C02  0760FF80  0B640271...0001009C       11     625.0 MB   TMP
-    211  00  YTQR62   0                        EOM
-  00001C02  0760FF80  0B640258...0001009C       11     600.0 MB   TMP
-    211  00  YTQR60   0                        EOM
-  00001C02  0760FF80  0B64023F...0001009C       11     575.0 MB   TMP
-    211  00  YTQR57   0                        EOM
-  00001C02  0760FF80  0B640226...0001009C       11     550.0 MB   TMP
-    211  00  YTQR55   0                        EOM
-  00001C02  0760FF80  0B64020D...0001009C       11     525.0 MB   TMP
-    211  00  YTQR52   0                        EOM
-  00001C02  0760FF80  0B6401F4...0001009C       11     500.0 MB   TMP
-    211  00  YTQR50   0                        EOM
-  00001C02  0760FF80  0B6401C2...0001009C       11     450.0 MB   TMP
-    211  00  YTQR45   0                        EOM
-  00001C02  0760FF80  0B640190...0001009C       11     400.0 MB   TMP
-    211  00  YTQR40   0                        EOM
-  00001C02  0760FF80  0B64015E...0001009C       11     350.0 MB   TMP
-    211  00  YTQR35   0                        EOM
-  00001C02  0760FF80  0B64012C...0001009C       11     300.0 MB   TMP
-    211  00  YTQR30   0                        EOM
-  00001C02  0760FF80  0B6400FA...0001009C       11     250.0 MB   TMP
-    211  00  YTQR25   0                        EOM
-  00001C02  0760FF80  0B6400C8...0001009C       11     200.0 MB   TMP
-    211  00  YTQR20   0                        EOM
-  00001C02  0760FF80  0B640096...0001009C       11     150.0 MB   TMP
-    211  00  YTQR15   0                        EOM
-  00001C02  0760FF80  0B640064...0001009C       11     100.0 MB   TMP
-    211  00  YTQR10   0                        EOM
-  00001C02  0760FF80  28640352...0001009C       40     850.0 MB  DZDT
-    211  00  YOQR85   0                        EOM
-  00001C02  0760FF80  286402BC...0001009C       40     700.0 MB  DZDT
-    211  00  YOQR70   0                        EOM
-  00001C02  0760FF80  286401F4...0001009C       40     500.0 MB  DZDT
-    211  00  YOQR50   0                        EOM
-  00001C02  0760FF80  01010000...0001009C       01          SFC  PRES
-    211  00  YPQR98   0                        EOM
-  00001C02  0760FF80  346C2C64...0001009C       52        44/100  R H
-    211  00  YRQR00   0                        EOM
-  00001C02  0760FF80  296401F4...0001009C       41     500.0 MB ABS V
-    211  00  YCQR50   0                        EOM 
-  00001C02  0760FF80  9D010000...0001009C      157          SFC   CAPE
-    211  00  YWQR98   0                        EOM
-  00001C02  0760FF80  9C010000...0001009C      156          SFC   CIN
-    211  00  YYQR98   0                        EOM
-  00001C02  0760FF80  9D74B400...0001009C      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQR86   0                        EOM
-  00001C02  0760FF80  9C74B400...0001009C      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQR86   0                        EOM
-  00001C02  0760FF80  0B741E00...0001009C       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  0B743C1E...0001009C       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  0B745A3C...0001009C       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  0B74785A...0001009C       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  0B749678...0001009C       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  0B74B496...0001009C       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQR86   0                        EOM
-  00001C02  0760FF80  34741E00...0001009C       52   30 SPDY   0 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  34743C1E...0001009C       52   60 SPDY  30 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  34745A3C...0001009C       52   90 SPDY  60 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  3474785A...0001009C       52  120 SPDY  90 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  34749678...0001009C       52  150 SPDY 120 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  3474B496...0001009C       52  180 SPDY 150 SPDY  R H
-    211  00  YRQR86   0                        EOM
-  00001C02  0760FF80  21741E00...0001009C       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  21743C1E...0001009C       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  21745A3C...0001009C       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  2174785A...0001009C       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  21749678...0001009C       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  2174B496...0001009C       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQR86   0                        EOM
-  00001C02  0760FF80  22741E00...0001009C       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  22743C1E...0001009C       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  22745A3C...0001009C       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  2274785A...0001009C       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  22749678...0001009C       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  2274B496...0001009C       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQR86   0                        EOM
-  00001C02  0760FF80  0B690002...0001009C       11    2  HTGL     TMP
-    211  00  YTQR98   0                        EOM
-  00001C02  0760FF80  34690002...0001009C       52    2  HTGL     R H
-    211  00  YRQR98   0                        EOM
-  00001C02  0760FF80  2169000A...0001009C       33   10  HTGL     U GRD
-    211  00  YUQR98   0                        EOM
-  00001C02  0760FF80  2269000A...0001009C       34   10  HTGL     V GRD
-    211  00  YVQR98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs162.211 b/parm/wmo/grib_awpgfs162.211
deleted file mode 100755
index bac5998532..0000000000
--- a/parm/wmo/grib_awpgfs162.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100A2       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100A2       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100A2       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100A2       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100A2       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100A2       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100A2       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100A2       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100A2       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100A2       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100A2       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100A2       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100A2       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100A2       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100A2       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100A2       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100A2       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100A2       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100A2       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100A2       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100A2       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...000100A2       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...000100A2       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...000100A2       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100A2       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100A2       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100A2       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100A2       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100A2       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100A2       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100A2       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100A2       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100A2       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100A2       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100A2       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100A2       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100A2       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100A2       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100A2       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100A2       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100A2       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100A2       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100A2       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100A2       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100A2       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100A2       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100A2       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100A2       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100A2       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100A2       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100A2       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100A2       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100A2       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100A2       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100A2       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100A2       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100A2       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100A2       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100A2       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100A2       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100A2       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100A2       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100A2       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100A2       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100A2       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100A2       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100A2       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100A2       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100A2       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100A2       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100A2       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100A2       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100A2       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100A2       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100A2       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100A2       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100A2       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100A2       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100A2       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100A2       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100A2       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100A2       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100A2       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100A2       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100A2       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100A2       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100A2       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100A2       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100A2       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100A2       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100A2       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100A2       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100A2       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100A2       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100A2       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100A2       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100A2       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100A2       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100A2       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100A2       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100A2       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100A2       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100A2       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100A2       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100A2       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100A2       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100A2       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100A2       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100A2       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100A2       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100A2       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100A2       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100A2       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100A2       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100A2       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100A2       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100A2       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100A2       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100A2       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100A2       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100A2       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100A2       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100A2       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100A2       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100A2       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100A2       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100A2       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100A2       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100A2       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100A2       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100A2       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100A2       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100A2       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100A2       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100A2       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100A2       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100A2       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100A2       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100A2       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100A2       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100A2       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100A2       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100A2       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100A2       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100A2       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100A2       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100A2       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100A2       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100A2       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100A2       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100A2       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100A2       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100A2       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100A2      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100A2      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100A2      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100A2      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100A2       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100A2       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100A2       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100A2       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100A2       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100A2       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100A2       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100A2       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100A2       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100A2       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100A2       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100A2       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100A2       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100A2       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100A2       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100A2       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100A2       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100A2       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100A2       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100A2       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100A2       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100A2       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100A2       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100A2       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100A2       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100A2       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100A2       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100A2       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs168.211 b/parm/wmo/grib_awpgfs168.211
deleted file mode 100755
index d49a9885ef..0000000000
--- a/parm/wmo/grib_awpgfs168.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100A8       07    1000.0 MB   HGT
-    211  00  YHQS99   0                        EOM
-  00001C02  0760FF80  076403CF...000100A8       07     975.0 MB   HGT
-    211  00  YHQS93   0                        EOM
-  00001C02  0760FF80  076403B6...000100A8       07     950.0 MB   HGT
-    211  00  YHQS95   0                        EOM
-  00001C02  0760FF80  0764039D...000100A8       07     925.0 MB   HGT
-    211  00  YHQS92   0                        EOM
-  00001C02  0760FF80  07640384...000100A8       07     900.0 MB   HGT
-    211  00  YHQS90   0                        EOM
-  00001C02  0760FF80  0764036B...000100A8       07     875.0 MB   HGT
-    211  00  YHQS91   0                        EOM
-  00001C02  0760FF80  07640352...000100A8       07     850.0 MB   HGT
-    211  00  YHQS85   0                        EOM
-  00001C02  0760FF80  07640339...000100A8       07     825.0 MB   HGT
-    211  00  YHQS82   0                        EOM
-  00001C02  0760FF80  07640320...000100A8       07     800.0 MB   HGT
-    211  00  YHQS80   0                        EOM
-  00001C02  0760FF80  07640307...000100A8       07     775.0 MB   HGT
-    211  00  YHQS77   0                        EOM
-  00001C02  0760FF80  076402EE...000100A8       07     750.0 MB   HGT
-    211  00  YHQS75   0                        EOM
-  00001C02  0760FF80  076402D5...000100A8       07     725.0 MB   HGT
-    211  00  YHQS72   0                        EOM
-  00001C02  0760FF80  076402BC...000100A8       07     700.0 MB   HGT
-    211  00  YHQS70   0                        EOM
-  00001C02  0760FF80  076402A3...000100A8       07     675.0 MB   HGT
-    211  00  YHQS67   0                        EOM
-  00001C02  0760FF80  0764028A...000100A8       07     650.0 MB   HGT
-    211  00  YHQS65   0                        EOM
-  00001C02  0760FF80  07640271...000100A8       07     625.0 MB   HGT
-    211  00  YHQS62   0                        EOM
-  00001C02  0760FF80  07640258...000100A8       07     600.0 MB   HGT
-    211  00  YHQS60   0                        EOM
-  00001C02  0760FF80  0764023F...000100A8       07     575.0 MB   HGT
-    211  00  YHQS57   0                        EOM
-  00001C02  0760FF80  07640226...000100A8       07     550.0 MB   HGT
-    211  00  YHQS55   0                        EOM
-  00001C02  0760FF80  0764020D...000100A8       07     525.0 MB   HGT
-    211  00  YHQS52   0                        EOM
-  00001C02  0760FF80  076401F4...000100A8       07     500.0 MB   HGT
-    211  00  YHQS50   0                        EOM
-  00001C02  0760FF80  07640190...000100A8       07     400.0 MB   HGT
-    211  00  YHQS40   0                        EOM
-  00001C02  0760FF80  076401C2...000100A8       07     450.0 MB   HGT
-    211  00  YHQS45   0                        EOM
-  00001C02  0760FF80  0764015E...000100A8       07     350.0 MB   HGT
-    211  00  YHQS35   0                        EOM
-  00001C02  0760FF80  0764012C...000100A8       07     300.0 MB   HGT
-    211  00  YHQS30   0                        EOM
-  00001C02  0760FF80  076400FA...000100A8       07     250.0 MB   HGT
-    211  00  YHQS25   0                        EOM
-  00001C02  0760FF80  076400C8...000100A8       07     200.0 MB   HGT
-    211  00  YHQS20   0                        EOM
-  00001C02  0760FF80  07640096...000100A8       07     150.0 MB   HGT
-    211  00  YHQS15   0                        EOM
-  00001C02  0760FF80  07640064...000100A8       07     100.0 MB   HGT
-    211  00  YHQS10   0                        EOM
-  00001C02  0760FF80  216403E8...000100A8       33    1000.0 MB   U GRD 
-    211  00  YUQS99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100A8       33     975.0 MB   U GRD
-    211  00  YUQS93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100A8       33     950.0 MB   U GRD
-    211  00  YUQS95   0                        EOM
-  00001C02  0760FF80  2164039D...000100A8       33     925.0 MB   U GRD
-    211  00  YUQS92   0                        EOM
-  00001C02  0760FF80  21640384...000100A8       33     900.0 MB   U GRD
-    211  00  YUQS90   0                        EOM
-  00001C02  0760FF80  2164036B...000100A8       33     875.0 MB   U GRD
-    211  00  YUQS91   0                        EOM
-  00001C02  0760FF80  21640352...000100A8       33     850.0 MB   U GRD
-    211  00  YUQS85   0                        EOM
-  00001C02  0760FF80  21640339...000100A8       33     825.0 MB   U GRD
-    211  00  YUQS82   0                        EOM
-  00001C02  0760FF80  21640320...000100A8       33     800.0 MB   U GRD
-    211  00  YUQS80   0                        EOM
-  00001C02  0760FF80  21640307...000100A8       33     775.0 MB   U GRD
-    211  00  YUQS77   0                        EOM
-  00001C02  0760FF80  216402EE...000100A8       33     750.0 MB   U GRD
-    211  00  YUQS75   0                        EOM
-  00001C02  0760FF80  216402D5...000100A8       33     725.0 MB   U GRD
-    211  00  YUQS72   0                        EOM
-  00001C02  0760FF80  216402BC...000100A8       33     700.0 MB   U GRD
-    211  00  YUQS70   0                        EOM
-  00001C02  0760FF80  216402A3...000100A8       33     675.0 MB   U GRD
-    211  00  YUQS67   0                        EOM
-  00001C02  0760FF80  2164028A...000100A8       33     650.0 MB   U GRD
-    211  00  YUQS65   0                        EOM
-  00001C02  0760FF80  21640271...000100A8       33     625.0 MB   U GRD
-    211  00  YUQS62   0                        EOM
-  00001C02  0760FF80  21640258...000100A8       33     600.0 MB   U GRD
-    211  00  YUQS60   0                        EOM
-  00001C02  0760FF80  2164023F...000100A8       33     575.0 MB   U GRD
-    211  00  YUQS57   0                        EOM
-  00001C02  0760FF80  21640226...000100A8       33     550.0 MB   U GRD
-    211  00  YUQS55   0                        EOM
-  00001C02  0760FF80  2164020D...000100A8       33     525.0 MB   U GRD
-    211  00  YUQS52   0                        EOM
-  00001C02  0760FF80  216401F4...000100A8       33     500.0 MB   U GRD
-    211  00  YUQS50   0                        EOM
-  00001C02  0760FF80  216401C2...000100A8       33     450.0 MB   U GRD
-    211  00  YUQS45   0                        EOM
-  00001C02  0760FF80  21640190...000100A8       33     400.0 MB   U GRD
-    211  00  YUQS40   0                        EOM
-  00001C02  0760FF80  2164015E...000100A8       33     350.0 MB   U GRD
-    211  00  YUQS35   0                        EOM
-  00001C02  0760FF80  2164012C...000100A8       33     300.0 MB   U GRD
-    211  00  YUQS30   0                        EOM
-  00001C02  0760FF80  216400FA...000100A8       33     250.0 MB   U GRD
-    211  00  YUQS25   0                        EOM
-  00001C02  0760FF80  216400C8...000100A8       33     200.0 MB   U GRD
-    211  00  YUQS20   0                        EOM
-  00001C02  0760FF80  21640096...000100A8       33     150.0 MB   U GRD
-    211  00  YUQS15   0                        EOM
-  00001C02  0760FF80  21640064...000100A8       33     100.0 MB   U GRD
-    211  00  YUQS10   0                        EOM
-  00001C02  0760FF80  226403E8...000100A8       34    1000.0 MB   V GRD
-    211  00  YVQS99   0                        EOM
-  00001C02  0760FF80  226403CF...000100A8       34     975.0 MB   V GRD
-    211  00  YVQS93   0                        EOM
-  00001C02  0760FF80  226403B6...000100A8       34     950.0 MB   V GRD
-    211  00  YVQS95   0                        EOM
-  00001C02  0760FF80  2264039D...000100A8       34     925.0 MB   V GRD
-    211  00  YVQS92   0                        EOM
-  00001C02  0760FF80  22640384...000100A8       34     900.0 MB   V GRD
-    211  00  YVQS90   0                        EOM
-  00001C02  0760FF80  2264036B...000100A8       34     875.0 MB   V GRD
-    211  00  YVQS91   0                        EOM
-  00001C02  0760FF80  22640352...000100A8       34     850.0 MB   V GRD
-    211  00  YVQS85   0                        EOM
-  00001C02  0760FF80  22640339...000100A8       34     825.0 MB   V GRD
-    211  00  YVQS82   0                        EOM
-  00001C02  0760FF80  22640320...000100A8       34     800.0 MB   V GRD
-    211  00  YVQS80   0                        EOM
-  00001C02  0760FF80  22640307...000100A8       34     775.0 MB   V GRD
-    211  00  YVQS77   0                        EOM
-  00001C02  0760FF80  226402EE...000100A8       34     750.0 MB   V GRD
-    211  00  YVQS75   0                        EOM
-  00001C02  0760FF80  226402D5...000100A8       34     725.0 MB   V GRD
-    211  00  YVQS72   0                        EOM
-  00001C02  0760FF80  226402BC...000100A8       34     700.0 MB   V GRD
-    211  00  YVQS70   0                        EOM
-  00001C02  0760FF80  226402A3...000100A8       34     675.0 MB   V GRD
-    211  00  YVQS67   0                        EOM
-  00001C02  0760FF80  2264028A...000100A8       34     650.0 MB   V GRD
-    211  00  YVQS65   0                        EOM
-  00001C02  0760FF80  22640271...000100A8       34     625.0 MB   V GRD
-    211  00  YVQS62   0                        EOM
-  00001C02  0760FF80  22640258...000100A8       34     600.0 MB   V GRD
-    211  00  YVQS60   0                        EOM
-  00001C02  0760FF80  2264023F...000100A8       34     575.0 MB   V GRD
-    211  00  YVQS57   0                        EOM
-  00001C02  0760FF80  22640226...000100A8       34     550.0 MB   V GRD
-    211  00  YVQS55   0                        EOM
-  00001C02  0760FF80  2264020D...000100A8       34     525.0 MB   V GRD
-    211  00  YVQS52   0                        EOM
-  00001C02  0760FF80  226401F4...000100A8       34     500.0 MB   V GRD
-    211  00  YVQS50   0                        EOM
-  00001C02  0760FF80  226401C2...000100A8       34     450.0 MB   V GRD
-    211  00  YVQS45   0                        EOM
-  00001C02  0760FF80  22640190...000100A8       34     400.0 MB   V GRD
-    211  00  YVQS40   0                        EOM
-  00001C02  0760FF80  2264015E...000100A8       34     350.0 MB   V GRD
-    211  00  YVQS35   0                        EOM
-  00001C02  0760FF80  2264012C...000100A8       34     300.0 MB   V GRD
-    211  00  YVQS30   0                        EOM
-  00001C02  0760FF80  226400FA...000100A8       34     250.0 MB   V GRD
-    211  00  YVQS25   0                        EOM
-  00001C02  0760FF80  226400C8...000100A8       34     200.0 MB   V GRD
-    211  00  YVQS20   0                        EOM
-  00001C02  0760FF80  22640096...000100A8       34     150.0 MB   V GRD
-    211  00  YVQS15   0                        EOM
-  00001C02  0760FF80  22640064...000100A8       34     100.0 MB   V GRD
-    211  00  YVQS10   0                        EOM
-  00001C02  0760FF80  02660000...000100A8       02           MSL  PRMSL
-    211  00  YPQS89   0                        EOM
-  00001C02  0760FF80  3D010000...000100A8       61           SFC  A PCP
-    211  00  YEQS98   0                        EOM
-  00001C02  0760FF80  346403E8...000100A8       52    1000.0 MB   R H
-    211  00  YRQS99   0                        EOM
-  00001C02  0760FF80  346403CF...000100A8       52     975.0 MB   R H
-    211  00  YRQS93   0                        EOM
-  00001C02  0760FF80  346403B6...000100A8       52     950.0 MB   R H
-    211  00  YRQS95   0                        EOM
-  00001C02  0760FF80  3464039D...000100A8       52     925.0 MB   R H
-    211  00  YRQS92   0                        EOM
-  00001C02  0760FF80  34640384...000100A8       52     900.0 MB   R H
-    211  00  YRQS90   0                        EOM
-  00001C02  0760FF80  3464036B...000100A8       52     875.0 MB   R H
-    211  00  YRQS91   0                        EOM
-  00001C02  0760FF80  34640352...000100A8       52     850.0 MB   R H
-    211  00  YRQS85   0                        EOM
-  00001C02  0760FF80  34640339...000100A8       52     825.0 MB   R H
-    211  00  YRQS82   0                        EOM
-  00001C02  0760FF80  34640320...000100A8       52     800.0 MB   R H
-    211  00  YRQS80   0                        EOM
-  00001C02  0760FF80  34640307...000100A8       52     775.0 MB   R H
-    211  00  YRQS77   0                        EOM
-  00001C02  0760FF80  346402EE...000100A8       52     750.0 MB   R H
-    211  00  YRQS75   0                        EOM
-  00001C02  0760FF80  346402D5...000100A8       52     725.0 MB   R H
-    211  00  YRQS72   0                        EOM
-  00001C02  0760FF80  346402BC...000100A8       52     700.0 MB   R H
-    211  00  YRQS70   0                        EOM
-  00001C02  0760FF80  346402A3...000100A8       52     675.0 MB   R H
-    211  00  YRQS67   0                        EOM
-  00001C02  0760FF80  3464028A...000100A8       52     650.0 MB   R H
-    211  00  YRQS65   0                        EOM
-  00001C02  0760FF80  34640271...000100A8       52     625.0 MB   R H
-    211  00  YRQS62   0                        EOM
-  00001C02  0760FF80  34640258...000100A8       52     600.0 MB   R H
-    211  00  YRQS60   0                        EOM
-  00001C02  0760FF80  3464023F...000100A8       52     575.0 MB   R H
-    211  00  YRQS57   0                        EOM
-  00001C02  0760FF80  34640226...000100A8       52     550.0 MB   R H
-    211  00  YRQS55   0                        EOM
-  00001C02  0760FF80  3464020D...000100A8       52     525.0 MB   R H
-    211  00  YRQS52   0                        EOM
-  00001C02  0760FF80  346401F4...000100A8       52     500.0 MB   R H
-    211  00  YRQS50   0                        EOM
-  00001C02  0760FF80  346401C2...000100A8       52     450.0 MB   R H
-    211  00  YRQS45   0                        EOM
-  00001C02  0760FF80  34640190...000100A8       52     400.0 MB   R H
-    211  00  YRQS40   0                        EOM
-  00001C02  0760FF80  3464015E...000100A8       52     350.0 MB   R H
-    211  00  YRQS35   0                        EOM
-  00001C02  0760FF80  3464012C...000100A8       52     300.0 MB   R H
-    211  00  YRQS30   0                        EOM
-  00001C02  0760FF80  346400FA...000100A8       52     250.0 MB   R H
-    211  00  YRQS25   0                        EOM
-  00001C02  0760FF80  346400C8...000100A8       52     200.0 MB   R H
-    211  00  YRQS20   0                        EOM
-  00001C02  0760FF80  34640096...000100A8       52     150.0 MB   R H
-    211  00  YRQS15   0                        EOM
-  00001C02  0760FF80  34640064...000100A8       52     100.0 MB   R H
-    211  00  YRQS10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100A8       11    1000.0 MB   TMP
-    211  00  YTQS99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100A8       11     975.0 MB   TMP
-    211  00  YTQS93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100A8       11     950.0 MB   TMP
-    211  00  YTQS95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100A8       11     925.0 MB   TMP
-    211  00  YTQS92   0                        EOM
-  00001C02  0760FF80  0B640384...000100A8       11     900.0 MB   TMP
-    211  00  YTQS90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100A8       11     875.0 MB   TMP
-    211  00  YTQS91   0                        EOM
-  00001C02  0760FF80  0B640352...000100A8       11     850.0 MB   TMP
-    211  00  YTQS85   0                        EOM
-  00001C02  0760FF80  0B640339...000100A8       11     825.0 MB   TMP
-    211  00  YTQS82   0                        EOM
-  00001C02  0760FF80  0B640320...000100A8       11     800.0 MB   TMP
-    211  00  YTQS80   0                        EOM
-  00001C02  0760FF80  0B640307...000100A8       11     775.0 MB   TMP
-    211  00  YTQS77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100A8       11     750.0 MB   TMP
-    211  00  YTQS75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100A8       11     725.0 MB   TMP
-    211  00  YTQS72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100A8       11     700.0 MB   TMP
-    211  00  YTQS70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100A8       11     675.0 MB   TMP
-    211  00  YTQS67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100A8       11     650.0 MB   TMP
-    211  00  YTQS65   0                        EOM
-  00001C02  0760FF80  0B640271...000100A8       11     625.0 MB   TMP
-    211  00  YTQS62   0                        EOM
-  00001C02  0760FF80  0B640258...000100A8       11     600.0 MB   TMP
-    211  00  YTQS60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100A8       11     575.0 MB   TMP
-    211  00  YTQS57   0                        EOM
-  00001C02  0760FF80  0B640226...000100A8       11     550.0 MB   TMP
-    211  00  YTQS55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100A8       11     525.0 MB   TMP
-    211  00  YTQS52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100A8       11     500.0 MB   TMP
-    211  00  YTQS50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100A8       11     450.0 MB   TMP
-    211  00  YTQS45   0                        EOM
-  00001C02  0760FF80  0B640190...000100A8       11     400.0 MB   TMP
-    211  00  YTQS40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100A8       11     350.0 MB   TMP
-    211  00  YTQS35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100A8       11     300.0 MB   TMP
-    211  00  YTQS30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100A8       11     250.0 MB   TMP
-    211  00  YTQS25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100A8       11     200.0 MB   TMP
-    211  00  YTQS20   0                        EOM
-  00001C02  0760FF80  0B640096...000100A8       11     150.0 MB   TMP
-    211  00  YTQS15   0                        EOM
-  00001C02  0760FF80  0B640064...000100A8       11     100.0 MB   TMP
-    211  00  YTQS10   0                        EOM
-  00001C02  0760FF80  28640352...000100A8       40     850.0 MB  DZDT
-    211  00  YOQS85   0                        EOM
-  00001C02  0760FF80  286402BC...000100A8       40     700.0 MB  DZDT
-    211  00  YOQS70   0                        EOM
-  00001C02  0760FF80  286401F4...000100A8       40     500.0 MB  DZDT
-    211  00  YOQS50   0                        EOM
-  00001C02  0760FF80  01010000...000100A8       01          SFC  PRES
-    211  00  YPQS98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100A8       52        44/100  R H
-    211  00  YRQS00   0                        EOM
-  00001C02  0760FF80  296401F4...000100A8       41     500.0 MB ABS V
-    211  00  YCQS50   0                        EOM 
-  00001C02  0760FF80  9D010000...000100A8      157          SFC   CAPE
-    211  00  YWQS98   0                        EOM
-  00001C02  0760FF80  9C010000...000100A8      156          SFC   CIN
-    211  00  YYQS98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100A8      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQS86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100A8      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQS86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100A8       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100A8       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100A8       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100A8       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  0B749678...000100A8       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100A8       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQS86   0                        EOM
-  00001C02  0760FF80  34741E00...000100A8       52   30 SPDY   0 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100A8       52   60 SPDY  30 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100A8       52   90 SPDY  60 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  3474785A...000100A8       52  120 SPDY  90 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  34749678...000100A8       52  150 SPDY 120 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  3474B496...000100A8       52  180 SPDY 150 SPDY  R H
-    211  00  YRQS86   0                        EOM
-  00001C02  0760FF80  21741E00...000100A8       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100A8       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100A8       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  2174785A...000100A8       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  21749678...000100A8       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  2174B496...000100A8       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQS86   0                        EOM
-  00001C02  0760FF80  22741E00...000100A8       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100A8       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100A8       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  2274785A...000100A8       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  22749678...000100A8       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  2274B496...000100A8       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQS86   0                        EOM
-  00001C02  0760FF80  0B690002...000100A8       11    2  HTGL     TMP
-    211  00  YTQS98   0                        EOM
-  00001C02  0760FF80  34690002...000100A8       52    2  HTGL     R H
-    211  00  YRQS98   0                        EOM
-  00001C02  0760FF80  2169000A...000100A8       33   10  HTGL     U GRD
-    211  00  YUQS98   0                        EOM
-  00001C02  0760FF80  2269000A...000100A8       34   10  HTGL     V GRD
-    211  00  YVQS98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs174.211 b/parm/wmo/grib_awpgfs174.211
deleted file mode 100755
index f60ebbf8de..0000000000
--- a/parm/wmo/grib_awpgfs174.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100AE       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100AE       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100AE       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100AE       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100AE       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100AE       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100AE       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100AE       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100AE       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100AE       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100AE       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100AE       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100AE       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100AE       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100AE       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100AE       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100AE       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100AE       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100AE       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100AE       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100AE       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...000100AE       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...000100AE       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...000100AE       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100AE       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100AE       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100AE       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100AE       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100AE       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100AE       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100AE       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100AE       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100AE       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100AE       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100AE       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100AE       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100AE       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100AE       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100AE       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100AE       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100AE       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100AE       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100AE       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100AE       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100AE       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100AE       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100AE       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100AE       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100AE       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100AE       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100AE       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100AE       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100AE       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100AE       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100AE       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100AE       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100AE       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100AE       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100AE       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100AE       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100AE       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100AE       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100AE       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100AE       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100AE       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100AE       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100AE       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100AE       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100AE       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100AE       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100AE       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100AE       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100AE       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100AE       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100AE       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100AE       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100AE       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100AE       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100AE       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100AE       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100AE       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100AE       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100AE       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100AE       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100AE       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100AE       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100AE       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100AE       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100AE       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100AE       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100AE       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100AE       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100AE       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100AE       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100AE       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100AE       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100AE       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100AE       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100AE       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100AE       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100AE       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100AE       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100AE       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100AE       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100AE       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100AE       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100AE       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100AE       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100AE       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100AE       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100AE       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100AE       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100AE       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100AE       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100AE       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100AE       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100AE       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100AE       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100AE       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100AE       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100AE       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100AE       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100AE       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100AE       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100AE       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100AE       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100AE       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100AE       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100AE       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100AE       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100AE       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100AE       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100AE       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100AE       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100AE       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100AE       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100AE       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100AE       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100AE       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100AE       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100AE       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100AE       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100AE       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100AE       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100AE       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100AE       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100AE       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100AE       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100AE       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100AE       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100AE       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100AE       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100AE       41    500.0 MB    ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100AE      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100AE      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100AE      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100AE      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100AE       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100AE       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100AE       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100AE       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100AE       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100AE       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100AE       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100AE       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100AE       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100AE       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100AE       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100AE       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100AE       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100AE       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100AE       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100AE       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100AE       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100AE       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100AE       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100AE       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100AE       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100AE       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100AE       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100AE       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100AE       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100AE       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100AE       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100AE       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs180.211 b/parm/wmo/grib_awpgfs180.211
deleted file mode 100755
index 715cc8e13b..0000000000
--- a/parm/wmo/grib_awpgfs180.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100B4       07    1000.0 MB   HGT
-    211  00  YHQT99   0                        EOM
-  00001C02  0760FF80  076403CF...000100B4       07     975.0 MB   HGT
-    211  00  YHQT93   0                        EOM
-  00001C02  0760FF80  076403B6...000100B4       07     950.0 MB   HGT
-    211  00  YHQT95   0                        EOM
-  00001C02  0760FF80  0764039D...000100B4       07     925.0 MB   HGT
-    211  00  YHQT92   0                        EOM
-  00001C02  0760FF80  07640384...000100B4       07     900.0 MB   HGT
-    211  00  YHQT90   0                        EOM
-  00001C02  0760FF80  0764036B...000100B4       07     875.0 MB   HGT
-    211  00  YHQT91   0                        EOM
-  00001C02  0760FF80  07640352...000100B4       07     850.0 MB   HGT
-    211  00  YHQT85   0                        EOM
-  00001C02  0760FF80  07640339...000100B4       07     825.0 MB   HGT
-    211  00  YHQT82   0                        EOM
-  00001C02  0760FF80  07640320...000100B4       07     800.0 MB   HGT
-    211  00  YHQT80   0                        EOM
-  00001C02  0760FF80  07640307...000100B4       07     775.0 MB   HGT
-    211  00  YHQT77   0                        EOM
-  00001C02  0760FF80  076402EE...000100B4       07     750.0 MB   HGT
-    211  00  YHQT75   0                        EOM
-  00001C02  0760FF80  076402D5...000100B4       07     725.0 MB   HGT
-    211  00  YHQT72   0                        EOM
-  00001C02  0760FF80  076402BC...000100B4       07     700.0 MB   HGT
-    211  00  YHQT70   0                        EOM
-  00001C02  0760FF80  076402A3...000100B4       07     675.0 MB   HGT
-    211  00  YHQT67   0                        EOM
-  00001C02  0760FF80  0764028A...000100B4       07     650.0 MB   HGT
-    211  00  YHQT65   0                        EOM
-  00001C02  0760FF80  07640271...000100B4       07     625.0 MB   HGT
-    211  00  YHQT62   0                        EOM
-  00001C02  0760FF80  07640258...000100B4       07     600.0 MB   HGT
-    211  00  YHQT60   0                        EOM
-  00001C02  0760FF80  0764023F...000100B4       07     575.0 MB   HGT
-    211  00  YHQT57   0                        EOM
-  00001C02  0760FF80  07640226...000100B4       07     550.0 MB   HGT
-    211  00  YHQT55   0                        EOM
-  00001C02  0760FF80  0764020D...000100B4       07     525.0 MB   HGT
-    211  00  YHQT52   0                        EOM
-  00001C02  0760FF80  076401F4...000100B4       07     500.0 MB   HGT
-    211  00  YHQT50   0                        EOM
-  00001C02  0760FF80  07640190...000100B4       07     400.0 MB   HGT
-    211  00  YHQT40   0                        EOM
-  00001C02  0760FF80  076401C2...000100B4       07     450.0 MB   HGT
-    211  00  YHQT45   0                        EOM
-  00001C02  0760FF80  0764015E...000100B4       07     350.0 MB   HGT
-    211  00  YHQT35   0                        EOM
-  00001C02  0760FF80  0764012C...000100B4       07     300.0 MB   HGT
-    211  00  YHQT30   0                        EOM
-  00001C02  0760FF80  076400FA...000100B4       07     250.0 MB   HGT
-    211  00  YHQT25   0                        EOM
-  00001C02  0760FF80  076400C8...000100B4       07     200.0 MB   HGT
-    211  00  YHQT20   0                        EOM
-  00001C02  0760FF80  07640096...000100B4       07     150.0 MB   HGT
-    211  00  YHQT15   0                        EOM
-  00001C02  0760FF80  07640064...000100B4       07     100.0 MB   HGT
-    211  00  YHQT10   0                        EOM
-  00001C02  0760FF80  216403E8...000100B4       33    1000.0 MB   U GRD 
-    211  00  YUQT99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100B4       33     975.0 MB   U GRD
-    211  00  YUQT93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100B4       33     950.0 MB   U GRD
-    211  00  YUQT95   0                        EOM
-  00001C02  0760FF80  2164039D...000100B4       33     925.0 MB   U GRD
-    211  00  YUQT92   0                        EOM
-  00001C02  0760FF80  21640384...000100B4       33     900.0 MB   U GRD
-    211  00  YUQT90   0                        EOM
-  00001C02  0760FF80  2164036B...000100B4       33     875.0 MB   U GRD
-    211  00  YUQT91   0                        EOM
-  00001C02  0760FF80  21640352...000100B4       33     850.0 MB   U GRD
-    211  00  YUQT85   0                        EOM
-  00001C02  0760FF80  21640339...000100B4       33     825.0 MB   U GRD
-    211  00  YUQT82   0                        EOM
-  00001C02  0760FF80  21640320...000100B4       33     800.0 MB   U GRD
-    211  00  YUQT80   0                        EOM
-  00001C02  0760FF80  21640307...000100B4       33     775.0 MB   U GRD
-    211  00  YUQT77   0                        EOM
-  00001C02  0760FF80  216402EE...000100B4       33     750.0 MB   U GRD
-    211  00  YUQT75   0                        EOM
-  00001C02  0760FF80  216402D5...000100B4       33     725.0 MB   U GRD
-    211  00  YUQT72   0                        EOM
-  00001C02  0760FF80  216402BC...000100B4       33     700.0 MB   U GRD
-    211  00  YUQT70   0                        EOM
-  00001C02  0760FF80  216402A3...000100B4       33     675.0 MB   U GRD
-    211  00  YUQT67   0                        EOM
-  00001C02  0760FF80  2164028A...000100B4       33     650.0 MB   U GRD
-    211  00  YUQT65   0                        EOM
-  00001C02  0760FF80  21640271...000100B4       33     625.0 MB   U GRD
-    211  00  YUQT62   0                        EOM
-  00001C02  0760FF80  21640258...000100B4       33     600.0 MB   U GRD
-    211  00  YUQT60   0                        EOM
-  00001C02  0760FF80  2164023F...000100B4       33     575.0 MB   U GRD
-    211  00  YUQT57   0                        EOM
-  00001C02  0760FF80  21640226...000100B4       33     550.0 MB   U GRD
-    211  00  YUQT55   0                        EOM
-  00001C02  0760FF80  2164020D...000100B4       33     525.0 MB   U GRD
-    211  00  YUQT52   0                        EOM
-  00001C02  0760FF80  216401F4...000100B4       33     500.0 MB   U GRD
-    211  00  YUQT50   0                        EOM
-  00001C02  0760FF80  216401C2...000100B4       33     450.0 MB   U GRD
-    211  00  YUQT45   0                        EOM
-  00001C02  0760FF80  21640190...000100B4       33     400.0 MB   U GRD
-    211  00  YUQT40   0                        EOM
-  00001C02  0760FF80  2164015E...000100B4       33     350.0 MB   U GRD
-    211  00  YUQT35   0                        EOM
-  00001C02  0760FF80  2164012C...000100B4       33     300.0 MB   U GRD
-    211  00  YUQT30   0                        EOM
-  00001C02  0760FF80  216400FA...000100B4       33     250.0 MB   U GRD
-    211  00  YUQT25   0                        EOM
-  00001C02  0760FF80  216400C8...000100B4       33     200.0 MB   U GRD
-    211  00  YUQT20   0                        EOM
-  00001C02  0760FF80  21640096...000100B4       33     150.0 MB   U GRD
-    211  00  YUQT15   0                        EOM
-  00001C02  0760FF80  21640064...000100B4       33     100.0 MB   U GRD
-    211  00  YUQT10   0                        EOM
-  00001C02  0760FF80  226403E8...000100B4       34    1000.0 MB   V GRD
-    211  00  YVQT99   0                        EOM
-  00001C02  0760FF80  226403CF...000100B4       34     975.0 MB   V GRD
-    211  00  YVQT93   0                        EOM
-  00001C02  0760FF80  226403B6...000100B4       34     950.0 MB   V GRD
-    211  00  YVQT95   0                        EOM
-  00001C02  0760FF80  2264039D...000100B4       34     925.0 MB   V GRD
-    211  00  YVQT92   0                        EOM
-  00001C02  0760FF80  22640384...000100B4       34     900.0 MB   V GRD
-    211  00  YVQT90   0                        EOM
-  00001C02  0760FF80  2264036B...000100B4       34     875.0 MB   V GRD
-    211  00  YVQT91   0                        EOM
-  00001C02  0760FF80  22640352...000100B4       34     850.0 MB   V GRD
-    211  00  YVQT85   0                        EOM
-  00001C02  0760FF80  22640339...000100B4       34     825.0 MB   V GRD
-    211  00  YVQT82   0                        EOM
-  00001C02  0760FF80  22640320...000100B4       34     800.0 MB   V GRD
-    211  00  YVQT80   0                        EOM
-  00001C02  0760FF80  22640307...000100B4       34     775.0 MB   V GRD
-    211  00  YVQT77   0                        EOM
-  00001C02  0760FF80  226402EE...000100B4       34     750.0 MB   V GRD
-    211  00  YVQT75   0                        EOM
-  00001C02  0760FF80  226402D5...000100B4       34     725.0 MB   V GRD
-    211  00  YVQT72   0                        EOM
-  00001C02  0760FF80  226402BC...000100B4       34     700.0 MB   V GRD
-    211  00  YVQT70   0                        EOM
-  00001C02  0760FF80  226402A3...000100B4       34     675.0 MB   V GRD
-    211  00  YVQT67   0                        EOM
-  00001C02  0760FF80  2264028A...000100B4       34     650.0 MB   V GRD
-    211  00  YVQT65   0                        EOM
-  00001C02  0760FF80  22640271...000100B4       34     625.0 MB   V GRD
-    211  00  YVQT62   0                        EOM
-  00001C02  0760FF80  22640258...000100B4       34     600.0 MB   V GRD
-    211  00  YVQT60   0                        EOM
-  00001C02  0760FF80  2264023F...000100B4       34     575.0 MB   V GRD
-    211  00  YVQT57   0                        EOM
-  00001C02  0760FF80  22640226...000100B4       34     550.0 MB   V GRD
-    211  00  YVQT55   0                        EOM
-  00001C02  0760FF80  2264020D...000100B4       34     525.0 MB   V GRD
-    211  00  YVQT52   0                        EOM
-  00001C02  0760FF80  226401F4...000100B4       34     500.0 MB   V GRD
-    211  00  YVQT50   0                        EOM
-  00001C02  0760FF80  226401C2...000100B4       34     450.0 MB   V GRD
-    211  00  YVQT45   0                        EOM
-  00001C02  0760FF80  22640190...000100B4       34     400.0 MB   V GRD
-    211  00  YVQT40   0                        EOM
-  00001C02  0760FF80  2264015E...000100B4       34     350.0 MB   V GRD
-    211  00  YVQT35   0                        EOM
-  00001C02  0760FF80  2264012C...000100B4       34     300.0 MB   V GRD
-    211  00  YVQT30   0                        EOM
-  00001C02  0760FF80  226400FA...000100B4       34     250.0 MB   V GRD
-    211  00  YVQT25   0                        EOM
-  00001C02  0760FF80  226400C8...000100B4       34     200.0 MB   V GRD
-    211  00  YVQT20   0                        EOM
-  00001C02  0760FF80  22640096...000100B4       34     150.0 MB   V GRD
-    211  00  YVQT15   0                        EOM
-  00001C02  0760FF80  22640064...000100B4       34     100.0 MB   V GRD
-    211  00  YVQT10   0                        EOM
-  00001C02  0760FF80  02660000...000100B4       02           MSL  PRMSL
-    211  00  YPQT89   0                        EOM
-  00001C02  0760FF80  3D010000...000100B4       61           SFC  A PCP
-    211  00  YEQT98   0                        EOM
-  00001C02  0760FF80  346403E8...000100B4       52    1000.0 MB   R H
-    211  00  YRQT99   0                        EOM
-  00001C02  0760FF80  346403CF...000100B4       52     975.0 MB   R H
-    211  00  YRQT93   0                        EOM
-  00001C02  0760FF80  346403B6...000100B4       52     950.0 MB   R H
-    211  00  YRQT95   0                        EOM
-  00001C02  0760FF80  3464039D...000100B4       52     925.0 MB   R H
-    211  00  YRQT92   0                        EOM
-  00001C02  0760FF80  34640384...000100B4       52     900.0 MB   R H
-    211  00  YRQT90   0                        EOM
-  00001C02  0760FF80  3464036B...000100B4       52     875.0 MB   R H
-    211  00  YRQT91   0                        EOM
-  00001C02  0760FF80  34640352...000100B4       52     850.0 MB   R H
-    211  00  YRQT85   0                        EOM
-  00001C02  0760FF80  34640339...000100B4       52     825.0 MB   R H
-    211  00  YRQT82   0                        EOM
-  00001C02  0760FF80  34640320...000100B4       52     800.0 MB   R H
-    211  00  YRQT80   0                        EOM
-  00001C02  0760FF80  34640307...000100B4       52     775.0 MB   R H
-    211  00  YRQT77   0                        EOM
-  00001C02  0760FF80  346402EE...000100B4       52     750.0 MB   R H
-    211  00  YRQT75   0                        EOM
-  00001C02  0760FF80  346402D5...000100B4       52     725.0 MB   R H
-    211  00  YRQT72   0                        EOM
-  00001C02  0760FF80  346402BC...000100B4       52     700.0 MB   R H
-    211  00  YRQT70   0                        EOM
-  00001C02  0760FF80  346402A3...000100B4       52     675.0 MB   R H
-    211  00  YRQT67   0                        EOM
-  00001C02  0760FF80  3464028A...000100B4       52     650.0 MB   R H
-    211  00  YRQT65   0                        EOM
-  00001C02  0760FF80  34640271...000100B4       52     625.0 MB   R H
-    211  00  YRQT62   0                        EOM
-  00001C02  0760FF80  34640258...000100B4       52     600.0 MB   R H
-    211  00  YRQT60   0                        EOM
-  00001C02  0760FF80  3464023F...000100B4       52     575.0 MB   R H
-    211  00  YRQT57   0                        EOM
-  00001C02  0760FF80  34640226...000100B4       52     550.0 MB   R H
-    211  00  YRQT55   0                        EOM
-  00001C02  0760FF80  3464020D...000100B4       52     525.0 MB   R H
-    211  00  YRQT52   0                        EOM
-  00001C02  0760FF80  346401F4...000100B4       52     500.0 MB   R H
-    211  00  YRQT50   0                        EOM
-  00001C02  0760FF80  346401C2...000100B4       52     450.0 MB   R H
-    211  00  YRQT45   0                        EOM
-  00001C02  0760FF80  34640190...000100B4       52     400.0 MB   R H
-    211  00  YRQT40   0                        EOM
-  00001C02  0760FF80  3464015E...000100B4       52     350.0 MB   R H
-    211  00  YRQT35   0                        EOM
-  00001C02  0760FF80  3464012C...000100B4       52     300.0 MB   R H
-    211  00  YRQT30   0                        EOM
-  00001C02  0760FF80  346400FA...000100B4       52     250.0 MB   R H
-    211  00  YRQT25   0                        EOM
-  00001C02  0760FF80  346400C8...000100B4       52     200.0 MB   R H
-    211  00  YRQT20   0                        EOM
-  00001C02  0760FF80  34640096...000100B4       52     150.0 MB   R H
-    211  00  YRQT15   0                        EOM
-  00001C02  0760FF80  34640064...000100B4       52     100.0 MB   R H
-    211  00  YRQT10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100B4       11    1000.0 MB   TMP
-    211  00  YTQT99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100B4       11     975.0 MB   TMP
-    211  00  YTQT93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100B4       11     950.0 MB   TMP
-    211  00  YTQT95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100B4       11     925.0 MB   TMP
-    211  00  YTQT92   0                        EOM
-  00001C02  0760FF80  0B640384...000100B4       11     900.0 MB   TMP
-    211  00  YTQT90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100B4       11     875.0 MB   TMP
-    211  00  YTQT91   0                        EOM
-  00001C02  0760FF80  0B640352...000100B4       11     850.0 MB   TMP
-    211  00  YTQT85   0                        EOM
-  00001C02  0760FF80  0B640339...000100B4       11     825.0 MB   TMP
-    211  00  YTQT82   0                        EOM
-  00001C02  0760FF80  0B640320...000100B4       11     800.0 MB   TMP
-    211  00  YTQT80   0                        EOM
-  00001C02  0760FF80  0B640307...000100B4       11     775.0 MB   TMP
-    211  00  YTQT77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100B4       11     750.0 MB   TMP
-    211  00  YTQT75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100B4       11     725.0 MB   TMP
-    211  00  YTQT72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100B4       11     700.0 MB   TMP
-    211  00  YTQT70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100B4       11     675.0 MB   TMP
-    211  00  YTQT67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100B4       11     650.0 MB   TMP
-    211  00  YTQT65   0                        EOM
-  00001C02  0760FF80  0B640271...000100B4       11     625.0 MB   TMP
-    211  00  YTQT62   0                        EOM
-  00001C02  0760FF80  0B640258...000100B4       11     600.0 MB   TMP
-    211  00  YTQT60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100B4       11     575.0 MB   TMP
-    211  00  YTQT57   0                        EOM
-  00001C02  0760FF80  0B640226...000100B4       11     550.0 MB   TMP
-    211  00  YTQT55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100B4       11     525.0 MB   TMP
-    211  00  YTQT52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100B4       11     500.0 MB   TMP
-    211  00  YTQT50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100B4       11     450.0 MB   TMP
-    211  00  YTQT45   0                        EOM
-  00001C02  0760FF80  0B640190...000100B4       11     400.0 MB   TMP
-    211  00  YTQT40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100B4       11     350.0 MB   TMP
-    211  00  YTQT35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100B4       11     300.0 MB   TMP
-    211  00  YTQT30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100B4       11     250.0 MB   TMP
-    211  00  YTQT25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100B4       11     200.0 MB   TMP
-    211  00  YTQT20   0                        EOM
-  00001C02  0760FF80  0B640096...000100B4       11     150.0 MB   TMP
-    211  00  YTQT15   0                        EOM
-  00001C02  0760FF80  0B640064...000100B4       11     100.0 MB   TMP
-    211  00  YTQT10   0                        EOM
-  00001C02  0760FF80  28640352...000100B4       40     850.0 MB  DZDT
-    211  00  YOQT85   0                        EOM
-  00001C02  0760FF80  286402BC...000100B4       40     700.0 MB  DZDT
-    211  00  YOQT70   0                        EOM
-  00001C02  0760FF80  286401F4...000100B4       40     500.0 MB  DZDT
-    211  00  YOQT50   0                        EOM
-  00001C02  0760FF80  01010000...000100B4       01          SFC  PRES
-    211  00  YPQT98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100B4       52        44/100  R H
-    211  00  YRQT00   0                        EOM
-  00001C02  0760FF80  296401F4...000100B4       41     500.0 MB ABS V
-    211  00  YCQT50   0                        EOM
-  00001C02  0760FF80  9D010000...000100B4      157          SFC   CAPE
-    211  00  YWQT98   0                        EOM
-  00001C02  0760FF80  9C010000...000100B4      156          SFC   CIN
-    211  00  YYQT98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100B4      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQT86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100B4      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQT86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100B4       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100B4       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100B4       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100B4       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  0B749678...000100B4       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100B4       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQT86   0                        EOM
-  00001C02  0760FF80  34741E00...000100B4       52   30 SPDY   0 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100B4       52   60 SPDY  30 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100B4       52   90 SPDY  60 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  3474785A...000100B4       52  120 SPDY  90 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  34749678...000100B4       52  150 SPDY 120 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  3474B496...000100B4       52  180 SPDY 150 SPDY  R H
-    211  00  YRQT86   0                        EOM
-  00001C02  0760FF80  21741E00...000100B4       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100B4       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100B4       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  2174785A...000100B4       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  21749678...000100B4       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  2174B496...000100B4       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQT86   0                        EOM
-  00001C02  0760FF80  22741E00...000100B4       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100B4       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100B4       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  2274785A...000100B4       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  22749678...000100B4       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  2274B496...000100B4       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQT86   0                        EOM
-  00001C02  0760FF80  0B690002...000100B4       11    2  HTGL     TMP
-    211  00  YTQT98   0                        EOM
-  00001C02  0760FF80  34690002...000100B4       52    2  HTGL     R H
-    211  00  YRQT98   0                        EOM
-  00001C02  0760FF80  2169000A...000100B4       33   10  HTGL     U GRD
-    211  00  YUQT98   0                        EOM
-  00001C02  0760FF80  2269000A...000100B4       34   10  HTGL     V GRD
-    211  00  YVQT98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs186.211 b/parm/wmo/grib_awpgfs186.211
deleted file mode 100755
index edf45d28f6..0000000000
--- a/parm/wmo/grib_awpgfs186.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100BA       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100BA       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100BA       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100BA       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100BA       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100BA       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100BA       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100BA       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100BA       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100BA       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100BA       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100BA       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100BA       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100BA       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100BA       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100BA       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100BA       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100BA       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100BA       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100BA       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100BA       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  07640190...000100BA       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  076401C2...000100BA       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  0764015E...000100BA       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100BA       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100BA       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100BA       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100BA       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100BA       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100BA       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100BA       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100BA       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100BA       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100BA       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100BA       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100BA       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100BA       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100BA       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100BA       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100BA       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100BA       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100BA       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100BA       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100BA       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100BA       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100BA       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100BA       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100BA       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100BA       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100BA       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100BA       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100BA       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100BA       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100BA       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100BA       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100BA       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100BA       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100BA       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100BA       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100BA       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100BA       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100BA       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100BA       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100BA       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100BA       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100BA       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100BA       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100BA       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100BA       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100BA       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100BA       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100BA       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100BA       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100BA       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100BA       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100BA       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100BA       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100BA       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100BA       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100BA       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100BA       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100BA       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100BA       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100BA       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100BA       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100BA       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100BA       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100BA       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100BA       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100BA       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100BA       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100BA       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100BA       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100BA       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100BA       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100BA       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100BA       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100BA       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100BA       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100BA       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100BA       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100BA       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100BA       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100BA       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100BA       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100BA       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100BA       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100BA       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100BA       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100BA       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100BA       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100BA       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100BA       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100BA       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100BA       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100BA       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100BA       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100BA       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100BA       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100BA       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100BA       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100BA       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100BA       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100BA       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100BA       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100BA       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100BA       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100BA       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100BA       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100BA       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100BA       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100BA       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100BA       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100BA       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100BA       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100BA       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100BA       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100BA       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100BA       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100BA       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100BA       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100BA       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100BA       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100BA       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100BA       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100BA       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100BA       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100BA       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100BA       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100BA       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100BA       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100BA       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100BA       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100BA      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100BA      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100BA      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100BA      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100BA       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100BA       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100BA       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100BA       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100BA       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100BA       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100BA       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100BA       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100BA       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100BA       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100BA       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100BA       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100BA       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100BA       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100BA       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100BA       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100BA       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100BA       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100BA       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100BA       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100BA       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100BA       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100BA       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100BA       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100BA       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100BA       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100BA       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100BA       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs192.211 b/parm/wmo/grib_awpgfs192.211
deleted file mode 100755
index 75883df3da..0000000000
--- a/parm/wmo/grib_awpgfs192.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100C0       07    1000.0 MB   HGT
-    211  00  YHQU99   0                        EOM
-  00001C02  0760FF80  076403CF...000100C0       07     975.0 MB   HGT
-    211  00  YHQU93   0                        EOM
-  00001C02  0760FF80  076403B6...000100C0       07     950.0 MB   HGT
-    211  00  YHQU95   0                        EOM
-  00001C02  0760FF80  0764039D...000100C0       07     925.0 MB   HGT
-    211  00  YHQU92   0                        EOM
-  00001C02  0760FF80  07640384...000100C0       07     900.0 MB   HGT
-    211  00  YHQU90   0                        EOM
-  00001C02  0760FF80  0764036B...000100C0       07     875.0 MB   HGT
-    211  00  YHQU91   0                        EOM
-  00001C02  0760FF80  07640352...000100C0       07     850.0 MB   HGT
-    211  00  YHQU85   0                        EOM
-  00001C02  0760FF80  07640339...000100C0       07     825.0 MB   HGT
-    211  00  YHQU82   0                        EOM
-  00001C02  0760FF80  07640320...000100C0       07     800.0 MB   HGT
-    211  00  YHQU80   0                        EOM
-  00001C02  0760FF80  07640307...000100C0       07     775.0 MB   HGT
-    211  00  YHQU77   0                        EOM
-  00001C02  0760FF80  076402EE...000100C0       07     750.0 MB   HGT
-    211  00  YHQU75   0                        EOM
-  00001C02  0760FF80  076402D5...000100C0       07     725.0 MB   HGT
-    211  00  YHQU72   0                        EOM
-  00001C02  0760FF80  076402BC...000100C0       07     700.0 MB   HGT
-    211  00  YHQU70   0                        EOM
-  00001C02  0760FF80  076402A3...000100C0       07     675.0 MB   HGT
-    211  00  YHQU67   0                        EOM
-  00001C02  0760FF80  0764028A...000100C0       07     650.0 MB   HGT
-    211  00  YHQU65   0                        EOM
-  00001C02  0760FF80  07640271...000100C0       07     625.0 MB   HGT
-    211  00  YHQU62   0                        EOM
-  00001C02  0760FF80  07640258...000100C0       07     600.0 MB   HGT
-    211  00  YHQU60   0                        EOM
-  00001C02  0760FF80  0764023F...000100C0       07     575.0 MB   HGT
-    211  00  YHQU57   0                        EOM
-  00001C02  0760FF80  07640226...000100C0       07     550.0 MB   HGT
-    211  00  YHQU55   0                        EOM
-  00001C02  0760FF80  0764020D...000100C0       07     525.0 MB   HGT
-    211  00  YHQU52   0                        EOM
-  00001C02  0760FF80  076401F4...000100C0       07     500.0 MB   HGT
-    211  00  YHQU50   0                        EOM
-  00001C02  0760FF80  076401C2...000100C0       07     450.0 MB   HGT
-    211  00  YHQU45   0                        EOM
-  00001C02  0760FF80  07640190...000100C0       07     400.0 MB   HGT
-    211  00  YHQU40   0                        EOM
-  00001C02  0760FF80  0764015E...000100C0       07     350.0 MB   HGT
-    211  00  YHQU35   0                        EOM
-  00001C02  0760FF80  0764012C...000100C0       07     300.0 MB   HGT
-    211  00  YHQU30   0                        EOM
-  00001C02  0760FF80  076400FA...000100C0       07     250.0 MB   HGT
-    211  00  YHQU25   0                        EOM
-  00001C02  0760FF80  076400C8...000100C0       07     200.0 MB   HGT
-    211  00  YHQU20   0                        EOM
-  00001C02  0760FF80  07640096...000100C0       07     150.0 MB   HGT
-    211  00  YHQU15   0                        EOM
-  00001C02  0760FF80  07640064...000100C0       07     100.0 MB   HGT
-    211  00  YHQU10   0                        EOM
-  00001C02  0760FF80  216403E8...000100C0       33    1000.0 MB   U GRD 
-    211  00  YUQU99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100C0       33     975.0 MB   U GRD
-    211  00  YUQU93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100C0       33     950.0 MB   U GRD
-    211  00  YUQU95   0                        EOM
-  00001C02  0760FF80  2164039D...000100C0       33     925.0 MB   U GRD
-    211  00  YUQU92   0                        EOM
-  00001C02  0760FF80  21640384...000100C0       33     900.0 MB   U GRD
-    211  00  YUQU90   0                        EOM
-  00001C02  0760FF80  2164036B...000100C0       33     875.0 MB   U GRD
-    211  00  YUQU91   0                        EOM
-  00001C02  0760FF80  21640352...000100C0       33     850.0 MB   U GRD
-    211  00  YUQU85   0                        EOM
-  00001C02  0760FF80  21640339...000100C0       33     825.0 MB   U GRD
-    211  00  YUQU82   0                        EOM
-  00001C02  0760FF80  21640320...000100C0       33     800.0 MB   U GRD
-    211  00  YUQU80   0                        EOM
-  00001C02  0760FF80  21640307...000100C0       33     775.0 MB   U GRD
-    211  00  YUQU77   0                        EOM
-  00001C02  0760FF80  216402EE...000100C0       33     750.0 MB   U GRD
-    211  00  YUQU75   0                        EOM
-  00001C02  0760FF80  216402D5...000100C0       33     725.0 MB   U GRD
-    211  00  YUQU72   0                        EOM
-  00001C02  0760FF80  216402BC...000100C0       33     700.0 MB   U GRD
-    211  00  YUQU70   0                        EOM
-  00001C02  0760FF80  216402A3...000100C0       33     675.0 MB   U GRD
-    211  00  YUQU67   0                        EOM
-  00001C02  0760FF80  2164028A...000100C0       33     650.0 MB   U GRD
-    211  00  YUQU65   0                        EOM
-  00001C02  0760FF80  21640271...000100C0       33     625.0 MB   U GRD
-    211  00  YUQU62   0                        EOM
-  00001C02  0760FF80  21640258...000100C0       33     600.0 MB   U GRD
-    211  00  YUQU60   0                        EOM
-  00001C02  0760FF80  2164023F...000100C0       33     575.0 MB   U GRD
-    211  00  YUQU57   0                        EOM
-  00001C02  0760FF80  21640226...000100C0       33     550.0 MB   U GRD
-    211  00  YUQU55   0                        EOM
-  00001C02  0760FF80  2164020D...000100C0       33     525.0 MB   U GRD
-    211  00  YUQU52   0                        EOM
-  00001C02  0760FF80  216401F4...000100C0       33     500.0 MB   U GRD
-    211  00  YUQU50   0                        EOM
-  00001C02  0760FF80  216401C2...000100C0       33     450.0 MB   U GRD
-    211  00  YUQU45   0                        EOM
-  00001C02  0760FF80  21640190...000100C0       33     400.0 MB   U GRD
-    211  00  YUQU40   0                        EOM
-  00001C02  0760FF80  2164015E...000100C0       33     350.0 MB   U GRD
-    211  00  YUQU35   0                        EOM
-  00001C02  0760FF80  2164012C...000100C0       33     300.0 MB   U GRD
-    211  00  YUQU30   0                        EOM
-  00001C02  0760FF80  216400FA...000100C0       33     250.0 MB   U GRD
-    211  00  YUQU25   0                        EOM
-  00001C02  0760FF80  216400C8...000100C0       33     200.0 MB   U GRD
-    211  00  YUQU20   0                        EOM
-  00001C02  0760FF80  21640096...000100C0       33     150.0 MB   U GRD
-    211  00  YUQU15   0                        EOM
-  00001C02  0760FF80  21640064...000100C0       33     100.0 MB   U GRD
-    211  00  YUQU10   0                        EOM
-  00001C02  0760FF80  226403E8...000100C0       34    1000.0 MB   V GRD
-    211  00  YVQU99   0                        EOM
-  00001C02  0760FF80  226403CF...000100C0       34     975.0 MB   V GRD
-    211  00  YVQU93   0                        EOM
-  00001C02  0760FF80  226403B6...000100C0       34     950.0 MB   V GRD
-    211  00  YVQU95   0                        EOM
-  00001C02  0760FF80  2264039D...000100C0       34     925.0 MB   V GRD
-    211  00  YVQU92   0                        EOM
-  00001C02  0760FF80  22640384...000100C0       34     900.0 MB   V GRD
-    211  00  YVQU90   0                        EOM
-  00001C02  0760FF80  2264036B...000100C0       34     875.0 MB   V GRD
-    211  00  YVQU91   0                        EOM
-  00001C02  0760FF80  22640352...000100C0       34     850.0 MB   V GRD
-    211  00  YVQU85   0                        EOM
-  00001C02  0760FF80  22640339...000100C0       34     825.0 MB   V GRD
-    211  00  YVQU82   0                        EOM
-  00001C02  0760FF80  22640320...000100C0       34     800.0 MB   V GRD
-    211  00  YVQU80   0                        EOM
-  00001C02  0760FF80  22640307...000100C0       34     775.0 MB   V GRD
-    211  00  YVQU77   0                        EOM
-  00001C02  0760FF80  226402EE...000100C0       34     750.0 MB   V GRD
-    211  00  YVQU75   0                        EOM
-  00001C02  0760FF80  226402D5...000100C0       34     725.0 MB   V GRD
-    211  00  YVQU72   0                        EOM
-  00001C02  0760FF80  226402BC...000100C0       34     700.0 MB   V GRD
-    211  00  YVQU70   0                        EOM
-  00001C02  0760FF80  226402A3...000100C0       34     675.0 MB   V GRD
-    211  00  YVQU67   0                        EOM
-  00001C02  0760FF80  2264028A...000100C0       34     650.0 MB   V GRD
-    211  00  YVQU65   0                        EOM
-  00001C02  0760FF80  22640271...000100C0       34     625.0 MB   V GRD
-    211  00  YVQU62   0                        EOM
-  00001C02  0760FF80  22640258...000100C0       34     600.0 MB   V GRD
-    211  00  YVQU60   0                        EOM
-  00001C02  0760FF80  2264023F...000100C0       34     575.0 MB   V GRD
-    211  00  YVQU57   0                        EOM
-  00001C02  0760FF80  22640226...000100C0       34     550.0 MB   V GRD
-    211  00  YVQU55   0                        EOM
-  00001C02  0760FF80  2264020D...000100C0       34     525.0 MB   V GRD
-    211  00  YVQU52   0                        EOM
-  00001C02  0760FF80  226401F4...000100C0       34     500.0 MB   V GRD
-    211  00  YVQU50   0                        EOM
-  00001C02  0760FF80  226401C2...000100C0       34     450.0 MB   V GRD
-    211  00  YVQU45   0                        EOM
-  00001C02  0760FF80  22640190...000100C0       34     400.0 MB   V GRD
-    211  00  YVQU40   0                        EOM
-  00001C02  0760FF80  2264015E...000100C0       34     350.0 MB   V GRD
-    211  00  YVQU35   0                        EOM
-  00001C02  0760FF80  2264012C...000100C0       34     300.0 MB   V GRD
-    211  00  YVQU30   0                        EOM
-  00001C02  0760FF80  226400FA...000100C0       34     250.0 MB   V GRD
-    211  00  YVQU25   0                        EOM
-  00001C02  0760FF80  226400C8...000100C0       34     200.0 MB   V GRD
-    211  00  YVQU20   0                        EOM
-  00001C02  0760FF80  22640096...000100C0       34     150.0 MB   V GRD
-    211  00  YVQU15   0                        EOM
-  00001C02  0760FF80  22640064...000100C0       34     100.0 MB   V GRD
-    211  00  YVQU10   0                        EOM
-  00001C02  0760FF80  02660000...000100C0       02           MSL  PRMSL
-    211  00  YPQU89   0                        EOM
-  00001C02  0760FF80  3D010000...000100C0       61           SFC  A PCP
-    211  00  YEQU98   0                        EOM
-  00001C02  0760FF80  346403E8...000100C0       52    1000.0 MB   R H
-    211  00  YRQU99   0                        EOM
-  00001C02  0760FF80  346403CF...000100C0       52     975.0 MB   R H
-    211  00  YRQU93   0                        EOM
-  00001C02  0760FF80  346403B6...000100C0       52     950.0 MB   R H
-    211  00  YRQU95   0                        EOM
-  00001C02  0760FF80  3464039D...000100C0       52     925.0 MB   R H
-    211  00  YRQU92   0                        EOM
-  00001C02  0760FF80  34640384...000100C0       52     900.0 MB   R H
-    211  00  YRQU90   0                        EOM
-  00001C02  0760FF80  3464036B...000100C0       52     875.0 MB   R H
-    211  00  YRQU91   0                        EOM
-  00001C02  0760FF80  34640352...000100C0       52     850.0 MB   R H
-    211  00  YRQU85   0                        EOM
-  00001C02  0760FF80  34640339...000100C0       52     825.0 MB   R H
-    211  00  YRQU82   0                        EOM
-  00001C02  0760FF80  34640320...000100C0       52     800.0 MB   R H
-    211  00  YRQU80   0                        EOM
-  00001C02  0760FF80  34640307...000100C0       52     775.0 MB   R H
-    211  00  YRQU77   0                        EOM
-  00001C02  0760FF80  346402EE...000100C0       52     750.0 MB   R H
-    211  00  YRQU75   0                        EOM
-  00001C02  0760FF80  346402D5...000100C0       52     725.0 MB   R H
-    211  00  YRQU72   0                        EOM
-  00001C02  0760FF80  346402BC...000100C0       52     700.0 MB   R H
-    211  00  YRQU70   0                        EOM
-  00001C02  0760FF80  346402A3...000100C0       52     675.0 MB   R H
-    211  00  YRQU67   0                        EOM
-  00001C02  0760FF80  3464028A...000100C0       52     650.0 MB   R H
-    211  00  YRQU65   0                        EOM
-  00001C02  0760FF80  34640271...000100C0       52     625.0 MB   R H
-    211  00  YRQU62   0                        EOM
-  00001C02  0760FF80  34640258...000100C0       52     600.0 MB   R H
-    211  00  YRQU60   0                        EOM
-  00001C02  0760FF80  3464023F...000100C0       52     575.0 MB   R H
-    211  00  YRQU57   0                        EOM
-  00001C02  0760FF80  34640226...000100C0       52     550.0 MB   R H
-    211  00  YRQU55   0                        EOM
-  00001C02  0760FF80  3464020D...000100C0       52     525.0 MB   R H
-    211  00  YRQU52   0                        EOM
-  00001C02  0760FF80  346401F4...000100C0       52     500.0 MB   R H
-    211  00  YRQU50   0                        EOM
-  00001C02  0760FF80  346401C2...000100C0       52     450.0 MB   R H
-    211  00  YRQU45   0                        EOM
-  00001C02  0760FF80  34640190...000100C0       52     400.0 MB   R H
-    211  00  YRQU40   0                        EOM
-  00001C02  0760FF80  3464015E...000100C0       52     350.0 MB   R H
-    211  00  YRQU35   0                        EOM
-  00001C02  0760FF80  3464012C...000100C0       52     300.0 MB   R H
-    211  00  YRQU30   0                        EOM
-  00001C02  0760FF80  346400FA...000100C0       52     250.0 MB   R H
-    211  00  YRQU25   0                        EOM
-  00001C02  0760FF80  346400C8...000100C0       52     200.0 MB   R H
-    211  00  YRQU20   0                        EOM
-  00001C02  0760FF80  34640096...000100C0       52     150.0 MB   R H
-    211  00  YRQU15   0                        EOM
-  00001C02  0760FF80  34640064...000100C0       52     100.0 MB   R H
-    211  00  YRQU10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100C0       11    1000.0 MB   TMP
-    211  00  YTQU99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100C0       11     975.0 MB   TMP
-    211  00  YTQU93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100C0       11     950.0 MB   TMP
-    211  00  YTQU95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100C0       11     925.0 MB   TMP
-    211  00  YTQU92   0                        EOM
-  00001C02  0760FF80  0B640384...000100C0       11     900.0 MB   TMP
-    211  00  YTQU90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100C0       11     875.0 MB   TMP
-    211  00  YTQU91   0                        EOM
-  00001C02  0760FF80  0B640352...000100C0       11     850.0 MB   TMP
-    211  00  YTQU85   0                        EOM
-  00001C02  0760FF80  0B640339...000100C0       11     825.0 MB   TMP
-    211  00  YTQU82   0                        EOM
-  00001C02  0760FF80  0B640320...000100C0       11     800.0 MB   TMP
-    211  00  YTQU80   0                        EOM
-  00001C02  0760FF80  0B640307...000100C0       11     775.0 MB   TMP
-    211  00  YTQU77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100C0       11     750.0 MB   TMP
-    211  00  YTQU75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100C0       11     725.0 MB   TMP
-    211  00  YTQU72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100C0       11     700.0 MB   TMP
-    211  00  YTQU70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100C0       11     675.0 MB   TMP
-    211  00  YTQU67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100C0       11     650.0 MB   TMP
-    211  00  YTQU65   0                        EOM
-  00001C02  0760FF80  0B640271...000100C0       11     625.0 MB   TMP
-    211  00  YTQU62   0                        EOM
-  00001C02  0760FF80  0B640258...000100C0       11     600.0 MB   TMP
-    211  00  YTQU60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100C0       11     575.0 MB   TMP
-    211  00  YTQU57   0                        EOM
-  00001C02  0760FF80  0B640226...000100C0       11     550.0 MB   TMP
-    211  00  YTQU55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100C0       11     525.0 MB   TMP
-    211  00  YTQU52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100C0       11     500.0 MB   TMP
-    211  00  YTQU50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100C0       11     450.0 MB   TMP
-    211  00  YTQU45   0                        EOM
-  00001C02  0760FF80  0B640190...000100C0       11     400.0 MB   TMP
-    211  00  YTQU40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100C0       11     350.0 MB   TMP
-    211  00  YTQU35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100C0       11     300.0 MB   TMP
-    211  00  YTQU30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100C0       11     250.0 MB   TMP
-    211  00  YTQU25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100C0       11     200.0 MB   TMP
-    211  00  YTQU20   0                        EOM
-  00001C02  0760FF80  0B640096...000100C0       11     150.0 MB   TMP
-    211  00  YTQU15   0                        EOM
-  00001C02  0760FF80  0B640064...000100C0       11     100.0 MB   TMP
-    211  00  YTQU10   0                        EOM
-  00001C02  0760FF80  28640352...000100C0       40     850.0 MB  DZDT
-    211  00  YOQU85   0                        EOM
-  00001C02  0760FF80  286402BC...000100C0       40     700.0 MB  DZDT
-    211  00  YOQU70   0                        EOM
-  00001C02  0760FF80  286401F4...000100C0       40     500.0 MB  DZDT
-    211  00  YOQU50   0                        EOM
-  00001C02  0760FF80  01010000...000100C0       01          SFC  PRES
-    211  00  YPQU98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100C0       52        44/100  R H
-    211  00  YRQU00   0                        EOM
-  00001C02  0760FF80  296401F4...000100C0       41     500.0 MB ABS V
-    211  00  YCQU50   0                        EOM 
-  00001C02  0760FF80  9D010000...000100C0      157          SFC   CAPE
-    211  00  YWQU98   0                        EOM
-  00001C02  0760FF80  9C010000...000100C0      156          SFC   CIN
-    211  00  YYQU98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100C0      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQU86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100C0      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQU86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100C0       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100C0       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100C0       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100C0       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  0B749678...000100C0       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100C0       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQU86   0                        EOM
-  00001C02  0760FF80  34741E00...000100C0       52   30 SPDY   0 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100C0       52   60 SPDY  30 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100C0       52   90 SPDY  60 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  3474785A...000100C0       52  120 SPDY  90 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  34749678...000100C0       52  150 SPDY 120 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  3474B496...000100C0       52  180 SPDY 150 SPDY  R H
-    211  00  YRQU86   0                        EOM
-  00001C02  0760FF80  21741E00...000100C0       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100C0       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100C0       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  2174785A...000100C0       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  21749678...000100C0       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  2174B496...000100C0       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQU86   0                        EOM
-  00001C02  0760FF80  22741E00...000100C0       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100C0       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100C0       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  2274785A...000100C0       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  22749678...000100C0       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  2274B496...000100C0       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQU86   0                        EOM
-  00001C02  0760FF80  0B690002...000100C0       11    2  HTGL     TMP
-    211  00  YTQU98   0                        EOM
-  00001C02  0760FF80  34690002...000100C0       52    2  HTGL     R H
-    211  00  YRQU98   0                        EOM
-  00001C02  0760FF80  2169000A...000100C0       33   10  HTGL     U GRD
-    211  00  YUQU98   0                        EOM
-  00001C02  0760FF80  2269000A...000100C0       34   10  HTGL     V GRD
-    211  00  YVQU98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs198.211 b/parm/wmo/grib_awpgfs198.211
deleted file mode 100755
index 646a71c4b4..0000000000
--- a/parm/wmo/grib_awpgfs198.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100C6       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100C6       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100C6       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100C6       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100C6       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100C6       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100C6       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100C6       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100C6       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100C6       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100C6       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100C6       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100C6       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100C6       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100C6       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100C6       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100C6       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100C6       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100C6       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100C6       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100C6       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  076401C2...000100C6       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  07640190...000100C6       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  0764015E...000100C6       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100C6       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100C6       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100C6       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100C6       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100C6       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100C6       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100C6       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100C6       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100C6       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100C6       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100C6       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100C6       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100C6       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100C6       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100C6       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100C6       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100C6       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100C6       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100C6       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100C6       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100C6       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100C6       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100C6       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100C6       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100C6       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100C6       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100C6       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100C6       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100C6       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100C6       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100C6       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100C6       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100C6       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100C6       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100C6       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100C6       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100C6       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100C6       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100C6       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100C6       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100C6       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100C6       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100C6       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100C6       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100C6       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100C6       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100C6       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100C6       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100C6       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100C6       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100C6       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100C6       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100C6       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100C6       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100C6       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100C6       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100C6       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100C6       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100C6       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100C6       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100C6       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100C6       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100C6       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100C6       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100C6       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100C6       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100C6       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100C6       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100C6       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100C6       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100C6       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100C6       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100C6       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100C6       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100C6       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100C6       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100C6       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100C6       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100C6       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100C6       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100C6       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100C6       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100C6       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100C6       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100C6       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100C6       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100C6       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100C6       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100C6       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100C6       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100C6       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100C6       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100C6       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100C6       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100C6       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100C6       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100C6       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100C6       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100C6       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100C6       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100C6       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100C6       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100C6       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100C6       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100C6       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100C6       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100C6       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100C6       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100C6       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100C6       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100C6       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100C6       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100C6       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100C6       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100C6       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100C6       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100C6       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100C6       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100C6       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100C6       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100C6       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100C6       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100C6       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100C6       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100C6       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100C6       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100C6       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100C6       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100C6       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100C6      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100C6      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100C6      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100C6      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100C6       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100C6       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100C6       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100C6       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100C6       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100C6       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100C6       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100C6       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100C6       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100C6       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100C6       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100C6       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100C6       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100C6       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100C6       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100C6       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100C6       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100C6       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100C6       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100C6       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100C6       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100C6       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100C6       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100C6       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100C6       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100C6       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100C6       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100C6       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs204.211 b/parm/wmo/grib_awpgfs204.211
deleted file mode 100755
index 7cba856693..0000000000
--- a/parm/wmo/grib_awpgfs204.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100CC       07    1000.0 MB   HGT
-    211  00  YHQV99   0                        EOM
-  00001C02  0760FF80  076403CF...000100CC       07     975.0 MB   HGT
-    211  00  YHQV93   0                        EOM
-  00001C02  0760FF80  076403B6...000100CC       07     950.0 MB   HGT
-    211  00  YHQV95   0                        EOM
-  00001C02  0760FF80  0764039D...000100CC       07     925.0 MB   HGT
-    211  00  YHQV92   0                        EOM
-  00001C02  0760FF80  07640384...000100CC       07     900.0 MB   HGT
-    211  00  YHQV90   0                        EOM
-  00001C02  0760FF80  0764036B...000100CC       07     875.0 MB   HGT
-    211  00  YHQV91   0                        EOM
-  00001C02  0760FF80  07640352...000100CC       07     850.0 MB   HGT
-    211  00  YHQV85   0                        EOM
-  00001C02  0760FF80  07640339...000100CC       07     825.0 MB   HGT
-    211  00  YHQV82   0                        EOM
-  00001C02  0760FF80  07640320...000100CC       07     800.0 MB   HGT
-    211  00  YHQV80   0                        EOM
-  00001C02  0760FF80  07640307...000100CC       07     775.0 MB   HGT
-    211  00  YHQV77   0                        EOM
-  00001C02  0760FF80  076402EE...000100CC       07     750.0 MB   HGT
-    211  00  YHQV75   0                        EOM
-  00001C02  0760FF80  076402D5...000100CC       07     725.0 MB   HGT
-    211  00  YHQV72   0                        EOM
-  00001C02  0760FF80  076402BC...000100CC       07     700.0 MB   HGT
-    211  00  YHQV70   0                        EOM
-  00001C02  0760FF80  076402A3...000100CC       07     675.0 MB   HGT
-    211  00  YHQV67   0                        EOM
-  00001C02  0760FF80  0764028A...000100CC       07     650.0 MB   HGT
-    211  00  YHQV65   0                        EOM
-  00001C02  0760FF80  07640271...000100CC       07     625.0 MB   HGT
-    211  00  YHQV62   0                        EOM
-  00001C02  0760FF80  07640258...000100CC       07     600.0 MB   HGT
-    211  00  YHQV60   0                        EOM
-  00001C02  0760FF80  0764023F...000100CC       07     575.0 MB   HGT
-    211  00  YHQV57   0                        EOM
-  00001C02  0760FF80  07640226...000100CC       07     550.0 MB   HGT
-    211  00  YHQV55   0                        EOM
-  00001C02  0760FF80  0764020D...000100CC       07     525.0 MB   HGT
-    211  00  YHQV52   0                        EOM
-  00001C02  0760FF80  076401F4...000100CC       07     500.0 MB   HGT
-    211  00  YHQV50   0                        EOM
-  00001C02  0760FF80  076401C2...000100CC       07     450.0 MB   HGT
-    211  00  YHQV45   0                        EOM
-  00001C02  0760FF80  07640190...000100CC       07     400.0 MB   HGT
-    211  00  YHQV40   0                        EOM
-  00001C02  0760FF80  0764015E...000100CC       07     350.0 MB   HGT
-    211  00  YHQV35   0                        EOM
-  00001C02  0760FF80  0764012C...000100CC       07     300.0 MB   HGT
-    211  00  YHQV30   0                        EOM
-  00001C02  0760FF80  076400FA...000100CC       07     250.0 MB   HGT
-    211  00  YHQV25   0                        EOM
-  00001C02  0760FF80  076400C8...000100CC       07     200.0 MB   HGT
-    211  00  YHQV20   0                        EOM
-  00001C02  0760FF80  07640096...000100CC       07     150.0 MB   HGT
-    211  00  YHQV15   0                        EOM
-  00001C02  0760FF80  07640064...000100CC       07     100.0 MB   HGT
-    211  00  YHQV10   0                        EOM
-  00001C02  0760FF80  216403E8...000100CC       33    1000.0 MB   U GRD 
-    211  00  YUQV99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100CC       33     975.0 MB   U GRD
-    211  00  YUQV93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100CC       33     950.0 MB   U GRD
-    211  00  YUQV95   0                        EOM
-  00001C02  0760FF80  2164039D...000100CC       33     925.0 MB   U GRD
-    211  00  YUQV92   0                        EOM
-  00001C02  0760FF80  21640384...000100CC       33     900.0 MB   U GRD
-    211  00  YUQV90   0                        EOM
-  00001C02  0760FF80  2164036B...000100CC       33     875.0 MB   U GRD
-    211  00  YUQV91   0                        EOM
-  00001C02  0760FF80  21640352...000100CC       33     850.0 MB   U GRD
-    211  00  YUQV85   0                        EOM
-  00001C02  0760FF80  21640339...000100CC       33     825.0 MB   U GRD
-    211  00  YUQV82   0                        EOM
-  00001C02  0760FF80  21640320...000100CC       33     800.0 MB   U GRD
-    211  00  YUQV80   0                        EOM
-  00001C02  0760FF80  21640307...000100CC       33     775.0 MB   U GRD
-    211  00  YUQV77   0                        EOM
-  00001C02  0760FF80  216402EE...000100CC       33     750.0 MB   U GRD
-    211  00  YUQV75   0                        EOM
-  00001C02  0760FF80  216402D5...000100CC       33     725.0 MB   U GRD
-    211  00  YUQV72   0                        EOM
-  00001C02  0760FF80  216402BC...000100CC       33     700.0 MB   U GRD
-    211  00  YUQV70   0                        EOM
-  00001C02  0760FF80  216402A3...000100CC       33     675.0 MB   U GRD
-    211  00  YUQV67   0                        EOM
-  00001C02  0760FF80  2164028A...000100CC       33     650.0 MB   U GRD
-    211  00  YUQV65   0                        EOM
-  00001C02  0760FF80  21640271...000100CC       33     625.0 MB   U GRD
-    211  00  YUQV62   0                        EOM
-  00001C02  0760FF80  21640258...000100CC       33     600.0 MB   U GRD
-    211  00  YUQV60   0                        EOM
-  00001C02  0760FF80  2164023F...000100CC       33     575.0 MB   U GRD
-    211  00  YUQV57   0                        EOM
-  00001C02  0760FF80  21640226...000100CC       33     550.0 MB   U GRD
-    211  00  YUQV55   0                        EOM
-  00001C02  0760FF80  2164020D...000100CC       33     525.0 MB   U GRD
-    211  00  YUQV52   0                        EOM
-  00001C02  0760FF80  216401F4...000100CC       33     500.0 MB   U GRD
-    211  00  YUQV50   0                        EOM
-  00001C02  0760FF80  216401C2...000100CC       33     450.0 MB   U GRD
-    211  00  YUQV45   0                        EOM
-  00001C02  0760FF80  21640190...000100CC       33     400.0 MB   U GRD
-    211  00  YUQV40   0                        EOM
-  00001C02  0760FF80  2164015E...000100CC       33     350.0 MB   U GRD
-    211  00  YUQV35   0                        EOM
-  00001C02  0760FF80  2164012C...000100CC       33     300.0 MB   U GRD
-    211  00  YUQV30   0                        EOM
-  00001C02  0760FF80  216400FA...000100CC       33     250.0 MB   U GRD
-    211  00  YUQV25   0                        EOM
-  00001C02  0760FF80  216400C8...000100CC       33     200.0 MB   U GRD
-    211  00  YUQV20   0                        EOM
-  00001C02  0760FF80  21640096...000100CC       33     150.0 MB   U GRD
-    211  00  YUQV15   0                        EOM
-  00001C02  0760FF80  21640064...000100CC       33     100.0 MB   U GRD
-    211  00  YUQV10   0                        EOM
-  00001C02  0760FF80  226403E8...000100CC       34    1000.0 MB   V GRD
-    211  00  YVQV99   0                        EOM
-  00001C02  0760FF80  226403CF...000100CC       34     975.0 MB   V GRD
-    211  00  YVQV93   0                        EOM
-  00001C02  0760FF80  226403B6...000100CC       34     950.0 MB   V GRD
-    211  00  YVQV95   0                        EOM
-  00001C02  0760FF80  2264039D...000100CC       34     925.0 MB   V GRD
-    211  00  YVQV92   0                        EOM
-  00001C02  0760FF80  22640384...000100CC       34     900.0 MB   V GRD
-    211  00  YVQV90   0                        EOM
-  00001C02  0760FF80  2264036B...000100CC       34     875.0 MB   V GRD
-    211  00  YVQV91   0                        EOM
-  00001C02  0760FF80  22640352...000100CC       34     850.0 MB   V GRD
-    211  00  YVQV85   0                        EOM
-  00001C02  0760FF80  22640339...000100CC       34     825.0 MB   V GRD
-    211  00  YVQV82   0                        EOM
-  00001C02  0760FF80  22640320...000100CC       34     800.0 MB   V GRD
-    211  00  YVQV80   0                        EOM
-  00001C02  0760FF80  22640307...000100CC       34     775.0 MB   V GRD
-    211  00  YVQV77   0                        EOM
-  00001C02  0760FF80  226402EE...000100CC       34     750.0 MB   V GRD
-    211  00  YVQV75   0                        EOM
-  00001C02  0760FF80  226402D5...000100CC       34     725.0 MB   V GRD
-    211  00  YVQV72   0                        EOM
-  00001C02  0760FF80  226402BC...000100CC       34     700.0 MB   V GRD
-    211  00  YVQV70   0                        EOM
-  00001C02  0760FF80  226402A3...000100CC       34     675.0 MB   V GRD
-    211  00  YVQV67   0                        EOM
-  00001C02  0760FF80  2264028A...000100CC       34     650.0 MB   V GRD
-    211  00  YVQV65   0                        EOM
-  00001C02  0760FF80  22640271...000100CC       34     625.0 MB   V GRD
-    211  00  YVQV62   0                        EOM
-  00001C02  0760FF80  22640258...000100CC       34     600.0 MB   V GRD
-    211  00  YVQV60   0                        EOM
-  00001C02  0760FF80  2264023F...000100CC       34     575.0 MB   V GRD
-    211  00  YVQV57   0                        EOM
-  00001C02  0760FF80  22640226...000100CC       34     550.0 MB   V GRD
-    211  00  YVQV55   0                        EOM
-  00001C02  0760FF80  2264020D...000100CC       34     525.0 MB   V GRD
-    211  00  YVQV52   0                        EOM
-  00001C02  0760FF80  226401F4...000100CC       34     500.0 MB   V GRD
-    211  00  YVQV50   0                        EOM
-  00001C02  0760FF80  226401C2...000100CC       34     450.0 MB   V GRD
-    211  00  YVQV45   0                        EOM
-  00001C02  0760FF80  22640190...000100CC       34     400.0 MB   V GRD
-    211  00  YVQV40   0                        EOM
-  00001C02  0760FF80  2264015E...000100CC       34     350.0 MB   V GRD
-    211  00  YVQV35   0                        EOM
-  00001C02  0760FF80  2264012C...000100CC       34     300.0 MB   V GRD
-    211  00  YVQV30   0                        EOM
-  00001C02  0760FF80  226400FA...000100CC       34     250.0 MB   V GRD
-    211  00  YVQV25   0                        EOM
-  00001C02  0760FF80  226400C8...000100CC       34     200.0 MB   V GRD
-    211  00  YVQV20   0                        EOM
-  00001C02  0760FF80  22640096...000100CC       34     150.0 MB   V GRD
-    211  00  YVQV15   0                        EOM
-  00001C02  0760FF80  22640064...000100CC       34     100.0 MB   V GRD
-    211  00  YVQV10   0                        EOM
-  00001C02  0760FF80  02660000...000100CC       02           MSL  PRMSL
-    211  00  YPQV89   0                        EOM
-  00001C02  0760FF80  3D010000...000100CC       61           SFC  A PCP
-    211  00  YEQV98   0                        EOM
-  00001C02  0760FF80  346403E8...000100CC       52    1000.0 MB   R H
-    211  00  YRQV99   0                        EOM
-  00001C02  0760FF80  346403CF...000100CC       52     975.0 MB   R H
-    211  00  YRQV93   0                        EOM
-  00001C02  0760FF80  346403B6...000100CC       52     950.0 MB   R H
-    211  00  YRQV95   0                        EOM
-  00001C02  0760FF80  3464039D...000100CC       52     925.0 MB   R H
-    211  00  YRQV92   0                        EOM
-  00001C02  0760FF80  34640384...000100CC       52     900.0 MB   R H
-    211  00  YRQV90   0                        EOM
-  00001C02  0760FF80  3464036B...000100CC       52     875.0 MB   R H
-    211  00  YRQV91   0                        EOM
-  00001C02  0760FF80  34640352...000100CC       52     850.0 MB   R H
-    211  00  YRQV85   0                        EOM
-  00001C02  0760FF80  34640339...000100CC       52     825.0 MB   R H
-    211  00  YRQV82   0                        EOM
-  00001C02  0760FF80  34640320...000100CC       52     800.0 MB   R H
-    211  00  YRQV80   0                        EOM
-  00001C02  0760FF80  34640307...000100CC       52     775.0 MB   R H
-    211  00  YRQV77   0                        EOM
-  00001C02  0760FF80  346402EE...000100CC       52     750.0 MB   R H
-    211  00  YRQV75   0                        EOM
-  00001C02  0760FF80  346402D5...000100CC       52     725.0 MB   R H
-    211  00  YRQV72   0                        EOM
-  00001C02  0760FF80  346402BC...000100CC       52     700.0 MB   R H
-    211  00  YRQV70   0                        EOM
-  00001C02  0760FF80  346402A3...000100CC       52     675.0 MB   R H
-    211  00  YRQV67   0                        EOM
-  00001C02  0760FF80  3464028A...000100CC       52     650.0 MB   R H
-    211  00  YRQV65   0                        EOM
-  00001C02  0760FF80  34640271...000100CC       52     625.0 MB   R H
-    211  00  YRQV62   0                        EOM
-  00001C02  0760FF80  34640258...000100CC       52     600.0 MB   R H
-    211  00  YRQV60   0                        EOM
-  00001C02  0760FF80  3464023F...000100CC       52     575.0 MB   R H
-    211  00  YRQV57   0                        EOM
-  00001C02  0760FF80  34640226...000100CC       52     550.0 MB   R H
-    211  00  YRQV55   0                        EOM
-  00001C02  0760FF80  3464020D...000100CC       52     525.0 MB   R H
-    211  00  YRQV52   0                        EOM
-  00001C02  0760FF80  346401F4...000100CC       52     500.0 MB   R H
-    211  00  YRQV50   0                        EOM
-  00001C02  0760FF80  346401C2...000100CC       52     450.0 MB   R H
-    211  00  YRQV45   0                        EOM
-  00001C02  0760FF80  34640190...000100CC       52     400.0 MB   R H
-    211  00  YRQV40   0                        EOM
-  00001C02  0760FF80  3464015E...000100CC       52     350.0 MB   R H
-    211  00  YRQV35   0                        EOM
-  00001C02  0760FF80  3464012C...000100CC       52     300.0 MB   R H
-    211  00  YRQV30   0                        EOM
-  00001C02  0760FF80  346400FA...000100CC       52     250.0 MB   R H
-    211  00  YRQV25   0                        EOM
-  00001C02  0760FF80  346400C8...000100CC       52     200.0 MB   R H
-    211  00  YRQV20   0                        EOM
-  00001C02  0760FF80  34640096...000100CC       52     150.0 MB   R H
-    211  00  YRQV15   0                        EOM
-  00001C02  0760FF80  34640064...000100CC       52     100.0 MB   R H
-    211  00  YRQV10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100CC       11    1000.0 MB   TMP
-    211  00  YTQV99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100CC       11     975.0 MB   TMP
-    211  00  YTQV93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100CC       11     950.0 MB   TMP
-    211  00  YTQV95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100CC       11     925.0 MB   TMP
-    211  00  YTQV92   0                        EOM
-  00001C02  0760FF80  0B640384...000100CC       11     900.0 MB   TMP
-    211  00  YTQV90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100CC       11     875.0 MB   TMP
-    211  00  YTQV91   0                        EOM
-  00001C02  0760FF80  0B640352...000100CC       11     850.0 MB   TMP
-    211  00  YTQV85   0                        EOM
-  00001C02  0760FF80  0B640339...000100CC       11     825.0 MB   TMP
-    211  00  YTQV82   0                        EOM
-  00001C02  0760FF80  0B640320...000100CC       11     800.0 MB   TMP
-    211  00  YTQV80   0                        EOM
-  00001C02  0760FF80  0B640307...000100CC       11     775.0 MB   TMP
-    211  00  YTQV77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100CC       11     750.0 MB   TMP
-    211  00  YTQV75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100CC       11     725.0 MB   TMP
-    211  00  YTQV72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100CC       11     700.0 MB   TMP
-    211  00  YTQV70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100CC       11     675.0 MB   TMP
-    211  00  YTQV67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100CC       11     650.0 MB   TMP
-    211  00  YTQV65   0                        EOM
-  00001C02  0760FF80  0B640271...000100CC       11     625.0 MB   TMP
-    211  00  YTQV62   0                        EOM
-  00001C02  0760FF80  0B640258...000100CC       11     600.0 MB   TMP
-    211  00  YTQV60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100CC       11     575.0 MB   TMP
-    211  00  YTQV57   0                        EOM
-  00001C02  0760FF80  0B640226...000100CC       11     550.0 MB   TMP
-    211  00  YTQV55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100CC       11     525.0 MB   TMP
-    211  00  YTQV52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100CC       11     500.0 MB   TMP
-    211  00  YTQV50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100CC       11     450.0 MB   TMP
-    211  00  YTQV45   0                        EOM
-  00001C02  0760FF80  0B640190...000100CC       11     400.0 MB   TMP
-    211  00  YTQV40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100CC       11     350.0 MB   TMP
-    211  00  YTQV35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100CC       11     300.0 MB   TMP
-    211  00  YTQV30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100CC       11     250.0 MB   TMP
-    211  00  YTQV25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100CC       11     200.0 MB   TMP
-    211  00  YTQV20   0                        EOM
-  00001C02  0760FF80  0B640096...000100CC       11     150.0 MB   TMP
-    211  00  YTQV15   0                        EOM
-  00001C02  0760FF80  0B640064...000100CC       11     100.0 MB   TMP
-    211  00  YTQV10   0                        EOM
-  00001C02  0760FF80  28640352...000100CC       40     850.0 MB  DZDT
-    211  00  YOQV85   0                        EOM
-  00001C02  0760FF80  286402BC...000100CC       40     700.0 MB  DZDT
-    211  00  YOQV70   0                        EOM
-  00001C02  0760FF80  286401F4...000100CC       40     500.0 MB  DZDT
-    211  00  YOQV50   0                        EOM
-  00001C02  0760FF80  01010000...000100CC       01          SFC  PRES
-    211  00  YPQV98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100CC       52        44/100  R H
-    211  00  YRQV00   0                        EOM
-  00001C02  0760FF80  296401F4...000100CC       41     500.0 MB ABS V
-    211  00  YCQV50   0                        EOM
-  00001C02  0760FF80  9D010000...000100CC      157          SFC   CAPE
-    211  00  YWQV98   0                        EOM
-  00001C02  0760FF80  9C010000...000100CC      156          SFC   CIN
-    211  00  YYQV98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100CC      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQV86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100CC      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQV86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100CC       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100CC       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100CC       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100CC       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  0B749678...000100CC       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100CC       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQV86   0                        EOM
-  00001C02  0760FF80  34741E00...000100CC       52   30 SPDY   0 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100CC       52   60 SPDY  30 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100CC       52   90 SPDY  60 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  3474785A...000100CC       52  120 SPDY  90 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  34749678...000100CC       52  150 SPDY 120 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  3474B496...000100CC       52  180 SPDY 150 SPDY  R H
-    211  00  YRQV86   0                        EOM
-  00001C02  0760FF80  21741E00...000100CC       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100CC       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100CC       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  2174785A...000100CC       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  21749678...000100CC       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  2174B496...000100CC       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQV86   0                        EOM
-  00001C02  0760FF80  22741E00...000100CC       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100CC       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100CC       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  2274785A...000100CC       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  22749678...000100CC       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  2274B496...000100CC       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQV86   0                        EOM
-  00001C02  0760FF80  0B690002...000100CC       11    2  HTGL     TMP
-    211  00  YTQV98   0                        EOM
-  00001C02  0760FF80  34690002...000100CC       52    2  HTGL     R H
-    211  00  YRQV98   0                        EOM
-  00001C02  0760FF80  2169000A...000100CC       33   10  HTGL     U GRD
-    211  00  YUQV98   0                        EOM
-  00001C02  0760FF80  2269000A...000100CC       34   10  HTGL     V GRD
-    211  00  YVQV98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs210.211 b/parm/wmo/grib_awpgfs210.211
deleted file mode 100755
index 5d7cecb239..0000000000
--- a/parm/wmo/grib_awpgfs210.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100D2       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100D2       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100D2       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100D2       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100D2       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100D2       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100D2       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100D2       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100D2       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100D2       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100D2       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100D2       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100D2       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100D2       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100D2       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100D2       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100D2       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100D2       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100D2       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100D2       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100D2       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  076401C2...000100D2       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  07640190...000100D2       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  0764015E...000100D2       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100D2       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100D2       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100D2       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100D2       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100D2       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100D2       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100D2       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100D2       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100D2       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100D2       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100D2       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100D2       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100D2       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100D2       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100D2       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100D2       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100D2       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100D2       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100D2       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100D2       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100D2       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100D2       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100D2       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100D2       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100D2       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100D2       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100D2       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100D2       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100D2       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100D2       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100D2       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100D2       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100D2       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100D2       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100D2       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100D2       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100D2       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100D2       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100D2       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100D2       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100D2       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100D2       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100D2       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100D2       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100D2       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100D2       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100D2       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100D2       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100D2       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100D2       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100D2       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100D2       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100D2       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100D2       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100D2       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100D2       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100D2       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100D2       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100D2       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100D2       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100D2       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100D2       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100D2       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100D2       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100D2       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100D2       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100D2       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100D2       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100D2       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100D2       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100D2       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100D2       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100D2       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100D2       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100D2       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100D2       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100D2       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100D2       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100D2       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100D2       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100D2       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100D2       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100D2       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100D2       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100D2       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100D2       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100D2       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100D2       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100D2       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100D2       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100D2       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100D2       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100D2       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100D2       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100D2       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100D2       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100D2       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100D2       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100D2       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100D2       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100D2       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100D2       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100D2       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100D2       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100D2       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100D2       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100D2       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100D2       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100D2       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100D2       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100D2       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100D2       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100D2       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100D2       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100D2       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100D2       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100D2       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100D2       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100D2       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100D2       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100D2       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100D2       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100D2       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100D2       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100D2       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100D2       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100D2       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100D2       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100D2       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100D2      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100D2      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100D2      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100D2      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100D2       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100D2       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100D2       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100D2       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100D2       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100D2       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100D2       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100D2       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100D2       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100D2       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100D2       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100D2       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100D2       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100D2       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100D2       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100D2       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100D2       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100D2       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100D2       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100D2       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100D2       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100D2       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100D2       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100D2       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100D2       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100D2       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100D2       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100D2       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs216.211 b/parm/wmo/grib_awpgfs216.211
deleted file mode 100755
index d01c0b9023..0000000000
--- a/parm/wmo/grib_awpgfs216.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100D8       07    1000.0 MB   HGT
-    211  00  YHQW99   0                        EOM
-  00001C02  0760FF80  076403CF...000100D8       07     975.0 MB   HGT
-    211  00  YHQW93   0                        EOM
-  00001C02  0760FF80  076403B6...000100D8       07     950.0 MB   HGT
-    211  00  YHQW95   0                        EOM
-  00001C02  0760FF80  0764039D...000100D8       07     925.0 MB   HGT
-    211  00  YHQW92   0                        EOM
-  00001C02  0760FF80  07640384...000100D8       07     900.0 MB   HGT
-    211  00  YHQW90   0                        EOM
-  00001C02  0760FF80  0764036B...000100D8       07     875.0 MB   HGT
-    211  00  YHQW91   0                        EOM
-  00001C02  0760FF80  07640352...000100D8       07     850.0 MB   HGT
-    211  00  YHQW85   0                        EOM
-  00001C02  0760FF80  07640339...000100D8       07     825.0 MB   HGT
-    211  00  YHQW82   0                        EOM
-  00001C02  0760FF80  07640320...000100D8       07     800.0 MB   HGT
-    211  00  YHQW80   0                        EOM
-  00001C02  0760FF80  07640307...000100D8       07     775.0 MB   HGT
-    211  00  YHQW77   0                        EOM
-  00001C02  0760FF80  076402EE...000100D8       07     750.0 MB   HGT
-    211  00  YHQW75   0                        EOM
-  00001C02  0760FF80  076402D5...000100D8       07     725.0 MB   HGT
-    211  00  YHQW72   0                        EOM
-  00001C02  0760FF80  076402BC...000100D8       07     700.0 MB   HGT
-    211  00  YHQW70   0                        EOM
-  00001C02  0760FF80  076402A3...000100D8       07     675.0 MB   HGT
-    211  00  YHQW67   0                        EOM
-  00001C02  0760FF80  0764028A...000100D8       07     650.0 MB   HGT
-    211  00  YHQW65   0                        EOM
-  00001C02  0760FF80  07640271...000100D8       07     625.0 MB   HGT
-    211  00  YHQW62   0                        EOM
-  00001C02  0760FF80  07640258...000100D8       07     600.0 MB   HGT
-    211  00  YHQW60   0                        EOM
-  00001C02  0760FF80  0764023F...000100D8       07     575.0 MB   HGT
-    211  00  YHQW57   0                        EOM
-  00001C02  0760FF80  07640226...000100D8       07     550.0 MB   HGT
-    211  00  YHQW55   0                        EOM
-  00001C02  0760FF80  0764020D...000100D8       07     525.0 MB   HGT
-    211  00  YHQW52   0                        EOM
-  00001C02  0760FF80  076401F4...000100D8       07     500.0 MB   HGT
-    211  00  YHQW50   0                        EOM
-  00001C02  0760FF80  076401C2...000100D8       07     450.0 MB   HGT
-    211  00  YHQW45   0                        EOM
-  00001C02  0760FF80  07640190...000100D8       07     400.0 MB   HGT
-    211  00  YHQW40   0                        EOM
-  00001C02  0760FF80  0764015E...000100D8       07     350.0 MB   HGT
-    211  00  YHQW35   0                        EOM
-  00001C02  0760FF80  0764012C...000100D8       07     300.0 MB   HGT
-    211  00  YHQW30   0                        EOM
-  00001C02  0760FF80  076400FA...000100D8       07     250.0 MB   HGT
-    211  00  YHQW25   0                        EOM
-  00001C02  0760FF80  076400C8...000100D8       07     200.0 MB   HGT
-    211  00  YHQW20   0                        EOM
-  00001C02  0760FF80  07640096...000100D8       07     150.0 MB   HGT
-    211  00  YHQW15   0                        EOM
-  00001C02  0760FF80  07640064...000100D8       07     100.0 MB   HGT
-    211  00  YHQW10   0                        EOM
-  00001C02  0760FF80  216403E8...000100D8       33    1000.0 MB   U GRD 
-    211  00  YUQW99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100D8       33     975.0 MB   U GRD
-    211  00  YUQW93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100D8       33     950.0 MB   U GRD
-    211  00  YUQW95   0                        EOM
-  00001C02  0760FF80  2164039D...000100D8       33     925.0 MB   U GRD
-    211  00  YUQW92   0                        EOM
-  00001C02  0760FF80  21640384...000100D8       33     900.0 MB   U GRD
-    211  00  YUQW90   0                        EOM
-  00001C02  0760FF80  2164036B...000100D8       33     875.0 MB   U GRD
-    211  00  YUQW91   0                        EOM
-  00001C02  0760FF80  21640352...000100D8       33     850.0 MB   U GRD
-    211  00  YUQW85   0                        EOM
-  00001C02  0760FF80  21640339...000100D8       33     825.0 MB   U GRD
-    211  00  YUQW82   0                        EOM
-  00001C02  0760FF80  21640320...000100D8       33     800.0 MB   U GRD
-    211  00  YUQW80   0                        EOM
-  00001C02  0760FF80  21640307...000100D8       33     775.0 MB   U GRD
-    211  00  YUQW77   0                        EOM
-  00001C02  0760FF80  216402EE...000100D8       33     750.0 MB   U GRD
-    211  00  YUQW75   0                        EOM
-  00001C02  0760FF80  216402D5...000100D8       33     725.0 MB   U GRD
-    211  00  YUQW72   0                        EOM
-  00001C02  0760FF80  216402BC...000100D8       33     700.0 MB   U GRD
-    211  00  YUQW70   0                        EOM
-  00001C02  0760FF80  216402A3...000100D8       33     675.0 MB   U GRD
-    211  00  YUQW67   0                        EOM
-  00001C02  0760FF80  2164028A...000100D8       33     650.0 MB   U GRD
-    211  00  YUQW65   0                        EOM
-  00001C02  0760FF80  21640271...000100D8       33     625.0 MB   U GRD
-    211  00  YUQW62   0                        EOM
-  00001C02  0760FF80  21640258...000100D8       33     600.0 MB   U GRD
-    211  00  YUQW60   0                        EOM
-  00001C02  0760FF80  2164023F...000100D8       33     575.0 MB   U GRD
-    211  00  YUQW57   0                        EOM
-  00001C02  0760FF80  21640226...000100D8       33     550.0 MB   U GRD
-    211  00  YUQW55   0                        EOM
-  00001C02  0760FF80  2164020D...000100D8       33     525.0 MB   U GRD
-    211  00  YUQW52   0                        EOM
-  00001C02  0760FF80  216401F4...000100D8       33     500.0 MB   U GRD
-    211  00  YUQW50   0                        EOM
-  00001C02  0760FF80  216401C2...000100D8       33     450.0 MB   U GRD
-    211  00  YUQW45   0                        EOM
-  00001C02  0760FF80  21640190...000100D8       33     400.0 MB   U GRD
-    211  00  YUQW40   0                        EOM
-  00001C02  0760FF80  2164015E...000100D8       33     350.0 MB   U GRD
-    211  00  YUQW35   0                        EOM
-  00001C02  0760FF80  2164012C...000100D8       33     300.0 MB   U GRD
-    211  00  YUQW30   0                        EOM
-  00001C02  0760FF80  216400FA...000100D8       33     250.0 MB   U GRD
-    211  00  YUQW25   0                        EOM
-  00001C02  0760FF80  216400C8...000100D8       33     200.0 MB   U GRD
-    211  00  YUQW20   0                        EOM
-  00001C02  0760FF80  21640096...000100D8       33     150.0 MB   U GRD
-    211  00  YUQW15   0                        EOM
-  00001C02  0760FF80  21640064...000100D8       33     100.0 MB   U GRD
-    211  00  YUQW10   0                        EOM
-  00001C02  0760FF80  226403E8...000100D8       34    1000.0 MB   V GRD
-    211  00  YVQW99   0                        EOM
-  00001C02  0760FF80  226403CF...000100D8       34     975.0 MB   V GRD
-    211  00  YVQW93   0                        EOM
-  00001C02  0760FF80  226403B6...000100D8       34     950.0 MB   V GRD
-    211  00  YVQW95   0                        EOM
-  00001C02  0760FF80  2264039D...000100D8       34     925.0 MB   V GRD
-    211  00  YVQW92   0                        EOM
-  00001C02  0760FF80  22640384...000100D8       34     900.0 MB   V GRD
-    211  00  YVQW90   0                        EOM
-  00001C02  0760FF80  2264036B...000100D8       34     875.0 MB   V GRD
-    211  00  YVQW91   0                        EOM
-  00001C02  0760FF80  22640352...000100D8       34     850.0 MB   V GRD
-    211  00  YVQW85   0                        EOM
-  00001C02  0760FF80  22640339...000100D8       34     825.0 MB   V GRD
-    211  00  YVQW82   0                        EOM
-  00001C02  0760FF80  22640320...000100D8       34     800.0 MB   V GRD
-    211  00  YVQW80   0                        EOM
-  00001C02  0760FF80  22640307...000100D8       34     775.0 MB   V GRD
-    211  00  YVQW77   0                        EOM
-  00001C02  0760FF80  226402EE...000100D8       34     750.0 MB   V GRD
-    211  00  YVQW75   0                        EOM
-  00001C02  0760FF80  226402D5...000100D8       34     725.0 MB   V GRD
-    211  00  YVQW72   0                        EOM
-  00001C02  0760FF80  226402BC...000100D8       34     700.0 MB   V GRD
-    211  00  YVQW70   0                        EOM
-  00001C02  0760FF80  226402A3...000100D8       34     675.0 MB   V GRD
-    211  00  YVQW67   0                        EOM
-  00001C02  0760FF80  2264028A...000100D8       34     650.0 MB   V GRD
-    211  00  YVQW65   0                        EOM
-  00001C02  0760FF80  22640271...000100D8       34     625.0 MB   V GRD
-    211  00  YVQW62   0                        EOM
-  00001C02  0760FF80  22640258...000100D8       34     600.0 MB   V GRD
-    211  00  YVQW60   0                        EOM
-  00001C02  0760FF80  2264023F...000100D8       34     575.0 MB   V GRD
-    211  00  YVQW57   0                        EOM
-  00001C02  0760FF80  22640226...000100D8       34     550.0 MB   V GRD
-    211  00  YVQW55   0                        EOM
-  00001C02  0760FF80  2264020D...000100D8       34     525.0 MB   V GRD
-    211  00  YVQW52   0                        EOM
-  00001C02  0760FF80  226401F4...000100D8       34     500.0 MB   V GRD
-    211  00  YVQW50   0                        EOM
-  00001C02  0760FF80  226401C2...000100D8       34     450.0 MB   V GRD
-    211  00  YVQW45   0                        EOM
-  00001C02  0760FF80  22640190...000100D8       34     400.0 MB   V GRD
-    211  00  YVQW40   0                        EOM
-  00001C02  0760FF80  2264015E...000100D8       34     350.0 MB   V GRD
-    211  00  YVQW35   0                        EOM
-  00001C02  0760FF80  2264012C...000100D8       34     300.0 MB   V GRD
-    211  00  YVQW30   0                        EOM
-  00001C02  0760FF80  226400FA...000100D8       34     250.0 MB   V GRD
-    211  00  YVQW25   0                        EOM
-  00001C02  0760FF80  226400C8...000100D8       34     200.0 MB   V GRD
-    211  00  YVQW20   0                        EOM
-  00001C02  0760FF80  22640096...000100D8       34     150.0 MB   V GRD
-    211  00  YVQW15   0                        EOM
-  00001C02  0760FF80  22640064...000100D8       34     100.0 MB   V GRD
-    211  00  YVQW10   0                        EOM
-  00001C02  0760FF80  02660000...000100D8       02           MSL  PRMSL
-    211  00  YPQW89   0                        EOM
-  00001C02  0760FF80  3D010000...000100D8       61           SFC  A PCP
-    211  00  YEQW98   0                        EOM
-  00001C02  0760FF80  346403E8...000100D8       52    1000.0 MB   R H
-    211  00  YRQW99   0                        EOM
-  00001C02  0760FF80  346403CF...000100D8       52     975.0 MB   R H
-    211  00  YRQW93   0                        EOM
-  00001C02  0760FF80  346403B6...000100D8       52     950.0 MB   R H
-    211  00  YRQW95   0                        EOM
-  00001C02  0760FF80  3464039D...000100D8       52     925.0 MB   R H
-    211  00  YRQW92   0                        EOM
-  00001C02  0760FF80  34640384...000100D8       52     900.0 MB   R H
-    211  00  YRQW90   0                        EOM
-  00001C02  0760FF80  3464036B...000100D8       52     875.0 MB   R H
-    211  00  YRQW91   0                        EOM
-  00001C02  0760FF80  34640352...000100D8       52     850.0 MB   R H
-    211  00  YRQW85   0                        EOM
-  00001C02  0760FF80  34640339...000100D8       52     825.0 MB   R H
-    211  00  YRQW82   0                        EOM
-  00001C02  0760FF80  34640320...000100D8       52     800.0 MB   R H
-    211  00  YRQW80   0                        EOM
-  00001C02  0760FF80  34640307...000100D8       52     775.0 MB   R H
-    211  00  YRQW77   0                        EOM
-  00001C02  0760FF80  346402EE...000100D8       52     750.0 MB   R H
-    211  00  YRQW75   0                        EOM
-  00001C02  0760FF80  346402D5...000100D8       52     725.0 MB   R H
-    211  00  YRQW72   0                        EOM
-  00001C02  0760FF80  346402BC...000100D8       52     700.0 MB   R H
-    211  00  YRQW70   0                        EOM
-  00001C02  0760FF80  346402A3...000100D8       52     675.0 MB   R H
-    211  00  YRQW67   0                        EOM
-  00001C02  0760FF80  3464028A...000100D8       52     650.0 MB   R H
-    211  00  YRQW65   0                        EOM
-  00001C02  0760FF80  34640271...000100D8       52     625.0 MB   R H
-    211  00  YRQW62   0                        EOM
-  00001C02  0760FF80  34640258...000100D8       52     600.0 MB   R H
-    211  00  YRQW60   0                        EOM
-  00001C02  0760FF80  3464023F...000100D8       52     575.0 MB   R H
-    211  00  YRQW57   0                        EOM
-  00001C02  0760FF80  34640226...000100D8       52     550.0 MB   R H
-    211  00  YRQW55   0                        EOM
-  00001C02  0760FF80  3464020D...000100D8       52     525.0 MB   R H
-    211  00  YRQW52   0                        EOM
-  00001C02  0760FF80  346401F4...000100D8       52     500.0 MB   R H
-    211  00  YRQW50   0                        EOM
-  00001C02  0760FF80  346401C2...000100D8       52     450.0 MB   R H
-    211  00  YRQW45   0                        EOM
-  00001C02  0760FF80  34640190...000100D8       52     400.0 MB   R H
-    211  00  YRQW40   0                        EOM
-  00001C02  0760FF80  3464015E...000100D8       52     350.0 MB   R H
-    211  00  YRQW35   0                        EOM
-  00001C02  0760FF80  3464012C...000100D8       52     300.0 MB   R H
-    211  00  YRQW30   0                        EOM
-  00001C02  0760FF80  346400FA...000100D8       52     250.0 MB   R H
-    211  00  YRQW25   0                        EOM
-  00001C02  0760FF80  346400C8...000100D8       52     200.0 MB   R H
-    211  00  YRQW20   0                        EOM
-  00001C02  0760FF80  34640096...000100D8       52     150.0 MB   R H
-    211  00  YRQW15   0                        EOM
-  00001C02  0760FF80  34640064...000100D8       52     100.0 MB   R H
-    211  00  YRQW10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100D8       11    1000.0 MB   TMP
-    211  00  YTQW99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100D8       11     975.0 MB   TMP
-    211  00  YTQW93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100D8       11     950.0 MB   TMP
-    211  00  YTQW95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100D8       11     925.0 MB   TMP
-    211  00  YTQW92   0                        EOM
-  00001C02  0760FF80  0B640384...000100D8       11     900.0 MB   TMP
-    211  00  YTQW90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100D8       11     875.0 MB   TMP
-    211  00  YTQW91   0                        EOM
-  00001C02  0760FF80  0B640352...000100D8       11     850.0 MB   TMP
-    211  00  YTQW85   0                        EOM
-  00001C02  0760FF80  0B640339...000100D8       11     825.0 MB   TMP
-    211  00  YTQW82   0                        EOM
-  00001C02  0760FF80  0B640320...000100D8       11     800.0 MB   TMP
-    211  00  YTQW80   0                        EOM
-  00001C02  0760FF80  0B640307...000100D8       11     775.0 MB   TMP
-    211  00  YTQW77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100D8       11     750.0 MB   TMP
-    211  00  YTQW75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100D8       11     725.0 MB   TMP
-    211  00  YTQW72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100D8       11     700.0 MB   TMP
-    211  00  YTQW70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100D8       11     675.0 MB   TMP
-    211  00  YTQW67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100D8       11     650.0 MB   TMP
-    211  00  YTQW65   0                        EOM
-  00001C02  0760FF80  0B640271...000100D8       11     625.0 MB   TMP
-    211  00  YTQW62   0                        EOM
-  00001C02  0760FF80  0B640258...000100D8       11     600.0 MB   TMP
-    211  00  YTQW60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100D8       11     575.0 MB   TMP
-    211  00  YTQW57   0                        EOM
-  00001C02  0760FF80  0B640226...000100D8       11     550.0 MB   TMP
-    211  00  YTQW55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100D8       11     525.0 MB   TMP
-    211  00  YTQW52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100D8       11     500.0 MB   TMP
-    211  00  YTQW50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100D8       11     450.0 MB   TMP
-    211  00  YTQW45   0                        EOM
-  00001C02  0760FF80  0B640190...000100D8       11     400.0 MB   TMP
-    211  00  YTQW40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100D8       11     350.0 MB   TMP
-    211  00  YTQW35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100D8       11     300.0 MB   TMP
-    211  00  YTQW30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100D8       11     250.0 MB   TMP
-    211  00  YTQW25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100D8       11     200.0 MB   TMP
-    211  00  YTQW20   0                        EOM
-  00001C02  0760FF80  0B640096...000100D8       11     150.0 MB   TMP
-    211  00  YTQW15   0                        EOM
-  00001C02  0760FF80  0B640064...000100D8       11     100.0 MB   TMP
-    211  00  YTQW10   0                        EOM
-  00001C02  0760FF80  28640352...000100D8       40     850.0 MB  DZDT
-    211  00  YOQW85   0                        EOM
-  00001C02  0760FF80  286402BC...000100D8       40     700.0 MB  DZDT
-    211  00  YOQW70   0                        EOM
-  00001C02  0760FF80  286401F4...000100D8       40     500.0 MB  DZDT
-    211  00  YOQW50   0                        EOM
-  00001C02  0760FF80  01010000...000100D8       01          SFC  PRES
-    211  00  YPQW98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100D8       52        44/100  R H
-    211  00  YRQW00   0                        EOM
-  00001C02  0760FF80  296401F4...000100D8       41     500.0 MB ABS V
-    211  00  YCQW50   0                        EOM
-  00001C02  0760FF80  9D010000...000100D8      157          SFC   CAPE
-    211  00  YWQW98   0                        EOM
-  00001C02  0760FF80  9C010000...000100D8      156          SFC   CIN
-    211  00  YYQW98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100D8      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQW86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100D8      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQW86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100D8       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100D8       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100D8       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100D8       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  0B749678...000100D8       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100D8       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQW86   0                        EOM
-  00001C02  0760FF80  34741E00...000100D8       52   30 SPDY   0 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100D8       52   60 SPDY  30 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100D8       52   90 SPDY  60 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  3474785A...000100D8       52  120 SPDY  90 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  34749678...000100D8       52  150 SPDY 120 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  3474B496...000100D8       52  180 SPDY 150 SPDY  R H
-    211  00  YRQW86   0                        EOM
-  00001C02  0760FF80  21741E00...000100D8       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100D8       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100D8       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  2174785A...000100D8       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  21749678...000100D8       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  2174B496...000100D8       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQW86   0                        EOM
-  00001C02  0760FF80  22741E00...000100D8       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100D8       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100D8       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  2274785A...000100D8       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  22749678...000100D8       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  2274B496...000100D8       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQW86   0                        EOM
-  00001C02  0760FF80  0B690002...000100D8       11    2  HTGL     TMP
-    211  00  YTQW98   0                        EOM
-  00001C02  0760FF80  34690002...000100D8       52    2  HTGL     R H
-    211  00  YRQW98   0                        EOM
-  00001C02  0760FF80  2169000A...000100D8       33   10  HTGL     U GRD
-    211  00  YUQW98   0                        EOM
-  00001C02  0760FF80  2269000A...000100D8       34   10  HTGL     V GRD
-    211  00  YVQW98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs222.211 b/parm/wmo/grib_awpgfs222.211
deleted file mode 100755
index c0e25a858e..0000000000
--- a/parm/wmo/grib_awpgfs222.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100DE       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100DE       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100DE       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100DE       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100DE       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100DE       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100DE       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100DE       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100DE       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100DE       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100DE       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100DE       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100DE       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100DE       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100DE       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100DE       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100DE       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100DE       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100DE       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100DE       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100DE       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  076401C2...000100DE       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  07640190...000100DE       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  0764015E...000100DE       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100DE       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100DE       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100DE       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100DE       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100DE       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100DE       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100DE       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100DE       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100DE       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100DE       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100DE       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100DE       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100DE       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100DE       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100DE       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100DE       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100DE       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100DE       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100DE       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100DE       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100DE       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100DE       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100DE       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100DE       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100DE       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100DE       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100DE       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100DE       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100DE       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100DE       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100DE       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100DE       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100DE       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100DE       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100DE       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100DE       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100DE       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100DE       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100DE       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100DE       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100DE       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100DE       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100DE       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100DE       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100DE       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100DE       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100DE       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100DE       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100DE       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100DE       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100DE       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100DE       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100DE       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100DE       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100DE       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100DE       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100DE       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100DE       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100DE       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100DE       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100DE       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100DE       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100DE       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100DE       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100DE       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100DE       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100DE       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100DE       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100DE       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100DE       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100DE       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100DE       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100DE       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100DE       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100DE       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100DE       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100DE       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100DE       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100DE       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100DE       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100DE       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100DE       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100DE       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100DE       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100DE       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100DE       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100DE       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100DE       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100DE       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100DE       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100DE       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100DE       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100DE       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100DE       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100DE       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100DE       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100DE       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100DE       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100DE       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100DE       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100DE       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100DE       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100DE       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100DE       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100DE       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100DE       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100DE       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100DE       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100DE       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100DE       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100DE       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100DE       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100DE       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100DE       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100DE       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100DE       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100DE       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100DE       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100DE       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100DE       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100DE       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100DE       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100DE       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100DE       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100DE       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100DE       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100DE       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100DE       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100DE       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100DE      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100DE      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100DE      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100DE      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100DE       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100DE       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100DE       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100DE       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100DE       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100DE       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100DE       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100DE       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100DE       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100DE       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100DE       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100DE       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100DE       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100DE       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100DE       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100DE       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100DE       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100DE       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100DE       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100DE       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100DE       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100DE       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100DE       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100DE       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100DE       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100DE       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100DE       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100DE       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs228.211 b/parm/wmo/grib_awpgfs228.211
deleted file mode 100755
index e52c23ff4e..0000000000
--- a/parm/wmo/grib_awpgfs228.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100E4       07    1000.0 MB   HGT
-    211  00  YHQX99   0                        EOM
-  00001C02  0760FF80  076403CF...000100E4       07     975.0 MB   HGT
-    211  00  YHQX93   0                        EOM
-  00001C02  0760FF80  076403B6...000100E4       07     950.0 MB   HGT
-    211  00  YHQX95   0                        EOM
-  00001C02  0760FF80  0764039D...000100E4       07     925.0 MB   HGT
-    211  00  YHQX92   0                        EOM
-  00001C02  0760FF80  07640384...000100E4       07     900.0 MB   HGT
-    211  00  YHQX90   0                        EOM
-  00001C02  0760FF80  0764036B...000100E4       07     875.0 MB   HGT
-    211  00  YHQX91   0                        EOM
-  00001C02  0760FF80  07640352...000100E4       07     850.0 MB   HGT
-    211  00  YHQX85   0                        EOM
-  00001C02  0760FF80  07640339...000100E4       07     825.0 MB   HGT
-    211  00  YHQX82   0                        EOM
-  00001C02  0760FF80  07640320...000100E4       07     800.0 MB   HGT
-    211  00  YHQX80   0                        EOM
-  00001C02  0760FF80  07640307...000100E4       07     775.0 MB   HGT
-    211  00  YHQX77   0                        EOM
-  00001C02  0760FF80  076402EE...000100E4       07     750.0 MB   HGT
-    211  00  YHQX75   0                        EOM
-  00001C02  0760FF80  076402D5...000100E4       07     725.0 MB   HGT
-    211  00  YHQX72   0                        EOM
-  00001C02  0760FF80  076402BC...000100E4       07     700.0 MB   HGT
-    211  00  YHQX70   0                        EOM
-  00001C02  0760FF80  076402A3...000100E4       07     675.0 MB   HGT
-    211  00  YHQX67   0                        EOM
-  00001C02  0760FF80  0764028A...000100E4       07     650.0 MB   HGT
-    211  00  YHQX65   0                        EOM
-  00001C02  0760FF80  07640271...000100E4       07     625.0 MB   HGT
-    211  00  YHQX62   0                        EOM
-  00001C02  0760FF80  07640258...000100E4       07     600.0 MB   HGT
-    211  00  YHQX60   0                        EOM
-  00001C02  0760FF80  0764023F...000100E4       07     575.0 MB   HGT
-    211  00  YHQX57   0                        EOM
-  00001C02  0760FF80  07640226...000100E4       07     550.0 MB   HGT
-    211  00  YHQX55   0                        EOM
-  00001C02  0760FF80  0764020D...000100E4       07     525.0 MB   HGT
-    211  00  YHQX52   0                        EOM
-  00001C02  0760FF80  076401F4...000100E4       07     500.0 MB   HGT
-    211  00  YHQX50   0                        EOM
-  00001C02  0760FF80  076401C2...000100E4       07     450.0 MB   HGT
-    211  00  YHQX45   0                        EOM
-  00001C02  0760FF80  07640190...000100E4       07     400.0 MB   HGT
-    211  00  YHQX40   0                        EOM
-  00001C02  0760FF80  0764015E...000100E4       07     350.0 MB   HGT
-    211  00  YHQX35   0                        EOM
-  00001C02  0760FF80  0764012C...000100E4       07     300.0 MB   HGT
-    211  00  YHQX30   0                        EOM
-  00001C02  0760FF80  076400FA...000100E4       07     250.0 MB   HGT
-    211  00  YHQX25   0                        EOM
-  00001C02  0760FF80  076400C8...000100E4       07     200.0 MB   HGT
-    211  00  YHQX20   0                        EOM
-  00001C02  0760FF80  07640096...000100E4       07     150.0 MB   HGT
-    211  00  YHQX15   0                        EOM
-  00001C02  0760FF80  07640064...000100E4       07     100.0 MB   HGT
-    211  00  YHQX10   0                        EOM
-  00001C02  0760FF80  216403E8...000100E4       33    1000.0 MB   U GRD 
-    211  00  YUQX99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100E4       33     975.0 MB   U GRD
-    211  00  YUQX93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100E4       33     950.0 MB   U GRD
-    211  00  YUQX95   0                        EOM
-  00001C02  0760FF80  2164039D...000100E4       33     925.0 MB   U GRD
-    211  00  YUQX92   0                        EOM
-  00001C02  0760FF80  21640384...000100E4       33     900.0 MB   U GRD
-    211  00  YUQX90   0                        EOM
-  00001C02  0760FF80  2164036B...000100E4       33     875.0 MB   U GRD
-    211  00  YUQX91   0                        EOM
-  00001C02  0760FF80  21640352...000100E4       33     850.0 MB   U GRD
-    211  00  YUQX85   0                        EOM
-  00001C02  0760FF80  21640339...000100E4       33     825.0 MB   U GRD
-    211  00  YUQX82   0                        EOM
-  00001C02  0760FF80  21640320...000100E4       33     800.0 MB   U GRD
-    211  00  YUQX80   0                        EOM
-  00001C02  0760FF80  21640307...000100E4       33     775.0 MB   U GRD
-    211  00  YUQX77   0                        EOM
-  00001C02  0760FF80  216402EE...000100E4       33     750.0 MB   U GRD
-    211  00  YUQX75   0                        EOM
-  00001C02  0760FF80  216402D5...000100E4       33     725.0 MB   U GRD
-    211  00  YUQX72   0                        EOM
-  00001C02  0760FF80  216402BC...000100E4       33     700.0 MB   U GRD
-    211  00  YUQX70   0                        EOM
-  00001C02  0760FF80  216402A3...000100E4       33     675.0 MB   U GRD
-    211  00  YUQX67   0                        EOM
-  00001C02  0760FF80  2164028A...000100E4       33     650.0 MB   U GRD
-    211  00  YUQX65   0                        EOM
-  00001C02  0760FF80  21640271...000100E4       33     625.0 MB   U GRD
-    211  00  YUQX62   0                        EOM
-  00001C02  0760FF80  21640258...000100E4       33     600.0 MB   U GRD
-    211  00  YUQX60   0                        EOM
-  00001C02  0760FF80  2164023F...000100E4       33     575.0 MB   U GRD
-    211  00  YUQX57   0                        EOM
-  00001C02  0760FF80  21640226...000100E4       33     550.0 MB   U GRD
-    211  00  YUQX55   0                        EOM
-  00001C02  0760FF80  2164020D...000100E4       33     525.0 MB   U GRD
-    211  00  YUQX52   0                        EOM
-  00001C02  0760FF80  216401F4...000100E4       33     500.0 MB   U GRD
-    211  00  YUQX50   0                        EOM
-  00001C02  0760FF80  216401C2...000100E4       33     450.0 MB   U GRD
-    211  00  YUQX45   0                        EOM
-  00001C02  0760FF80  21640190...000100E4       33     400.0 MB   U GRD
-    211  00  YUQX40   0                        EOM
-  00001C02  0760FF80  2164015E...000100E4       33     350.0 MB   U GRD
-    211  00  YUQX35   0                        EOM
-  00001C02  0760FF80  2164012C...000100E4       33     300.0 MB   U GRD
-    211  00  YUQX30   0                        EOM
-  00001C02  0760FF80  216400FA...000100E4       33     250.0 MB   U GRD
-    211  00  YUQX25   0                        EOM
-  00001C02  0760FF80  216400C8...000100E4       33     200.0 MB   U GRD
-    211  00  YUQX20   0                        EOM
-  00001C02  0760FF80  21640096...000100E4       33     150.0 MB   U GRD
-    211  00  YUQX15   0                        EOM
-  00001C02  0760FF80  21640064...000100E4       33     100.0 MB   U GRD
-    211  00  YUQX10   0                        EOM
-  00001C02  0760FF80  226403E8...000100E4       34    1000.0 MB   V GRD
-    211  00  YVQX99   0                        EOM
-  00001C02  0760FF80  226403CF...000100E4       34     975.0 MB   V GRD
-    211  00  YVQX93   0                        EOM
-  00001C02  0760FF80  226403B6...000100E4       34     950.0 MB   V GRD
-    211  00  YVQX95   0                        EOM
-  00001C02  0760FF80  2264039D...000100E4       34     925.0 MB   V GRD
-    211  00  YVQX92   0                        EOM
-  00001C02  0760FF80  22640384...000100E4       34     900.0 MB   V GRD
-    211  00  YVQX90   0                        EOM
-  00001C02  0760FF80  2264036B...000100E4       34     875.0 MB   V GRD
-    211  00  YVQX91   0                        EOM
-  00001C02  0760FF80  22640352...000100E4       34     850.0 MB   V GRD
-    211  00  YVQX85   0                        EOM
-  00001C02  0760FF80  22640339...000100E4       34     825.0 MB   V GRD
-    211  00  YVQX82   0                        EOM
-  00001C02  0760FF80  22640320...000100E4       34     800.0 MB   V GRD
-    211  00  YVQX80   0                        EOM
-  00001C02  0760FF80  22640307...000100E4       34     775.0 MB   V GRD
-    211  00  YVQX77   0                        EOM
-  00001C02  0760FF80  226402EE...000100E4       34     750.0 MB   V GRD
-    211  00  YVQX75   0                        EOM
-  00001C02  0760FF80  226402D5...000100E4       34     725.0 MB   V GRD
-    211  00  YVQX72   0                        EOM
-  00001C02  0760FF80  226402BC...000100E4       34     700.0 MB   V GRD
-    211  00  YVQX70   0                        EOM
-  00001C02  0760FF80  226402A3...000100E4       34     675.0 MB   V GRD
-    211  00  YVQX67   0                        EOM
-  00001C02  0760FF80  2264028A...000100E4       34     650.0 MB   V GRD
-    211  00  YVQX65   0                        EOM
-  00001C02  0760FF80  22640271...000100E4       34     625.0 MB   V GRD
-    211  00  YVQX62   0                        EOM
-  00001C02  0760FF80  22640258...000100E4       34     600.0 MB   V GRD
-    211  00  YVQX60   0                        EOM
-  00001C02  0760FF80  2264023F...000100E4       34     575.0 MB   V GRD
-    211  00  YVQX57   0                        EOM
-  00001C02  0760FF80  22640226...000100E4       34     550.0 MB   V GRD
-    211  00  YVQX55   0                        EOM
-  00001C02  0760FF80  2264020D...000100E4       34     525.0 MB   V GRD
-    211  00  YVQX52   0                        EOM
-  00001C02  0760FF80  226401F4...000100E4       34     500.0 MB   V GRD
-    211  00  YVQX50   0                        EOM
-  00001C02  0760FF80  226401C2...000100E4       34     450.0 MB   V GRD
-    211  00  YVQX45   0                        EOM
-  00001C02  0760FF80  22640190...000100E4       34     400.0 MB   V GRD
-    211  00  YVQX40   0                        EOM
-  00001C02  0760FF80  2264015E...000100E4       34     350.0 MB   V GRD
-    211  00  YVQX35   0                        EOM
-  00001C02  0760FF80  2264012C...000100E4       34     300.0 MB   V GRD
-    211  00  YVQX30   0                        EOM
-  00001C02  0760FF80  226400FA...000100E4       34     250.0 MB   V GRD
-    211  00  YVQX25   0                        EOM
-  00001C02  0760FF80  226400C8...000100E4       34     200.0 MB   V GRD
-    211  00  YVQX20   0                        EOM
-  00001C02  0760FF80  22640096...000100E4       34     150.0 MB   V GRD
-    211  00  YVQX15   0                        EOM
-  00001C02  0760FF80  22640064...000100E4       34     100.0 MB   V GRD
-    211  00  YVQX10   0                        EOM
-  00001C02  0760FF80  02660000...000100E4       02           MSL  PRMSL
-    211  00  YPQX89   0                        EOM
-  00001C02  0760FF80  3D010000...000100E4       61           SFC  A PCP
-    211  00  YEQX98   0                        EOM
-  00001C02  0760FF80  346403E8...000100E4       52    1000.0 MB   R H
-    211  00  YRQX99   0                        EOM
-  00001C02  0760FF80  346403CF...000100E4       52     975.0 MB   R H
-    211  00  YRQX93   0                        EOM
-  00001C02  0760FF80  346403B6...000100E4       52     950.0 MB   R H
-    211  00  YRQX95   0                        EOM
-  00001C02  0760FF80  3464039D...000100E4       52     925.0 MB   R H
-    211  00  YRQX92   0                        EOM
-  00001C02  0760FF80  34640384...000100E4       52     900.0 MB   R H
-    211  00  YRQX90   0                        EOM
-  00001C02  0760FF80  3464036B...000100E4       52     875.0 MB   R H
-    211  00  YRQX91   0                        EOM
-  00001C02  0760FF80  34640352...000100E4       52     850.0 MB   R H
-    211  00  YRQX85   0                        EOM
-  00001C02  0760FF80  34640339...000100E4       52     825.0 MB   R H
-    211  00  YRQX82   0                        EOM
-  00001C02  0760FF80  34640320...000100E4       52     800.0 MB   R H
-    211  00  YRQX80   0                        EOM
-  00001C02  0760FF80  34640307...000100E4       52     775.0 MB   R H
-    211  00  YRQX77   0                        EOM
-  00001C02  0760FF80  346402EE...000100E4       52     750.0 MB   R H
-    211  00  YRQX75   0                        EOM
-  00001C02  0760FF80  346402D5...000100E4       52     725.0 MB   R H
-    211  00  YRQX72   0                        EOM
-  00001C02  0760FF80  346402BC...000100E4       52     700.0 MB   R H
-    211  00  YRQX70   0                        EOM
-  00001C02  0760FF80  346402A3...000100E4       52     675.0 MB   R H
-    211  00  YRQX67   0                        EOM
-  00001C02  0760FF80  3464028A...000100E4       52     650.0 MB   R H
-    211  00  YRQX65   0                        EOM
-  00001C02  0760FF80  34640271...000100E4       52     625.0 MB   R H
-    211  00  YRQX62   0                        EOM
-  00001C02  0760FF80  34640258...000100E4       52     600.0 MB   R H
-    211  00  YRQX60   0                        EOM
-  00001C02  0760FF80  3464023F...000100E4       52     575.0 MB   R H
-    211  00  YRQX57   0                        EOM
-  00001C02  0760FF80  34640226...000100E4       52     550.0 MB   R H
-    211  00  YRQX55   0                        EOM
-  00001C02  0760FF80  3464020D...000100E4       52     525.0 MB   R H
-    211  00  YRQX52   0                        EOM
-  00001C02  0760FF80  346401F4...000100E4       52     500.0 MB   R H
-    211  00  YRQX50   0                        EOM
-  00001C02  0760FF80  346401C2...000100E4       52     450.0 MB   R H
-    211  00  YRQX45   0                        EOM
-  00001C02  0760FF80  34640190...000100E4       52     400.0 MB   R H
-    211  00  YRQX40   0                        EOM
-  00001C02  0760FF80  3464015E...000100E4       52     350.0 MB   R H
-    211  00  YRQX35   0                        EOM
-  00001C02  0760FF80  3464012C...000100E4       52     300.0 MB   R H
-    211  00  YRQX30   0                        EOM
-  00001C02  0760FF80  346400FA...000100E4       52     250.0 MB   R H
-    211  00  YRQX25   0                        EOM
-  00001C02  0760FF80  346400C8...000100E4       52     200.0 MB   R H
-    211  00  YRQX20   0                        EOM
-  00001C02  0760FF80  34640096...000100E4       52     150.0 MB   R H
-    211  00  YRQX15   0                        EOM
-  00001C02  0760FF80  34640064...000100E4       52     100.0 MB   R H
-    211  00  YRQX10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100E4       11    1000.0 MB   TMP
-    211  00  YTQX99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100E4       11     975.0 MB   TMP
-    211  00  YTQX93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100E4       11     950.0 MB   TMP
-    211  00  YTQX95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100E4       11     925.0 MB   TMP
-    211  00  YTQX92   0                        EOM
-  00001C02  0760FF80  0B640384...000100E4       11     900.0 MB   TMP
-    211  00  YTQX90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100E4       11     875.0 MB   TMP
-    211  00  YTQX91   0                        EOM
-  00001C02  0760FF80  0B640352...000100E4       11     850.0 MB   TMP
-    211  00  YTQX85   0                        EOM
-  00001C02  0760FF80  0B640339...000100E4       11     825.0 MB   TMP
-    211  00  YTQX82   0                        EOM
-  00001C02  0760FF80  0B640320...000100E4       11     800.0 MB   TMP
-    211  00  YTQX80   0                        EOM
-  00001C02  0760FF80  0B640307...000100E4       11     775.0 MB   TMP
-    211  00  YTQX77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100E4       11     750.0 MB   TMP
-    211  00  YTQX75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100E4       11     725.0 MB   TMP
-    211  00  YTQX72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100E4       11     700.0 MB   TMP
-    211  00  YTQX70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100E4       11     675.0 MB   TMP
-    211  00  YTQX67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100E4       11     650.0 MB   TMP
-    211  00  YTQX65   0                        EOM
-  00001C02  0760FF80  0B640271...000100E4       11     625.0 MB   TMP
-    211  00  YTQX62   0                        EOM
-  00001C02  0760FF80  0B640258...000100E4       11     600.0 MB   TMP
-    211  00  YTQX60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100E4       11     575.0 MB   TMP
-    211  00  YTQX57   0                        EOM
-  00001C02  0760FF80  0B640226...000100E4       11     550.0 MB   TMP
-    211  00  YTQX55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100E4       11     525.0 MB   TMP
-    211  00  YTQX52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100E4       11     500.0 MB   TMP
-    211  00  YTQX50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100E4       11     450.0 MB   TMP
-    211  00  YTQX45   0                        EOM
-  00001C02  0760FF80  0B640190...000100E4       11     400.0 MB   TMP
-    211  00  YTQX40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100E4       11     350.0 MB   TMP
-    211  00  YTQX35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100E4       11     300.0 MB   TMP
-    211  00  YTQX30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100E4       11     250.0 MB   TMP
-    211  00  YTQX25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100E4       11     200.0 MB   TMP
-    211  00  YTQX20   0                        EOM
-  00001C02  0760FF80  0B640096...000100E4       11     150.0 MB   TMP
-    211  00  YTQX15   0                        EOM
-  00001C02  0760FF80  0B640064...000100E4       11     100.0 MB   TMP
-    211  00  YTQX10   0                        EOM
-  00001C02  0760FF80  28640352...000100E4       40     850.0 MB  DZDT
-    211  00  YOQX85   0                        EOM
-  00001C02  0760FF80  286402BC...000100E4       40     700.0 MB  DZDT
-    211  00  YOQX70   0                        EOM
-  00001C02  0760FF80  286401F4...000100E4       40     500.0 MB  DZDT
-    211  00  YOQX50   0                        EOM
-  00001C02  0760FF80  01010000...000100E4       01          SFC  PRES
-    211  00  YPQX98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100E4       52        44/100  R H
-    211  00  YRQX00   0                        EOM
-  00001C02  0760FF80  296401F4...000100E4       41     500.0 MB ABS V
-    211  00  YCQX50   0                        EOM
-  00001C02  0760FF80  9D010000...000100E4      157          SFC   CAPE
-    211  00  YWQX98   0                        EOM
-  00001C02  0760FF80  9C010000...000100E4      156          SFC   CIN
-    211  00  YYQX98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100E4      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQX86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100E4      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQX86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100E4       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100E4       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100E4       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100E4       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  0B749678...000100E4       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100E4       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQX86   0                        EOM
-  00001C02  0760FF80  34741E00...000100E4       52   30 SPDY   0 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100E4       52   60 SPDY  30 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100E4       52   90 SPDY  60 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  3474785A...000100E4       52  120 SPDY  90 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  34749678...000100E4       52  150 SPDY 120 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  3474B496...000100E4       52  180 SPDY 150 SPDY  R H
-    211  00  YRQX86   0                        EOM
-  00001C02  0760FF80  21741E00...000100E4       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100E4       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100E4       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  2174785A...000100E4       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  21749678...000100E4       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  2174B496...000100E4       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQX86   0                        EOM
-  00001C02  0760FF80  22741E00...000100E4       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100E4       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100E4       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  2274785A...000100E4       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  22749678...000100E4       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  2274B496...000100E4       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQX86   0                        EOM
-  00001C02  0760FF80  0B690002...000100E4       11    2  HTGL     TMP
-    211  00  YTQX98   0                        EOM
-  00001C02  0760FF80  34690002...000100E4       52    2  HTGL     R H
-    211  00  YRQX98   0                        EOM
-  00001C02  0760FF80  2169000A...000100E4       33   10  HTGL     U GRD
-    211  00  YUQX98   0                        EOM
-  00001C02  0760FF80  2269000A...000100E4       34   10  HTGL     V GRD
-    211  00  YVQX98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs234.211 b/parm/wmo/grib_awpgfs234.211
deleted file mode 100755
index 986112cc2f..0000000000
--- a/parm/wmo/grib_awpgfs234.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100EA       07    1000.0 MB   HGT
-    211  00  ZHQZ99   0                        EOM
-  00001C02  0760FF80  076403CF...000100EA       07     975.0 MB   HGT
-    211  00  ZHQZ93   0                        EOM
-  00001C02  0760FF80  076403B6...000100EA       07     950.0 MB   HGT
-    211  00  ZHQZ95   0                        EOM
-  00001C02  0760FF80  0764039D...000100EA       07     925.0 MB   HGT
-    211  00  ZHQZ92   0                        EOM
-  00001C02  0760FF80  07640384...000100EA       07     900.0 MB   HGT
-    211  00  ZHQZ90   0                        EOM
-  00001C02  0760FF80  0764036B...000100EA       07     875.0 MB   HGT
-    211  00  ZHQZ91   0                        EOM
-  00001C02  0760FF80  07640352...000100EA       07     850.0 MB   HGT
-    211  00  ZHQZ85   0                        EOM
-  00001C02  0760FF80  07640339...000100EA       07     825.0 MB   HGT
-    211  00  ZHQZ82   0                        EOM
-  00001C02  0760FF80  07640320...000100EA       07     800.0 MB   HGT
-    211  00  ZHQZ80   0                        EOM
-  00001C02  0760FF80  07640307...000100EA       07     775.0 MB   HGT
-    211  00  ZHQZ77   0                        EOM
-  00001C02  0760FF80  076402EE...000100EA       07     750.0 MB   HGT
-    211  00  ZHQZ75   0                        EOM
-  00001C02  0760FF80  076402D5...000100EA       07     725.0 MB   HGT
-    211  00  ZHQZ72   0                        EOM
-  00001C02  0760FF80  076402BC...000100EA       07     700.0 MB   HGT
-    211  00  ZHQZ70   0                        EOM
-  00001C02  0760FF80  076402A3...000100EA       07     675.0 MB   HGT
-    211  00  ZHQZ67   0                        EOM
-  00001C02  0760FF80  0764028A...000100EA       07     650.0 MB   HGT
-    211  00  ZHQZ65   0                        EOM
-  00001C02  0760FF80  07640271...000100EA       07     625.0 MB   HGT
-    211  00  ZHQZ62   0                        EOM
-  00001C02  0760FF80  07640258...000100EA       07     600.0 MB   HGT
-    211  00  ZHQZ60   0                        EOM
-  00001C02  0760FF80  0764023F...000100EA       07     575.0 MB   HGT
-    211  00  ZHQZ57   0                        EOM
-  00001C02  0760FF80  07640226...000100EA       07     550.0 MB   HGT
-    211  00  ZHQZ55   0                        EOM
-  00001C02  0760FF80  0764020D...000100EA       07     525.0 MB   HGT
-    211  00  ZHQZ52   0                        EOM
-  00001C02  0760FF80  076401F4...000100EA       07     500.0 MB   HGT
-    211  00  ZHQZ50   0                        EOM
-  00001C02  0760FF80  076401C2...000100EA       07     450.0 MB   HGT
-    211  00  ZHQZ45   0                        EOM
-  00001C02  0760FF80  07640190...000100EA       07     400.0 MB   HGT
-    211  00  ZHQZ40   0                        EOM
-  00001C02  0760FF80  0764015E...000100EA       07     350.0 MB   HGT
-    211  00  ZHQZ35   0                        EOM
-  00001C02  0760FF80  0764012C...000100EA       07     300.0 MB   HGT
-    211  00  ZHQZ30   0                        EOM
-  00001C02  0760FF80  076400FA...000100EA       07     250.0 MB   HGT
-    211  00  ZHQZ25   0                        EOM
-  00001C02  0760FF80  076400C8...000100EA       07     200.0 MB   HGT
-    211  00  ZHQZ20   0                        EOM
-  00001C02  0760FF80  07640096...000100EA       07     150.0 MB   HGT
-    211  00  ZHQZ15   0                        EOM
-  00001C02  0760FF80  07640064...000100EA       07     100.0 MB   HGT
-    211  00  ZHQZ10   0                        EOM
-  00001C02  0760FF80  216403E8...000100EA       33    1000.0 MB   U GRD 
-    211  00  ZUQZ99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100EA       33     975.0 MB   U GRD
-    211  00  ZUQZ93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100EA       33     950.0 MB   U GRD
-    211  00  ZUQZ95   0                        EOM
-  00001C02  0760FF80  2164039D...000100EA       33     925.0 MB   U GRD
-    211  00  ZUQZ92   0                        EOM
-  00001C02  0760FF80  21640384...000100EA       33     900.0 MB   U GRD
-    211  00  ZUQZ90   0                        EOM
-  00001C02  0760FF80  2164036B...000100EA       33     875.0 MB   U GRD
-    211  00  ZUQZ91   0                        EOM
-  00001C02  0760FF80  21640352...000100EA       33     850.0 MB   U GRD
-    211  00  ZUQZ85   0                        EOM
-  00001C02  0760FF80  21640339...000100EA       33     825.0 MB   U GRD
-    211  00  ZUQZ82   0                        EOM
-  00001C02  0760FF80  21640320...000100EA       33     800.0 MB   U GRD
-    211  00  ZUQZ80   0                        EOM
-  00001C02  0760FF80  21640307...000100EA       33     775.0 MB   U GRD
-    211  00  ZUQZ77   0                        EOM
-  00001C02  0760FF80  216402EE...000100EA       33     750.0 MB   U GRD
-    211  00  ZUQZ75   0                        EOM
-  00001C02  0760FF80  216402D5...000100EA       33     725.0 MB   U GRD
-    211  00  ZUQZ72   0                        EOM
-  00001C02  0760FF80  216402BC...000100EA       33     700.0 MB   U GRD
-    211  00  ZUQZ70   0                        EOM
-  00001C02  0760FF80  216402A3...000100EA       33     675.0 MB   U GRD
-    211  00  ZUQZ67   0                        EOM
-  00001C02  0760FF80  2164028A...000100EA       33     650.0 MB   U GRD
-    211  00  ZUQZ65   0                        EOM
-  00001C02  0760FF80  21640271...000100EA       33     625.0 MB   U GRD
-    211  00  ZUQZ62   0                        EOM
-  00001C02  0760FF80  21640258...000100EA       33     600.0 MB   U GRD
-    211  00  ZUQZ60   0                        EOM
-  00001C02  0760FF80  2164023F...000100EA       33     575.0 MB   U GRD
-    211  00  ZUQZ57   0                        EOM
-  00001C02  0760FF80  21640226...000100EA       33     550.0 MB   U GRD
-    211  00  ZUQZ55   0                        EOM
-  00001C02  0760FF80  2164020D...000100EA       33     525.0 MB   U GRD
-    211  00  ZUQZ52   0                        EOM
-  00001C02  0760FF80  216401F4...000100EA       33     500.0 MB   U GRD
-    211  00  ZUQZ50   0                        EOM
-  00001C02  0760FF80  216401C2...000100EA       33     450.0 MB   U GRD
-    211  00  ZUQZ45   0                        EOM
-  00001C02  0760FF80  21640190...000100EA       33     400.0 MB   U GRD
-    211  00  ZUQZ40   0                        EOM
-  00001C02  0760FF80  2164015E...000100EA       33     350.0 MB   U GRD
-    211  00  ZUQZ35   0                        EOM
-  00001C02  0760FF80  2164012C...000100EA       33     300.0 MB   U GRD
-    211  00  ZUQZ30   0                        EOM
-  00001C02  0760FF80  216400FA...000100EA       33     250.0 MB   U GRD
-    211  00  ZUQZ25   0                        EOM
-  00001C02  0760FF80  216400C8...000100EA       33     200.0 MB   U GRD
-    211  00  ZUQZ20   0                        EOM
-  00001C02  0760FF80  21640096...000100EA       33     150.0 MB   U GRD
-    211  00  ZUQZ15   0                        EOM
-  00001C02  0760FF80  21640064...000100EA       33     100.0 MB   U GRD
-    211  00  ZUQZ10   0                        EOM
-  00001C02  0760FF80  226403E8...000100EA       34    1000.0 MB   V GRD
-    211  00  ZVQZ99   0                        EOM
-  00001C02  0760FF80  226403CF...000100EA       34     975.0 MB   V GRD
-    211  00  ZVQZ93   0                        EOM
-  00001C02  0760FF80  226403B6...000100EA       34     950.0 MB   V GRD
-    211  00  ZVQZ95   0                        EOM
-  00001C02  0760FF80  2264039D...000100EA       34     925.0 MB   V GRD
-    211  00  ZVQZ92   0                        EOM
-  00001C02  0760FF80  22640384...000100EA       34     900.0 MB   V GRD
-    211  00  ZVQZ90   0                        EOM
-  00001C02  0760FF80  2264036B...000100EA       34     875.0 MB   V GRD
-    211  00  ZVQZ91   0                        EOM
-  00001C02  0760FF80  22640352...000100EA       34     850.0 MB   V GRD
-    211  00  ZVQZ85   0                        EOM
-  00001C02  0760FF80  22640339...000100EA       34     825.0 MB   V GRD
-    211  00  ZVQZ82   0                        EOM
-  00001C02  0760FF80  22640320...000100EA       34     800.0 MB   V GRD
-    211  00  ZVQZ80   0                        EOM
-  00001C02  0760FF80  22640307...000100EA       34     775.0 MB   V GRD
-    211  00  ZVQZ77   0                        EOM
-  00001C02  0760FF80  226402EE...000100EA       34     750.0 MB   V GRD
-    211  00  ZVQZ75   0                        EOM
-  00001C02  0760FF80  226402D5...000100EA       34     725.0 MB   V GRD
-    211  00  ZVQZ72   0                        EOM
-  00001C02  0760FF80  226402BC...000100EA       34     700.0 MB   V GRD
-    211  00  ZVQZ70   0                        EOM
-  00001C02  0760FF80  226402A3...000100EA       34     675.0 MB   V GRD
-    211  00  ZVQZ67   0                        EOM
-  00001C02  0760FF80  2264028A...000100EA       34     650.0 MB   V GRD
-    211  00  ZVQZ65   0                        EOM
-  00001C02  0760FF80  22640271...000100EA       34     625.0 MB   V GRD
-    211  00  ZVQZ62   0                        EOM
-  00001C02  0760FF80  22640258...000100EA       34     600.0 MB   V GRD
-    211  00  ZVQZ60   0                        EOM
-  00001C02  0760FF80  2264023F...000100EA       34     575.0 MB   V GRD
-    211  00  ZVQZ57   0                        EOM
-  00001C02  0760FF80  22640226...000100EA       34     550.0 MB   V GRD
-    211  00  ZVQZ55   0                        EOM
-  00001C02  0760FF80  2264020D...000100EA       34     525.0 MB   V GRD
-    211  00  ZVQZ52   0                        EOM
-  00001C02  0760FF80  226401F4...000100EA       34     500.0 MB   V GRD
-    211  00  ZVQZ50   0                        EOM
-  00001C02  0760FF80  226401C2...000100EA       34     450.0 MB   V GRD
-    211  00  ZVQZ45   0                        EOM
-  00001C02  0760FF80  22640190...000100EA       34     400.0 MB   V GRD
-    211  00  ZVQZ40   0                        EOM
-  00001C02  0760FF80  2264015E...000100EA       34     350.0 MB   V GRD
-    211  00  ZVQZ35   0                        EOM
-  00001C02  0760FF80  2264012C...000100EA       34     300.0 MB   V GRD
-    211  00  ZVQZ30   0                        EOM
-  00001C02  0760FF80  226400FA...000100EA       34     250.0 MB   V GRD
-    211  00  ZVQZ25   0                        EOM
-  00001C02  0760FF80  226400C8...000100EA       34     200.0 MB   V GRD
-    211  00  ZVQZ20   0                        EOM
-  00001C02  0760FF80  22640096...000100EA       34     150.0 MB   V GRD
-    211  00  ZVQZ15   0                        EOM
-  00001C02  0760FF80  22640064...000100EA       34     100.0 MB   V GRD
-    211  00  ZVQZ10   0                        EOM
-  00001C02  0760FF80  02660000...000100EA       02           MSL  PRMSL
-    211  00  ZPQZ89   0                        EOM
-  00001C02  0760FF80  3D010000...000100EA       61           SFC  A PCP
-    211  00  ZEQZ98   0                        EOM
-  00001C02  0760FF80  346403E8...000100EA       52    1000.0 MB   R H
-    211  00  ZRQZ99   0                        EOM
-  00001C02  0760FF80  346403CF...000100EA       52     975.0 MB   R H
-    211  00  ZRQZ93   0                        EOM
-  00001C02  0760FF80  346403B6...000100EA       52     950.0 MB   R H
-    211  00  ZRQZ95   0                        EOM
-  00001C02  0760FF80  3464039D...000100EA       52     925.0 MB   R H
-    211  00  ZRQZ92   0                        EOM
-  00001C02  0760FF80  34640384...000100EA       52     900.0 MB   R H
-    211  00  ZRQZ90   0                        EOM
-  00001C02  0760FF80  3464036B...000100EA       52     875.0 MB   R H
-    211  00  ZRQZ91   0                        EOM
-  00001C02  0760FF80  34640352...000100EA       52     850.0 MB   R H
-    211  00  ZRQZ85   0                        EOM
-  00001C02  0760FF80  34640339...000100EA       52     825.0 MB   R H
-    211  00  ZRQZ82   0                        EOM
-  00001C02  0760FF80  34640320...000100EA       52     800.0 MB   R H
-    211  00  ZRQZ80   0                        EOM
-  00001C02  0760FF80  34640307...000100EA       52     775.0 MB   R H
-    211  00  ZRQZ77   0                        EOM
-  00001C02  0760FF80  346402EE...000100EA       52     750.0 MB   R H
-    211  00  ZRQZ75   0                        EOM
-  00001C02  0760FF80  346402D5...000100EA       52     725.0 MB   R H
-    211  00  ZRQZ72   0                        EOM
-  00001C02  0760FF80  346402BC...000100EA       52     700.0 MB   R H
-    211  00  ZRQZ70   0                        EOM
-  00001C02  0760FF80  346402A3...000100EA       52     675.0 MB   R H
-    211  00  ZRQZ67   0                        EOM
-  00001C02  0760FF80  3464028A...000100EA       52     650.0 MB   R H
-    211  00  ZRQZ65   0                        EOM
-  00001C02  0760FF80  34640271...000100EA       52     625.0 MB   R H
-    211  00  ZRQZ62   0                        EOM
-  00001C02  0760FF80  34640258...000100EA       52     600.0 MB   R H
-    211  00  ZRQZ60   0                        EOM
-  00001C02  0760FF80  3464023F...000100EA       52     575.0 MB   R H
-    211  00  ZRQZ57   0                        EOM
-  00001C02  0760FF80  34640226...000100EA       52     550.0 MB   R H
-    211  00  ZRQZ55   0                        EOM
-  00001C02  0760FF80  3464020D...000100EA       52     525.0 MB   R H
-    211  00  ZRQZ52   0                        EOM
-  00001C02  0760FF80  346401F4...000100EA       52     500.0 MB   R H
-    211  00  ZRQZ50   0                        EOM
-  00001C02  0760FF80  346401C2...000100EA       52     450.0 MB   R H
-    211  00  ZRQZ45   0                        EOM
-  00001C02  0760FF80  34640190...000100EA       52     400.0 MB   R H
-    211  00  ZRQZ40   0                        EOM
-  00001C02  0760FF80  3464015E...000100EA       52     350.0 MB   R H
-    211  00  ZRQZ35   0                        EOM
-  00001C02  0760FF80  3464012C...000100EA       52     300.0 MB   R H
-    211  00  ZRQZ30   0                        EOM
-  00001C02  0760FF80  346400FA...000100EA       52     250.0 MB   R H
-    211  00  ZRQZ25   0                        EOM
-  00001C02  0760FF80  346400C8...000100EA       52     200.0 MB   R H
-    211  00  ZRQZ20   0                        EOM
-  00001C02  0760FF80  34640096...000100EA       52     150.0 MB   R H
-    211  00  ZRQZ15   0                        EOM
-  00001C02  0760FF80  34640064...000100EA       52     100.0 MB   R H
-    211  00  ZRQZ10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100EA       11    1000.0 MB   TMP
-    211  00  ZTQZ99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100EA       11     975.0 MB   TMP
-    211  00  ZTQZ93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100EA       11     950.0 MB   TMP
-    211  00  ZTQZ95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100EA       11     925.0 MB   TMP
-    211  00  ZTQZ92   0                        EOM
-  00001C02  0760FF80  0B640384...000100EA       11     900.0 MB   TMP
-    211  00  ZTQZ90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100EA       11     875.0 MB   TMP
-    211  00  ZTQZ91   0                        EOM
-  00001C02  0760FF80  0B640352...000100EA       11     850.0 MB   TMP
-    211  00  ZTQZ85   0                        EOM
-  00001C02  0760FF80  0B640339...000100EA       11     825.0 MB   TMP
-    211  00  ZTQZ82   0                        EOM
-  00001C02  0760FF80  0B640320...000100EA       11     800.0 MB   TMP
-    211  00  ZTQZ80   0                        EOM
-  00001C02  0760FF80  0B640307...000100EA       11     775.0 MB   TMP
-    211  00  ZTQZ77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100EA       11     750.0 MB   TMP
-    211  00  ZTQZ75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100EA       11     725.0 MB   TMP
-    211  00  ZTQZ72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100EA       11     700.0 MB   TMP
-    211  00  ZTQZ70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100EA       11     675.0 MB   TMP
-    211  00  ZTQZ67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100EA       11     650.0 MB   TMP
-    211  00  ZTQZ65   0                        EOM
-  00001C02  0760FF80  0B640271...000100EA       11     625.0 MB   TMP
-    211  00  ZTQZ62   0                        EOM
-  00001C02  0760FF80  0B640258...000100EA       11     600.0 MB   TMP
-    211  00  ZTQZ60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100EA       11     575.0 MB   TMP
-    211  00  ZTQZ57   0                        EOM
-  00001C02  0760FF80  0B640226...000100EA       11     550.0 MB   TMP
-    211  00  ZTQZ55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100EA       11     525.0 MB   TMP
-    211  00  ZTQZ52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100EA       11     500.0 MB   TMP
-    211  00  ZTQZ50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100EA       11     450.0 MB   TMP
-    211  00  ZTQZ45   0                        EOM
-  00001C02  0760FF80  0B640190...000100EA       11     400.0 MB   TMP
-    211  00  ZTQZ40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100EA       11     350.0 MB   TMP
-    211  00  ZTQZ35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100EA       11     300.0 MB   TMP
-    211  00  ZTQZ30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100EA       11     250.0 MB   TMP
-    211  00  ZTQZ25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100EA       11     200.0 MB   TMP
-    211  00  ZTQZ20   0                        EOM
-  00001C02  0760FF80  0B640096...000100EA       11     150.0 MB   TMP
-    211  00  ZTQZ15   0                        EOM
-  00001C02  0760FF80  0B640064...000100EA       11     100.0 MB   TMP
-    211  00  ZTQZ10   0                        EOM
-  00001C02  0760FF80  28640352...000100EA       40     850.0 MB  DZDT
-    211  00  ZOQZ85   0                        EOM
-  00001C02  0760FF80  286402BC...000100EA       40     700.0 MB  DZDT
-    211  00  ZOQZ70   0                        EOM
-  00001C02  0760FF80  286401F4...000100EA       40     500.0 MB  DZDT
-    211  00  ZOQZ50   0                        EOM
-  00001C02  0760FF80  01010000...000100EA       01          SFC  PRES
-    211  00  ZPQZ98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100EA       52        44/100  R H
-    211  00  ZRQZ00   0                        EOM
-  00001C02  0760FF80  296401F4...000100EA       41     500.0 MB ABS V
-    211  00  ZCQZ50   0                        EOM
-  00001C02  0760FF80  9D010000...000100EA      157          SFC   CAPE
-    211  00  ZWQZ98   0                        EOM
-  00001C02  0760FF80  9C010000...000100EA      156          SFC   CIN
-    211  00  ZYQZ98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100EA      157  180 SPDY 0 SPDY  CAPE
-    211  00  ZWQZ86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100EA      156  180 SPDY 0 SPDY  CIN
-    211  00  ZYQZ86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100EA       11   30 SPDY   0 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100EA       11   60 SPDY  30 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100EA       11   90 SPDY  60 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100EA       11  120 SPDY  90 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B749678...000100EA       11  150 SPDY 120 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100EA       11  180 SPDY 150 SPDY  TMP
-    211  00  ZTQZ86   0                        EOM
-  00001C02  0760FF80  34741E00...000100EA       52   30 SPDY   0 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100EA       52   60 SPDY  30 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100EA       52   90 SPDY  60 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474785A...000100EA       52  120 SPDY  90 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  34749678...000100EA       52  150 SPDY 120 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  3474B496...000100EA       52  180 SPDY 150 SPDY  R H
-    211  00  ZRQZ86   0                        EOM
-  00001C02  0760FF80  21741E00...000100EA       33   30 SPDY   0 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100EA       33   60 SPDY  30 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100EA       33   90 SPDY  60 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174785A...000100EA       33  120 SPDY  90 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  21749678...000100EA       33  150 SPDY 120 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  2174B496...000100EA       33  180 SPDY 150 SPDY  U GRD
-    211  00  ZUQZ86   0                        EOM
-  00001C02  0760FF80  22741E00...000100EA       34   30 SPDY   0 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100EA       34   60 SPDY  30 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100EA       34   90 SPDY  60 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274785A...000100EA       34  120 SPDY  90 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  22749678...000100EA       34  150 SPDY 120 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  2274B496...000100EA       34  180 SPDY 150 SPDY  V GRD
-    211  00  ZVQZ86   0                        EOM
-  00001C02  0760FF80  0B690002...000100EA       11    2  HTGL     TMP
-    211  00  ZTQZ98   0                        EOM
-  00001C02  0760FF80  34690002...000100EA       52    2  HTGL     R H
-    211  00  ZRQZ98   0                        EOM
-  00001C02  0760FF80  2169000A...000100EA       33   10  HTGL     U GRD
-    211  00  ZUQZ98   0                        EOM
-  00001C02  0760FF80  2269000A...000100EA       34   10  HTGL     V GRD
-    211  00  ZVQZ98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/parm/wmo/grib_awpgfs240.211 b/parm/wmo/grib_awpgfs240.211
deleted file mode 100755
index 60171803e0..0000000000
--- a/parm/wmo/grib_awpgfs240.211
+++ /dev/null
@@ -1,371 +0,0 @@
-  00001C02  0760FF80  076403E8...000100F0       07    1000.0 MB   HGT
-    211  00  YHQY99   0                        EOM
-  00001C02  0760FF80  076403CF...000100F0       07     975.0 MB   HGT
-    211  00  YHQY93   0                        EOM
-  00001C02  0760FF80  076403B6...000100F0       07     950.0 MB   HGT
-    211  00  YHQY95   0                        EOM
-  00001C02  0760FF80  0764039D...000100F0       07     925.0 MB   HGT
-    211  00  YHQY92   0                        EOM
-  00001C02  0760FF80  07640384...000100F0       07     900.0 MB   HGT
-    211  00  YHQY90   0                        EOM
-  00001C02  0760FF80  0764036B...000100F0       07     875.0 MB   HGT
-    211  00  YHQY91   0                        EOM
-  00001C02  0760FF80  07640352...000100F0       07     850.0 MB   HGT
-    211  00  YHQY85   0                        EOM
-  00001C02  0760FF80  07640339...000100F0       07     825.0 MB   HGT
-    211  00  YHQY82   0                        EOM
-  00001C02  0760FF80  07640320...000100F0       07     800.0 MB   HGT
-    211  00  YHQY80   0                        EOM
-  00001C02  0760FF80  07640307...000100F0       07     775.0 MB   HGT
-    211  00  YHQY77   0                        EOM
-  00001C02  0760FF80  076402EE...000100F0       07     750.0 MB   HGT
-    211  00  YHQY75   0                        EOM
-  00001C02  0760FF80  076402D5...000100F0       07     725.0 MB   HGT
-    211  00  YHQY72   0                        EOM
-  00001C02  0760FF80  076402BC...000100F0       07     700.0 MB   HGT
-    211  00  YHQY70   0                        EOM
-  00001C02  0760FF80  076402A3...000100F0       07     675.0 MB   HGT
-    211  00  YHQY67   0                        EOM
-  00001C02  0760FF80  0764028A...000100F0       07     650.0 MB   HGT
-    211  00  YHQY65   0                        EOM
-  00001C02  0760FF80  07640271...000100F0       07     625.0 MB   HGT
-    211  00  YHQY62   0                        EOM
-  00001C02  0760FF80  07640258...000100F0       07     600.0 MB   HGT
-    211  00  YHQY60   0                        EOM
-  00001C02  0760FF80  0764023F...000100F0       07     575.0 MB   HGT
-    211  00  YHQY57   0                        EOM
-  00001C02  0760FF80  07640226...000100F0       07     550.0 MB   HGT
-    211  00  YHQY55   0                        EOM
-  00001C02  0760FF80  0764020D...000100F0       07     525.0 MB   HGT
-    211  00  YHQY52   0                        EOM
-  00001C02  0760FF80  076401F4...000100F0       07     500.0 MB   HGT
-    211  00  YHQY50   0                        EOM
-  00001C02  0760FF80  076401C2...000100F0       07     450.0 MB   HGT
-    211  00  YHQY45   0                        EOM
-  00001C02  0760FF80  07640190...000100F0       07     400.0 MB   HGT
-    211  00  YHQY40   0                        EOM
-  00001C02  0760FF80  0764015E...000100F0       07     350.0 MB   HGT
-    211  00  YHQY35   0                        EOM
-  00001C02  0760FF80  0764012C...000100F0       07     300.0 MB   HGT
-    211  00  YHQY30   0                        EOM
-  00001C02  0760FF80  076400FA...000100F0       07     250.0 MB   HGT
-    211  00  YHQY25   0                        EOM
-  00001C02  0760FF80  076400C8...000100F0       07     200.0 MB   HGT
-    211  00  YHQY20   0                        EOM
-  00001C02  0760FF80  07640096...000100F0       07     150.0 MB   HGT
-    211  00  YHQY15   0                        EOM
-  00001C02  0760FF80  07640064...000100F0       07     100.0 MB   HGT
-    211  00  YHQY10   0                        EOM
-  00001C02  0760FF80  216403E8...000100F0       33    1000.0 MB   U GRD 
-    211  00  YUQY99   0                        EOM 
-  00001C02  0760FF80  216403CF...000100F0       33     975.0 MB   U GRD
-    211  00  YUQY93   0                        EOM 
-  00001C02  0760FF80  216403B6...000100F0       33     950.0 MB   U GRD
-    211  00  YUQY95   0                        EOM
-  00001C02  0760FF80  2164039D...000100F0       33     925.0 MB   U GRD
-    211  00  YUQY92   0                        EOM
-  00001C02  0760FF80  21640384...000100F0       33     900.0 MB   U GRD
-    211  00  YUQY90   0                        EOM
-  00001C02  0760FF80  2164036B...000100F0       33     875.0 MB   U GRD
-    211  00  YUQY91   0                        EOM
-  00001C02  0760FF80  21640352...000100F0       33     850.0 MB   U GRD
-    211  00  YUQY85   0                        EOM
-  00001C02  0760FF80  21640339...000100F0       33     825.0 MB   U GRD
-    211  00  YUQY82   0                        EOM
-  00001C02  0760FF80  21640320...000100F0       33     800.0 MB   U GRD
-    211  00  YUQY80   0                        EOM
-  00001C02  0760FF80  21640307...000100F0       33     775.0 MB   U GRD
-    211  00  YUQY77   0                        EOM
-  00001C02  0760FF80  216402EE...000100F0       33     750.0 MB   U GRD
-    211  00  YUQY75   0                        EOM
-  00001C02  0760FF80  216402D5...000100F0       33     725.0 MB   U GRD
-    211  00  YUQY72   0                        EOM
-  00001C02  0760FF80  216402BC...000100F0       33     700.0 MB   U GRD
-    211  00  YUQY70   0                        EOM
-  00001C02  0760FF80  216402A3...000100F0       33     675.0 MB   U GRD
-    211  00  YUQY67   0                        EOM
-  00001C02  0760FF80  2164028A...000100F0       33     650.0 MB   U GRD
-    211  00  YUQY65   0                        EOM
-  00001C02  0760FF80  21640271...000100F0       33     625.0 MB   U GRD
-    211  00  YUQY62   0                        EOM
-  00001C02  0760FF80  21640258...000100F0       33     600.0 MB   U GRD
-    211  00  YUQY60   0                        EOM
-  00001C02  0760FF80  2164023F...000100F0       33     575.0 MB   U GRD
-    211  00  YUQY57   0                        EOM
-  00001C02  0760FF80  21640226...000100F0       33     550.0 MB   U GRD
-    211  00  YUQY55   0                        EOM
-  00001C02  0760FF80  2164020D...000100F0       33     525.0 MB   U GRD
-    211  00  YUQY52   0                        EOM
-  00001C02  0760FF80  216401F4...000100F0       33     500.0 MB   U GRD
-    211  00  YUQY50   0                        EOM
-  00001C02  0760FF80  216401C2...000100F0       33     450.0 MB   U GRD
-    211  00  YUQY45   0                        EOM
-  00001C02  0760FF80  21640190...000100F0       33     400.0 MB   U GRD
-    211  00  YUQY40   0                        EOM
-  00001C02  0760FF80  2164015E...000100F0       33     350.0 MB   U GRD
-    211  00  YUQY35   0                        EOM
-  00001C02  0760FF80  2164012C...000100F0       33     300.0 MB   U GRD
-    211  00  YUQY30   0                        EOM
-  00001C02  0760FF80  216400FA...000100F0       33     250.0 MB   U GRD
-    211  00  YUQY25   0                        EOM
-  00001C02  0760FF80  216400C8...000100F0       33     200.0 MB   U GRD
-    211  00  YUQY20   0                        EOM
-  00001C02  0760FF80  21640096...000100F0       33     150.0 MB   U GRD
-    211  00  YUQY15   0                        EOM
-  00001C02  0760FF80  21640064...000100F0       33     100.0 MB   U GRD
-    211  00  YUQY10   0                        EOM
-  00001C02  0760FF80  226403E8...000100F0       34    1000.0 MB   V GRD
-    211  00  YVQY99   0                        EOM
-  00001C02  0760FF80  226403CF...000100F0       34     975.0 MB   V GRD
-    211  00  YVQY93   0                        EOM
-  00001C02  0760FF80  226403B6...000100F0       34     950.0 MB   V GRD
-    211  00  YVQY95   0                        EOM
-  00001C02  0760FF80  2264039D...000100F0       34     925.0 MB   V GRD
-    211  00  YVQY92   0                        EOM
-  00001C02  0760FF80  22640384...000100F0       34     900.0 MB   V GRD
-    211  00  YVQY90   0                        EOM
-  00001C02  0760FF80  2264036B...000100F0       34     875.0 MB   V GRD
-    211  00  YVQY91   0                        EOM
-  00001C02  0760FF80  22640352...000100F0       34     850.0 MB   V GRD
-    211  00  YVQY85   0                        EOM
-  00001C02  0760FF80  22640339...000100F0       34     825.0 MB   V GRD
-    211  00  YVQY82   0                        EOM
-  00001C02  0760FF80  22640320...000100F0       34     800.0 MB   V GRD
-    211  00  YVQY80   0                        EOM
-  00001C02  0760FF80  22640307...000100F0       34     775.0 MB   V GRD
-    211  00  YVQY77   0                        EOM
-  00001C02  0760FF80  226402EE...000100F0       34     750.0 MB   V GRD
-    211  00  YVQY75   0                        EOM
-  00001C02  0760FF80  226402D5...000100F0       34     725.0 MB   V GRD
-    211  00  YVQY72   0                        EOM
-  00001C02  0760FF80  226402BC...000100F0       34     700.0 MB   V GRD
-    211  00  YVQY70   0                        EOM
-  00001C02  0760FF80  226402A3...000100F0       34     675.0 MB   V GRD
-    211  00  YVQY67   0                        EOM
-  00001C02  0760FF80  2264028A...000100F0       34     650.0 MB   V GRD
-    211  00  YVQY65   0                        EOM
-  00001C02  0760FF80  22640271...000100F0       34     625.0 MB   V GRD
-    211  00  YVQY62   0                        EOM
-  00001C02  0760FF80  22640258...000100F0       34     600.0 MB   V GRD
-    211  00  YVQY60   0                        EOM
-  00001C02  0760FF80  2264023F...000100F0       34     575.0 MB   V GRD
-    211  00  YVQY57   0                        EOM
-  00001C02  0760FF80  22640226...000100F0       34     550.0 MB   V GRD
-    211  00  YVQY55   0                        EOM
-  00001C02  0760FF80  2264020D...000100F0       34     525.0 MB   V GRD
-    211  00  YVQY52   0                        EOM
-  00001C02  0760FF80  226401F4...000100F0       34     500.0 MB   V GRD
-    211  00  YVQY50   0                        EOM
-  00001C02  0760FF80  226401C2...000100F0       34     450.0 MB   V GRD
-    211  00  YVQY45   0                        EOM
-  00001C02  0760FF80  22640190...000100F0       34     400.0 MB   V GRD
-    211  00  YVQY40   0                        EOM
-  00001C02  0760FF80  2264015E...000100F0       34     350.0 MB   V GRD
-    211  00  YVQY35   0                        EOM
-  00001C02  0760FF80  2264012C...000100F0       34     300.0 MB   V GRD
-    211  00  YVQY30   0                        EOM
-  00001C02  0760FF80  226400FA...000100F0       34     250.0 MB   V GRD
-    211  00  YVQY25   0                        EOM
-  00001C02  0760FF80  226400C8...000100F0       34     200.0 MB   V GRD
-    211  00  YVQY20   0                        EOM
-  00001C02  0760FF80  22640096...000100F0       34     150.0 MB   V GRD
-    211  00  YVQY15   0                        EOM
-  00001C02  0760FF80  22640064...000100F0       34     100.0 MB   V GRD
-    211  00  YVQY10   0                        EOM
-  00001C02  0760FF80  02660000...000100F0       02           MSL  PRMSL
-    211  00  YPQY89   0                        EOM
-  00001C02  0760FF80  3D010000...000100F0       61           SFC  A PCP
-    211  00  YEQY98   0                        EOM
-  00001C02  0760FF80  346403E8...000100F0       52    1000.0 MB   R H
-    211  00  YRQY99   0                        EOM
-  00001C02  0760FF80  346403CF...000100F0       52     975.0 MB   R H
-    211  00  YRQY93   0                        EOM
-  00001C02  0760FF80  346403B6...000100F0       52     950.0 MB   R H
-    211  00  YRQY95   0                        EOM
-  00001C02  0760FF80  3464039D...000100F0       52     925.0 MB   R H
-    211  00  YRQY92   0                        EOM
-  00001C02  0760FF80  34640384...000100F0       52     900.0 MB   R H
-    211  00  YRQY90   0                        EOM
-  00001C02  0760FF80  3464036B...000100F0       52     875.0 MB   R H
-    211  00  YRQY91   0                        EOM
-  00001C02  0760FF80  34640352...000100F0       52     850.0 MB   R H
-    211  00  YRQY85   0                        EOM
-  00001C02  0760FF80  34640339...000100F0       52     825.0 MB   R H
-    211  00  YRQY82   0                        EOM
-  00001C02  0760FF80  34640320...000100F0       52     800.0 MB   R H
-    211  00  YRQY80   0                        EOM
-  00001C02  0760FF80  34640307...000100F0       52     775.0 MB   R H
-    211  00  YRQY77   0                        EOM
-  00001C02  0760FF80  346402EE...000100F0       52     750.0 MB   R H
-    211  00  YRQY75   0                        EOM
-  00001C02  0760FF80  346402D5...000100F0       52     725.0 MB   R H
-    211  00  YRQY72   0                        EOM
-  00001C02  0760FF80  346402BC...000100F0       52     700.0 MB   R H
-    211  00  YRQY70   0                        EOM
-  00001C02  0760FF80  346402A3...000100F0       52     675.0 MB   R H
-    211  00  YRQY67   0                        EOM
-  00001C02  0760FF80  3464028A...000100F0       52     650.0 MB   R H
-    211  00  YRQY65   0                        EOM
-  00001C02  0760FF80  34640271...000100F0       52     625.0 MB   R H
-    211  00  YRQY62   0                        EOM
-  00001C02  0760FF80  34640258...000100F0       52     600.0 MB   R H
-    211  00  YRQY60   0                        EOM
-  00001C02  0760FF80  3464023F...000100F0       52     575.0 MB   R H
-    211  00  YRQY57   0                        EOM
-  00001C02  0760FF80  34640226...000100F0       52     550.0 MB   R H
-    211  00  YRQY55   0                        EOM
-  00001C02  0760FF80  3464020D...000100F0       52     525.0 MB   R H
-    211  00  YRQY52   0                        EOM
-  00001C02  0760FF80  346401F4...000100F0       52     500.0 MB   R H
-    211  00  YRQY50   0                        EOM
-  00001C02  0760FF80  346401C2...000100F0       52     450.0 MB   R H
-    211  00  YRQY45   0                        EOM
-  00001C02  0760FF80  34640190...000100F0       52     400.0 MB   R H
-    211  00  YRQY40   0                        EOM
-  00001C02  0760FF80  3464015E...000100F0       52     350.0 MB   R H
-    211  00  YRQY35   0                        EOM
-  00001C02  0760FF80  3464012C...000100F0       52     300.0 MB   R H
-    211  00  YRQY30   0                        EOM
-  00001C02  0760FF80  346400FA...000100F0       52     250.0 MB   R H
-    211  00  YRQY25   0                        EOM
-  00001C02  0760FF80  346400C8...000100F0       52     200.0 MB   R H
-    211  00  YRQY20   0                        EOM
-  00001C02  0760FF80  34640096...000100F0       52     150.0 MB   R H
-    211  00  YRQY15   0                        EOM
-  00001C02  0760FF80  34640064...000100F0       52     100.0 MB   R H
-    211  00  YRQY10   0                        EOM
-  00001C02  0760FF80  0B6403E8...000100F0       11    1000.0 MB   TMP
-    211  00  YTQY99   0                        EOM
-  00001C02  0760FF80  0B6403CF...000100F0       11     975.0 MB   TMP
-    211  00  YTQY93   0                        EOM
-  00001C02  0760FF80  0B6403B6...000100F0       11     950.0 MB   TMP
-    211  00  YTQY95   0                        EOM
-  00001C02  0760FF80  0B64039D...000100F0       11     925.0 MB   TMP
-    211  00  YTQY92   0                        EOM
-  00001C02  0760FF80  0B640384...000100F0       11     900.0 MB   TMP
-    211  00  YTQY90   0                        EOM
-  00001C02  0760FF80  0B64036B...000100F0       11     875.0 MB   TMP
-    211  00  YTQY91   0                        EOM
-  00001C02  0760FF80  0B640352...000100F0       11     850.0 MB   TMP
-    211  00  YTQY85   0                        EOM
-  00001C02  0760FF80  0B640339...000100F0       11     825.0 MB   TMP
-    211  00  YTQY82   0                        EOM
-  00001C02  0760FF80  0B640320...000100F0       11     800.0 MB   TMP
-    211  00  YTQY80   0                        EOM
-  00001C02  0760FF80  0B640307...000100F0       11     775.0 MB   TMP
-    211  00  YTQY77   0                        EOM
-  00001C02  0760FF80  0B6402EE...000100F0       11     750.0 MB   TMP
-    211  00  YTQY75   0                        EOM
-  00001C02  0760FF80  0B6402D5...000100F0       11     725.0 MB   TMP
-    211  00  YTQY72   0                        EOM
-  00001C02  0760FF80  0B6402BC...000100F0       11     700.0 MB   TMP
-    211  00  YTQY70   0                        EOM
-  00001C02  0760FF80  0B6402A3...000100F0       11     675.0 MB   TMP
-    211  00  YTQY67   0                        EOM
-  00001C02  0760FF80  0B64028A...000100F0       11     650.0 MB   TMP
-    211  00  YTQY65   0                        EOM
-  00001C02  0760FF80  0B640271...000100F0       11     625.0 MB   TMP
-    211  00  YTQY62   0                        EOM
-  00001C02  0760FF80  0B640258...000100F0       11     600.0 MB   TMP
-    211  00  YTQY60   0                        EOM
-  00001C02  0760FF80  0B64023F...000100F0       11     575.0 MB   TMP
-    211  00  YTQY57   0                        EOM
-  00001C02  0760FF80  0B640226...000100F0       11     550.0 MB   TMP
-    211  00  YTQY55   0                        EOM
-  00001C02  0760FF80  0B64020D...000100F0       11     525.0 MB   TMP
-    211  00  YTQY52   0                        EOM
-  00001C02  0760FF80  0B6401F4...000100F0       11     500.0 MB   TMP
-    211  00  YTQY50   0                        EOM
-  00001C02  0760FF80  0B6401C2...000100F0       11     450.0 MB   TMP
-    211  00  YTQY45   0                        EOM
-  00001C02  0760FF80  0B640190...000100F0       11     400.0 MB   TMP
-    211  00  YTQY40   0                        EOM
-  00001C02  0760FF80  0B64015E...000100F0       11     350.0 MB   TMP
-    211  00  YTQY35   0                        EOM
-  00001C02  0760FF80  0B64012C...000100F0       11     300.0 MB   TMP
-    211  00  YTQY30   0                        EOM
-  00001C02  0760FF80  0B6400FA...000100F0       11     250.0 MB   TMP
-    211  00  YTQY25   0                        EOM
-  00001C02  0760FF80  0B6400C8...000100F0       11     200.0 MB   TMP
-    211  00  YTQY20   0                        EOM
-  00001C02  0760FF80  0B640096...000100F0       11     150.0 MB   TMP
-    211  00  YTQY15   0                        EOM
-  00001C02  0760FF80  0B640064...000100F0       11     100.0 MB   TMP
-    211  00  YTQY10   0                        EOM
-  00001C02  0760FF80  28640352...000100F0       40     850.0 MB  DZDT
-    211  00  YOQY85   0                        EOM
-  00001C02  0760FF80  286402BC...000100F0       40     700.0 MB  DZDT
-    211  00  YOQY70   0                        EOM
-  00001C02  0760FF80  286401F4...000100F0       40     500.0 MB  DZDT
-    211  00  YOQY50   0                        EOM
-  00001C02  0760FF80  01010000...000100F0       01          SFC  PRES
-    211  00  YPQY98   0                        EOM
-  00001C02  0760FF80  346C2C64...000100F0       52        44/100  R H
-    211  00  YRQY00   0                        EOM
-  00001C02  0760FF80  296401F4...000100F0       41     500.0 MB ABS V
-    211  00  YCQY50   0                        EOM
-  00001C02  0760FF80  9D010000...000100F0      157          SFC   CAPE
-    211  00  YWQY98   0                        EOM
-  00001C02  0760FF80  9C010000...000100F0      156          SFC   CIN
-    211  00  YYQY98   0                        EOM
-  00001C02  0760FF80  9D74B400...000100F0      157  180 SPDY 0 SPDY  CAPE
-    211  00  YWQY86   0                        EOM
-  00001C02  0760FF80  9C74B400...000100F0      156  180 SPDY 0 SPDY  CIN
-    211  00  YYQY86   0                        EOM
-  00001C02  0760FF80  0B741E00...000100F0       11   30 SPDY   0 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  0B743C1E...000100F0       11   60 SPDY  30 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  0B745A3C...000100F0       11   90 SPDY  60 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  0B74785A...000100F0       11  120 SPDY  90 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  0B749678...000100F0       11  150 SPDY 120 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  0B74B496...000100F0       11  180 SPDY 150 SPDY  TMP
-    211  00  YTQY86   0                        EOM
-  00001C02  0760FF80  34741E00...000100F0       52   30 SPDY   0 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  34743C1E...000100F0       52   60 SPDY  30 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  34745A3C...000100F0       52   90 SPDY  60 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  3474785A...000100F0       52  120 SPDY  90 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  34749678...000100F0       52  150 SPDY 120 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  3474B496...000100F0       52  180 SPDY 150 SPDY  R H
-    211  00  YRQY86   0                        EOM
-  00001C02  0760FF80  21741E00...000100F0       33   30 SPDY   0 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  21743C1E...000100F0       33   60 SPDY  30 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  21745A3C...000100F0       33   90 SPDY  60 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  2174785A...000100F0       33  120 SPDY  90 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  21749678...000100F0       33  150 SPDY 120 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  2174B496...000100F0       33  180 SPDY 150 SPDY  U GRD
-    211  00  YUQY86   0                        EOM
-  00001C02  0760FF80  22741E00...000100F0       34   30 SPDY   0 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  22743C1E...000100F0       34   60 SPDY  30 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  22745A3C...000100F0       34   90 SPDY  60 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  2274785A...000100F0       34  120 SPDY  90 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  22749678...000100F0       34  150 SPDY 120 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  2274B496...000100F0       34  180 SPDY 150 SPDY  V GRD
-    211  00  YVQY86   0                        EOM
-  00001C02  0760FF80  0B690002...000100F0       11    2  HTGL     TMP
-    211  00  YTQY98   0                        EOM
-  00001C02  0760FF80  34690002...000100F0       52    2  HTGL     R H
-    211  00  YRQY98   0                        EOM
-  00001C02  0760FF80  2169000A...000100F0       33   10  HTGL     U GRD
-    211  00  YUQY98   0                        EOM
-  00001C02  0760FF80  2269000A...000100F0       34   10  HTGL     V GRD
-    211  00  YVQY98   0                        EOM
-  FFFFFFFF  00000000  00000000...00000000       00
diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh
index d48d58947e..1833ed7b1f 100755
--- a/scripts/exgdas_atmos_chgres_forenkf.sh
+++ b/scripts/exgdas_atmos_chgres_forenkf.sh
@@ -17,15 +17,13 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #  Directories.
 pwd=$(pwd)
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
 
 # Base variables
 CDATE=${CDATE:-"2001010100"}
-CDUMP=${CDUMP:-"enkfgdas"}
 GDUMP=${GDUMP:-"gdas"}
 
 # Derived base variables
@@ -37,11 +35,8 @@ bPDY=$(echo $BDATE | cut -c1-8)
 bcyc=$(echo $BDATE | cut -c9-10)
 
 # Utilities
-export NCP=${NCP:-"/bin/cp"}
-export NMV=${NMV:-"/bin/mv"}
-export NLN=${NLN:-"/bin/ln -sf"}
 export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"}
-export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+export NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 
 # IAU
 DOIAU=${DOIAU:-"NO"}
@@ -49,7 +44,7 @@ export IAUFHRS=${IAUFHRS:-"6"}
 
 # Dependent Scripts and Executables
 export APRUN_CHGRES=${APRUN_CHGRES:-${APRUN:-""}}
-export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x}
+export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECgfs}/enkf_chgres_recenter_nc.x}
 export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1}
 APRUNCFP=${APRUNCFP:-""}
 
@@ -59,7 +54,7 @@ SENDECF=${SENDECF:-"NO"}
 SENDDBN=${SENDDBN:-"NO"}
 
 # level info file
-SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt}
+SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
 
 # forecast files
 APREFIX=${APREFIX:-""}
@@ -129,7 +124,7 @@ if [ $DO_CALC_ANALYSIS == "YES" ]; then
       $NLN $ATMF09ENS  fcst.ensres.09
    fi
    export OMP_NUM_THREADS=$NTHREADS_CHGRES
-   SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS_ENKF}.txt}
+   SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS_ENKF}.txt}
 
    if [ $USE_CFP = "YES" ]; then
       [[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh
@@ -168,7 +163,7 @@ EOF
       chmod 755 $DATA/mp_chgres.sh
       ncmd=$(cat $DATA/mp_chgres.sh | wc -l)
       if [ $ncmd -gt 0 ]; then
-         ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max))
+         ncmd_max=$((ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node))
          APRUNCFP_CHGRES=$(eval echo $APRUNCFP)
 
          export pgm=$CHGRESNCEXEC
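The chgres hunk above swaps the deprecated npe_node_max for max_tasks_per_node inside bash's arithmetic ternary, which caps the number of CFP commands at the per-node task limit. A standalone sketch of that idiom follows; the numeric values are illustrative only, not workflow defaults:

    #!/usr/bin/env bash
    # Cap the number of commands launched per node at the platform task limit.
    ncmd=40                   # e.g. number of lines in the CFP command file (illustrative)
    max_tasks_per_node=24     # illustrative value; normally set by the platform configuration
    ncmd_max=$(( ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node ))
    echo "running ${ncmd_max} of ${ncmd} chgres commands concurrently"   # prints 24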
diff --git a/scripts/exgdas_atmos_gempak_gif_ncdc.sh b/scripts/exgdas_atmos_gempak_gif_ncdc.sh
index 63a7475a0e..2dc460cc55 100755
--- a/scripts/exgdas_atmos_gempak_gif_ncdc.sh
+++ b/scripts/exgdas_atmos_gempak_gif_ncdc.sh
@@ -6,55 +6,34 @@
 # in the future, we should move it above somewhere else.
 ##############################################################
 
-source "$HOMEgfs/ush/preamble.sh"
-
-cd $DATA
-
-export NTS=$USHgempak/restore
-
-if [ $MODEL = GDAS ]
-then
-    case $MODEL in
-      GDAS) fcsthrs="000";;
-    esac
-
-    export fhr
-    for fhr in $fcsthrs
-    do
-        icnt=1
-        maxtries=180
-        while [ $icnt -lt 1000 ]
-        do
-          if [ -r ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} ] ; then
-            break
-          else
-            sleep 20
-            let "icnt=icnt+1"
-          fi
-          if [ $icnt -ge $maxtries ]
-          then
-            msg="ABORTING after 1 hour of waiting for F$fhr to end."
-            err_exit $msg
-          fi
-        done
-
-       cp ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} gem_grids${fhr}.gem
-       export err=$?
-       if [[ $err -ne 0 ]] ; then
-          echo " File: ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} does not exist."
-          exit $err
-       fi
-     
-       if [ $cyc -eq 00 -o $cyc -eq 12 ]
-       then
-          $USHgempak/gempak_${RUN}_f${fhr}_gif.sh
-          if [ ! -f $USHgempak/gempak_${RUN}_f${fhr}_gif.sh ] ; then
-             echo "WARNING: $USHgempak/gempak_${RUN}_f${fhr}_gif.sh FILE is missing"
-          fi
-       fi
+source "${HOMEgfs}/ush/preamble.sh"
 
+cd "${DATA}" || exit 2
+
+export NTS="${HOMEgfs}/gempak/ush/restore"
+
+if [[ ${MODEL} == GDAS ]]; then
+    fcsthrs="000"
+
+    sleep_interval=20
+    max_tries=180
+    export fhr3
+    for fhr3 in ${fcsthrs}; do
+        gempak_file="${COM_ATMOS_GEMPAK_1p00}/${RUN}_1p00_${PDY}${cyc}f${fhr3}"
+        if ! wait_for_file "${gempak_file}" "${sleep_interval}" "${max_tries}" ; then
+            echo "FATAL ERROR: ${gempak_file} not found after ${max_tries} iterations"
+            exit 10
+        fi
+
+        cp "${gempak_file}" "gem_grids${fhr3}.gem"
+        export err=$?
+        if (( err != 0 )) ; then
+            echo "FATAL: Could not copy ${gempak_file}"
+            exit "${err}"
+        fi
+
+        "${HOMEgfs}/gempak/ush/gempak_${RUN}_f${fhr3}_gif.sh"
     done
 fi
 
-
 exit
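The rewritten GEMPAK GIF script above drops the hand-rolled 20-second polling loop in favor of the shared wait_for_file helper. As a rough sketch of what such a helper does, assuming the signature wait_for_file <file> <sleep_interval> <max_tries> seen in the call above (the real implementation lives in the workflow's ush utilities and may differ):

    #!/usr/bin/env bash
    # Minimal polling helper: succeed as soon as the file is readable,
    # otherwise sleep and retry, giving up after max_tries attempts.
    wait_for_file() {
      local file=${1:?} sleep_interval=${2:-20} max_tries=${3:-180}
      local try
      for (( try = 1; try <= max_tries; try++ )); do
        [[ -r ${file} ]] && return 0
        sleep "${sleep_interval}"
      done
      return 1
    }

    # Mirrors the call in the script above:
    # wait_for_file "${gempak_file}" "${sleep_interval}" "${max_tries}" || exit 10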
diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh
index 94a23f2a85..7feb3210ac 100755
--- a/scripts/exgdas_atmos_nawips.sh
+++ b/scripts/exgdas_atmos_nawips.sh
@@ -1,62 +1,30 @@
 #! /usr/bin/env bash
 
 ###################################################################
-# echo "----------------------------------------------------"
 # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids"
-# echo "----------------------------------------------------"
-# echo "History: Mar 2000 - First implementation of this new script."
-# echo "S Lilly: May 2008 - add logic to make sure that all of the "
-# echo "                    data produced from the restricted ECMWF"
-# echo "                    data on the CCS is properly protected."
-#####################################################################
-
-source "$HOMEgfs/ush/preamble.sh" "${2}"
-
-cd $DATA
-RUN2=$1
-fend=$2
+###################################################################
+
+source "${USHgfs}/preamble.sh" "${2}"
+
+cd "${DATA}" || exit 1
+grid=$1
+fhr3=$2
 DBN_ALERT_TYPE=$3
 destination=$4
 
-DATA_RUN=$DATA/$RUN2
-mkdir -p $DATA_RUN
-cd $DATA_RUN
+DATA_RUN="${DATA}/${grid}"
+mkdir -p "${DATA_RUN}"
+cd "${DATA_RUN}" || exit 1
 
-cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File g2varswmo2.tbl file is missing."
-   exit $err
-fi
-cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File g2vcrdwmo2.tbl file is missing."
-   exit $err
-fi
+# "Import" functions used in this script
+source "${USHgfs}/product_functions.sh"
 
-cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File g2varsncep1.tbl file is missing."
-   exit $err
-fi
-
-cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File g2vcrdncep1.tbl file is missing."
-   exit $err
-fi
+for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do
+  cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \
+    ( echo "FATAL ERROR: ${table} is missing" && exit 2 )
+done
 
-#
-NAGRIB=$GEMEXE/nagrib2_nc
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File $GEMEXE/nagrib2_nc is missing."
-   echo " WARNING: module GEMPAK was not loaded"
-   exit $err
-fi
+NAGRIB="${GEMEXE}/nagrib2"
 
 cpyfil=gds
 garea=dset
@@ -68,97 +36,50 @@ proj=
 output=T
 pdsext=no
 
-maxtries=180
-fhcnt=$fstart
-while [ $fhcnt -le $fend ] ; do
-  fhr=$(printf "%03d" $fhcnt)
-  fhcnt3=$(expr $fhr % 3)
-
-  fhr3=$(printf "%03d" $fhcnt)
-
-  GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3}
-
-  if [[ ${RUN2} = "gdas_0p25" ]]; then
-    export GRIBIN=${COM_ATMOS_GRIB_0p25}/${model}.${cycle}.pgrb2.0p25.f${fhr}
-    if [[ ! -f ${GRIBIN} ]] ; then
-       echo "WARNING: ${GRIBIN} FILE is missing"
-    fi
-    GRIBIN_chk=${COM_ATMOS_GRIB_0p25}${model}.${cycle}.pgrb2.0p25.f${fhr}.idx
-  else
-    export GRIBIN=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr}
-    if [[ ! -f ${GRIBIN} ]] ; then
-       echo "WARNING: ${GRIBIN} FILE is missing"
-    fi
-    GRIBIN_chk=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx
-  fi
-
-  icnt=1
-  while [ $icnt -lt 1000 ]
-  do
-    if [ -r $GRIBIN_chk ] ; then
-      sleep 5
-      break
-    else
-      echo "The process is waiting ... ${GRIBIN_chk} file to proceed."
-      sleep 20
-      let "icnt=icnt+1"
-    fi
-    if [ $icnt -ge $maxtries ]
-    then
-      echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end."
-      export err=7 ; err_chk
-      exit $err
-    fi
-  done
-
-  cp $GRIBIN grib$fhr
-
-  export pgm="nagrib2 F$fhr"
-  startmsg
-
-   $NAGRIB << EOF
-   GBFILE   = grib$fhr
-   INDXFL   = 
-   GDOUTF   = $GEMGRD
-   PROJ     = $proj
-   GRDAREA  = $grdarea
-   KXKY     = $kxky
-   MAXGRD   = $maxgrd
-   CPYFIL   = $cpyfil
-   GAREA    = $garea
-   OUTPUT   = $output
-   GBTBLS   = $gbtbls
-   GBDIAG   = 
-   PDSEXT   = $pdsext
-  l
-  r
+
+
+GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}"
+source_dirvar="COM_ATMOS_GRIB_${grid}"
+export GRIBIN="${!source_dirvar}/${model}.${cycle}.pgrb2.${grid}.f${fhr3}"
+GRIBIN_chk="${GRIBIN}.idx"
+
+if [[ ! -r "${GRIBIN_chk}" ]]; then
+  echo "FATAL ERROR: GRIB index file ${GRIBIN_chk} not found!"
+  export err=7 ; err_chk
+  exit "${err}"
+fi
+
+cp "${GRIBIN}" "grib${fhr3}"
+
+export pgm="nagrib2 F${fhr3}"
+startmsg
+
+${NAGRIB} << EOF
+GBFILE   = grib${fhr3}
+INDXFL   = 
+GDOUTF   = ${GEMGRD}
+PROJ     = ${proj}
+GRDAREA  = ${grdarea}
+KXKY     = ${kxky}
+MAXGRD   = ${maxgrd}
+CPYFIL   = ${cpyfil}
+GAREA    = ${garea}
+OUTPUT   = ${output}
+GBTBLS   = ${gbtbls}
+GBDIAG   = 
+PDSEXT   = ${pdsext}
+l
+r
 EOF
-  export err=$?;err_chk
-
-  cp "${GEMGRD}" "${destination}/.${GEMGRD}"
-  export err=$?
-  if [[ ${err} -ne 0 ]] ; then
-      echo " File ${GEMGRD} does not exist."
-      exit "${err}"
-  fi
-
-  mv "${destination}/.${GEMGRD}" "${destination}/${GEMGRD}"
-  if [[ ${SENDDBN} = "YES" ]] ; then
-      "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
-				 "${destination}/${GEMGRD}"
-  else
-      echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####"
-  fi
-
-  if [ $fhcnt -ge 240 ] ; then
-    let fhcnt=fhcnt+12
-  else
-    let fhcnt=fhcnt+finc
-  fi
-done
 
-$GEMEXE/gpend
-#####################################################################
+export err=$?; err_chk
+
+cpfs "${GEMGRD}" "${destination}/${GEMGRD}"
+if [[ ${SENDDBN} = "YES" ]] ; then
+  "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+    "${destination}/${GEMGRD}"
+fi
 
+"${GEMEXE}/gpend"
 
 ############################### END OF SCRIPT #######################
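The consolidated nawips script above selects its GRIB source directory by splicing the grid name into a variable name and dereferencing it with bash indirect expansion (${!var}). A small sketch of the idiom, with hypothetical directory values:

    #!/usr/bin/env bash
    # Hypothetical COM directory variables, as the workflow would export them.
    COM_ATMOS_GRIB_0p25="/com/gfs/v17/gdas.20240101/00/atmos/grib2/0p25"
    COM_ATMOS_GRIB_1p00="/com/gfs/v17/gdas.20240101/00/atmos/grib2/1p00"

    grid="1p00"
    source_dirvar="COM_ATMOS_GRIB_${grid}"   # build the name of the variable to read
    echo "${!source_dirvar}"                 # indirect expansion prints the 1p00 path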
diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh
index 1810fdef5d..e681fc55c5 100755
--- a/scripts/exgdas_atmos_verfozn.sh
+++ b/scripts/exgdas_atmos_verfozn.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 # exgdas_atmos_verfozn.sh
diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh
index 50320ffba1..bad8715acd 100755
--- a/scripts/exgdas_atmos_verfrad.sh
+++ b/scripts/exgdas_atmos_verfrad.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -37,9 +37,9 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then
 
    #------------------------------------------------------------------
    #  SATYPE is the list of expected satellite/instrument sources
-   #  in the radstat file.  It should be stored in the $TANKverf
-   #  directory.  If it isn't there then use the $FIXgdas copy.  In all
-   #  cases write it back out to the radmon.$PDY directory.  Add any
+   #  in the radstat file. It should be stored in the $TANKverf
+   #  directory. If it isn't there then use the gdas fix copy. In all
+   #  cases write it back out to the radmon.$PDY directory. Add any
    #  new sources to the list before writing back out.
    #------------------------------------------------------------------
 
@@ -131,15 +131,6 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then
     "${USHgfs}/radmon_verf_time.sh"
     rc_time=$?
 
-    #--------------------------------------
-    #  optionally run clean_tankdir script
-    #
-    if [[ ${CLEAN_TANKVERF:-0} -eq 1 ]]; then
-       "${USHradmon}/clean_tankdir.sh" glb 60
-       rc_clean_tankdir=$?
-       echo "rc_clean_tankdir = ${rc_clean_tankdir}"
-    fi
-
 fi
 
 
diff --git a/scripts/exgdas_enkf_earc.py b/scripts/exgdas_enkf_earc.py
new file mode 100755
index 0000000000..a515ec9746
--- /dev/null
+++ b/scripts/exgdas_enkf_earc.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+
+import os
+
+from pygfs.task.archive import Archive
+from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, chdir, logit
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the Archive object
+    archive = Archive(config)
+
+    # Pull out all the configuration keys needed to run the rest of the archive steps
+    keys = ['ATARDIR', 'current_cycle', 'IAUFHRS', 'RUN', 'PDY',
+            'PSLOT', 'HPSSARCH', 'LOCALARCH', 'ROTDIR', 'PARMgfs',
+            'ARCDIR', 'SDATE', 'MODE', 'ENSGRP', 'NMEM_EARCGRP',
+            'NMEM_ENS', 'DO_CALC_INCREMENT_ENKF_GFS', 'DO_JEDIATMENS',
+            'lobsdiag_forenkf', 'FHMIN_ENKF', 'FHMAX_ENKF_GFS',
+            'FHOUT_ENKF_GFS', 'FHMAX_ENKF', 'FHOUT_ENKF', 'ENKF_SPREAD',
+            'restart_interval_enkfgdas', 'restart_interval_enkfgfs',
+            'DOHYBVAR', 'DOIAU_ENKF', 'IAU_OFFSET', 'DOIAU',
+            'DO_CALC_INCREMENT', 'assim_freq', 'ARCH_CYC',
+            'ARCH_WARMICFREQ', 'ARCH_FCSTICFREQ',
+            'IAUFHRS_ENKF']
+
+    archive_dict = AttrDict()
+    for key in keys:
+        archive_dict[key] = archive.task_config[key]
+
+    # Also import all COMIN* directory and template variables
+    for key in archive.task_config.keys():
+        if key.startswith("COM"):
+            archive_dict[key] = archive.task_config[key]
+
+    cwd = os.getcwd()
+
+    os.chdir(config.ROTDIR)
+
+    # Determine which archives to create
+    arcdir_set, atardir_sets = archive.configure(archive_dict)
+
+    # Populate the product archive (ARCDIR)
+    archive.execute_store_products(arcdir_set)
+
+    # Create the backup tarballs and store in ATARDIR
+    for atardir_set in atardir_sets:
+        archive.execute_backup_dataset(atardir_set)
+
+    os.chdir(cwd)
+
+
+if __name__ == '__main__':
+    main()
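The new Python archive driver above is configured entirely from the environment: cast_strdict_as_dtypedict converts os.environ into typed values, so every key it pulls (ROTDIR, ATARDIR, RUN, PDY, and so on) must be exported by whatever job invokes it. A hypothetical invocation sketch, with placeholder values that are not actual workflow settings:

    #!/usr/bin/env bash
    # Hypothetical driver snippet -- every path and value below is a placeholder.
    export ROTDIR="/placeholder/ROTDIR" ARCDIR="/placeholder/ARCDIR" ATARDIR="/placeholder/ATARDIR"
    export RUN="enkfgdas" PDY="20240101" PSLOT="placeholder" ENSGRP="1"
    export HPSSARCH="NO" LOCALARCH="YES"
    # ...plus the remaining keys listed in the script above...
    python3 "${HOMEgfs}/scripts/exgdas_enkf_earc.py"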
diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh
deleted file mode 100755
index 199b5609a2..0000000000
--- a/scripts/exgdas_enkf_earc.sh
+++ /dev/null
@@ -1,163 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-##############################################
-# Begin JOB SPECIFIC work
-##############################################
-export n=$((10#${ENSGRP}))
-export CDUMP_ENKF="${EUPD_CYC:-"gdas"}"
-
-# ICS are restarts and always lag INC by $assim_freq hours.
-EARCINC_CYC=${ARCH_CYC}
-EARCICS_CYC=$((ARCH_CYC-assim_freq))
-if [ "${EARCICS_CYC}" -lt 0 ]; then
-  EARCICS_CYC=$((EARCICS_CYC+24))
-fi
-
-"${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}"
-status=$?
-if [ "${status}" -ne 0 ]; then
-   echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!"
-   exit "${status}"
-fi
-
-cd "${ROTDIR}" || exit 2
-
-source "${HOMEgfs}/ush/file_utils.sh"
-
-###################################################################
-# ENSGRP > 0 archives a group of ensemble members
-firstday=$(${NDATE} +24 "${SDATE}")
-if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
-
-#--set the archiving command and create local directories, if necessary
-   TARCMD="htar"
-   if [[ ${LOCALARCH} = "YES" ]]; then
-       TARCMD="tar"
-       if [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi
-   fi
-
-#--determine when to save ICs for warm start
-   SAVEWARMICA="NO"
-   SAVEWARMICB="NO"
-   mm="${PDY:4:2}"
-   dd="${PDY:6:2}"
-   nday=$(( (10#${mm}-1)*30+10#${dd} ))
-   mod=$((nday % ARCH_WARMICFREQ))
-   if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi
-   if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi
-   if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi
-   if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi
-
-   if [ "${EARCICS_CYC}" -eq 18 ]; then
-       nday1=$((nday+1))
-       mod1=$((nday1 % ARCH_WARMICFREQ))
-       if [ "${mod1}" -eq 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi
-       if [ "${mod1}" -ne 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="NO" ; fi
-       if [ "${PDY}${cyc}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi
-   fi
-
-   if [ "${PDY}${cyc}" -gt "${SDATE}" ]; then # Don't run for first half cycle
-
-     ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_grp${n}.txt")
-     status=$?
-     if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then
-         echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_grp${ENSGRP}.tar failed"
-         exit "${status}"
-     fi
-
-     if [ "${SAVEWARMICA}" = "YES" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then
-       ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restarta_grp${n}.txt")
-       status=$?
-       if [ "${status}" -ne 0 ]; then
-           echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restarta_grp${ENSGRP}.tar failed"
-           exit "${status}"
-       fi
-     fi
-
-     if [ "${SAVEWARMICB}" = "YES" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then
-       ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restartb_grp${n}.txt")
-       status=$?
-       if [ "${status}" -ne 0 ]; then
-           echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restartb_grp${ENSGRP}.tar failed"
-           exit "${status}"
-       fi
-     fi
-
-   fi # CDATE>SDATE
-
-fi
-
-
-###################################################################
-# ENSGRP 0 archives ensemble means and copy data to online archive
-if [ "${ENSGRP}" -eq 0 ]; then
-
-    if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
-
-        #--set the archiving command and create local directories, if necessary
-        TARCMD="htar"
-        HSICMD="hsi"
-        if [[ ${LOCALARCH} = "YES" ]]; then
-            TARCMD="tar"
-            HSICMD=""
-            if [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi
-        fi
-
-        set +e
-        # Check if the tarball will have rstprod in it
-        has_rstprod="NO"
-        while IFS= read -r file; do
-            if [[ -f ${file} ]]; then
-                group=$( stat -c "%G" "${file}" )
-                if [[ "${group}" == "rstprod" ]]; then
-                    has_rstprod="YES"
-                    break
-                fi
-            fi
-        done < "${DATA}/${RUN}.txt"
-
-        # Create the tarball
-        tar_fl=${ATARDIR}/${PDY}${cyc}/${RUN}.tar
-        ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${RUN}.txt")
-        status=$?
-        if [[ "${status}" -ne 0 ]]; then
-            echo "FATAL ERROR: Tarring of ${tar_fl} failed"
-            exit "${status}"
-        fi
-
-        # If rstprod was found, change the group of the tarball
-        if [[ "${has_rstprod}" == "YES" ]]; then
-            ${HSICMD} chgrp rstprod "${tar_fl}"
-            stat_chgrp=$?
-            ${HSICMD} chmod 640 "${tar_fl}"
-            stat_chgrp=$((stat_chgrp+$?))
-            if [[ "${stat_chgrp}" -gt 0 ]]; then
-                echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!"
-                echo "Attempting to delete ${tar_fl}"
-                ${HSICMD} rm "${tar_fl}"
-                echo "Please verify that ${tar_fl} was deleted!"
-                exit "${stat_chgrp}"
-            fi
-        fi
-
-        # For safety, test if the htar/tar command failed only after changing groups
-        if (( status != 0 && ${PDY}${cyc} >= firstday )); then
-            echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed"
-            exit "${status}"
-        fi
-        set_strict
-    fi
-
-    #-- Archive online for verification and diagnostics
-    [[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}"
-    cd "${ARCDIR}" || exit 2
-
-    nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.enkfstat" \
-        "enkfstat.${RUN}.${PDY}${cyc}"
-    nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.gsistat.ensmean" \
-        "gsistat.${RUN}.${PDY}${cyc}.ensmean"
-fi
-
-exit 0
diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh
index c20d1dec78..442b0b04a1 100755
--- a/scripts/exgdas_enkf_ecen.sh
+++ b/scripts/exgdas_enkf_ecen.sh
@@ -17,7 +17,7 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Directories.
 pwd=$(pwd)
@@ -29,18 +29,16 @@ export CASE=${CASE:-384}
 ntiles=${ntiles:-6}
 
 # Utilities
-NCP=${NCP:-"/bin/cp -p"}
-NLN=${NLN:-"/bin/ln -sf"}
-NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 
 # Scripts
 
 # Executables.
-GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x}
-GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x}
-RECENATMEXEC=${RECENATMEXEC:-$HOMEgfs/exec/recentersigp.x}
-CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-$HOMEgfs/exec/calc_increment_ens.x}
-CALCINCNCEXEC=${CALCINCEXEC:-$HOMEgfs/exec/calc_increment_ens_ncio.x}
+GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECgfs}/getsigensmeanp_smooth.x}
+GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECgfs}/getsfcensmeanp.x}
+RECENATMEXEC=${RECENATMEXEC:-${EXECgfs}/recentersigp.x}
+CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-${EXECgfs}/calc_increment_ens.x}
+CALCINCNCEXEC=${CALCINCEXEC:-${EXECgfs}/calc_increment_ens_ncio.x}
 
 # Files.
 OPREFIX=${OPREFIX:-""}
@@ -51,7 +49,6 @@ GPREFIX=${GPREFIX:-""}
 GPREFIX_ENS=${GPREFIX_ENS:-$GPREFIX}
 
 # Variables
-NMEM_ENS=${NMEM_ENS:-80}
 imp_physics=${imp_physics:-99}
 INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"}
 DOIAU=${DOIAU_ENKF:-"NO"}
@@ -59,25 +56,29 @@ FHMIN=${FHMIN_ECEN:-3}
 FHMAX=${FHMAX_ECEN:-9}
 FHOUT=${FHOUT_ECEN:-3}
 FHSFC=${FHSFC_ECEN:-$FHMIN}
-if [ $RUN = "enkfgfs" ]; then
+NMEM_ENS_MAX=${NMEM_ENS:-80}
+if [ "${RUN}" = "enkfgfs" ]; then
    DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"}
+   NMEM_ENS=${NMEM_ENS_GFS:-30}
+   ec_offset=${NMEM_ENS_GFS_OFFSET:-20}
+   mem_offset=$((ec_offset * cyc/6))
 else
    DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"}
+   NMEM_ENS=${NMEM_ENS:-80}
+   mem_offset=0
 fi
 
 # global_chgres stuff
-CHGRESNEMS=${CHGRESNEMS:-$HOMEgfs/exec/enkf_chgres_recenter.x}
-CHGRESNC=${CHGRESNC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x}
+CHGRESNEMS=${CHGRESNEMS:-${EXECgfs}/enkf_chgres_recenter.x}
+CHGRESNC=${CHGRESNC:-${EXECgfs}/enkf_chgres_recenter_nc.x}
 NTHREADS_CHGRES=${NTHREADS_CHGRES:-24}
 APRUN_CHGRES=${APRUN_CHGRES:-""}
 
 # global_cycle stuff
-CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh}
-export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle}
+CYCLESH=${CYCLESH:-${USHgfs}/global_cycle.sh}
+export CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle}
 APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}}
 NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}}
-export FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
 export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"}
 export FHOUR=${FHOUR:-0}
 export DELTSFC=${DELTSFC:-6}
@@ -108,12 +109,17 @@ ENKF_SUFFIX="s"
 for FHR in $(seq $FHMIN $FHOUT $FHMAX); do
 
 for imem in $(seq 1 $NMEM_ENS); do
+   smem=$((imem + mem_offset))
+   if (( smem > NMEM_ENS_MAX )); then
+      smem=$((smem - NMEM_ENS_MAX))
+   fi
+   gmemchar="mem"$(printf %03i $smem)
    memchar="mem"$(printf %03i $imem)
 
-   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \
+   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl -x \
       COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL
 
-   MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \
+   MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -x \
       COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL
 
    ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX_ENS}atmf00${FHR}${ENKF_SUFFIX}.nc" "./atmges_${memchar}"
@@ -241,7 +247,7 @@ if [ $RECENTER_ENKF = "YES" ]; then
 
       $NLN $ATMANL_GSI        atmanl_gsi
       $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres
-      SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt}
+      SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
       $NLN $CHGRESNC chgres.x
       chgresnml=chgres_nc_gauss.nml
       nmltitle=chgres
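
The new member-offset logic in this script maps each enkfgfs member onto the larger enkfgdas ensemble from the previous cycle: the index is shifted by NMEM_ENS_GFS_OFFSET for every 6 hours of the cycle and wrapped once it passes NMEM_ENS_MAX, which is why the previous-cycle COM path is declared with gmemchar while the output path keeps memchar. A standalone sketch of just that arithmetic, using the defaults shown in the hunk (30 GFS members, offset 20, 80-member GDAS ensemble):

    #!/usr/bin/env bash
    # Sketch of the memchar -> gmemchar mapping used above (defaults from the hunk).
    NMEM_ENS_MAX=80          # size of the enkfgdas ensemble
    NMEM_ENS=30              # members carried by enkfgfs
    NMEM_ENS_GFS_OFFSET=20   # shift applied per 6-h cycle
    for cyc in 00 06 12 18; do
      mem_offset=$(( NMEM_ENS_GFS_OFFSET * 10#${cyc} / 6 ))
      for imem in 1 "${NMEM_ENS}"; do   # first and last member only, for brevity
        smem=$((imem + mem_offset))
        if (( smem > NMEM_ENS_MAX )); then
          smem=$((smem - NMEM_ENS_MAX))
        fi
        printf 'cyc=%s  mem%03d reads guess from gdas mem%03d\n' "${cyc}" "${imem}" "${smem}"
      done
    done

With these defaults, member 030 of the 18z enkfgfs wraps around to enkfgdas member 010, so successive cycles sample different portions of the 80-member ensemble.
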
diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh
deleted file mode 100755
index fd6136ddd2..0000000000
--- a/scripts/exgdas_enkf_fcst.sh
+++ /dev/null
@@ -1,225 +0,0 @@
-#! /usr/bin/env bash
-
-################################################################################
-####  UNIX Script Documentation Block
-#                      .                                             .
-# Script name:         exgdas_enkf_fcst.sh
-# Script description:  Run ensemble forecasts
-#
-# Author:        Rahul Mahajan      Org: NCEP/EMC     Date: 2017-03-02
-#
-# Abstract: This script runs ensemble forecasts serially one-after-another
-#
-# $Id$
-#
-# Attributes:
-#   Language: POSIX shell
-#
-####
-################################################################################
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-# Ensemble group, begin and end
-ENSGRP=${ENSGRP:-1}
-ENSBEG=${ENSBEG:-1}
-ENSEND=${ENSEND:-1}
-
-# Re-run failed members, or entire group
-RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"}
-
-# Recenter flag and increment file prefix
-RECENTER_ENKF=${RECENTER_ENKF:-"YES"}
-export PREFIX_ATMINC=${PREFIX_ATMINC:-""}
-
-################################################################################
-# Preprocessing
-cd "${DATA}" || exit 99
-DATATOP=${DATA}
-
-################################################################################
-# Set output data
-EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}"
-if [[ -f ${EFCSGRP} ]]; then
-   if [[ ${RERUN_EFCSGRP} = "YES" ]]; then
-      rm -f "${EFCSGRP}"
-   else
-      echo "RERUN_EFCSGRP = ${RERUN_EFCSGRP}, will re-run FAILED members only!"
-      ${NMV} "${EFCSGRP}" "${EFCSGRP}.fail"
-   fi
-fi
-
-################################################################################
-# Set namelist/model config options common to all members once
-
-# There are many many model namelist options
-# Some are resolution (CASE) dependent, some depend on the model configuration
-# and will need to be added here before $FORECASTSH is called
-# For now assume that
-# 1. the ensemble and the deterministic are same resolution
-# 2. the ensemble runs with the same configuration as the deterministic
-
-# Model config option for Ensemble
-export TYPE=${TYPE_ENKF:-${TYPE:-nh}}                  # choices:  nh, hydro
-export MONO=${MONO_ENKF:-${MONO:-non-mono}}            # choices:  mono, non-mono
-
-# fv_core_nml
-export CASE=${CASE_ENS:-${CASE:-C768}}
-export layout_x=${layout_x_ENKF:-${layout_x:-8}}
-export layout_y=${layout_y_ENKF:-${layout_y:-16}}
-export LEVS=${LEVS_ENKF:-${LEVS:-64}}
-
-# nggps_diag_nml
-export FHOUT=${FHOUT_ENKF:-3}
-if [[ ${RUN} == "enkfgfs" ]]; then
-    export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT_ENKF:${FHOUT:-3}}}
-fi
-# model_configure
-export DELTIM=${DELTIM_ENKF:-${DELTIM:-225}}
-export FHMAX=${FHMAX_ENKF:-9}
-if [[ ${RUN} == "enkfgfs" ]]; then
-   export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX_ENKF:-${FHMAX}}}
-fi
-
-# gfs_physics_nml
-export FHSWR=${FHSWR_ENKF:-${FHSWR:-3600.}}
-export FHLWR=${FHLWR_ENKF:-${FHLWR:-3600.}}
-export IEMS=${IEMS_ENKF:-${IEMS:-1}}
-export ISOL=${ISOL_ENKF:-${ISOL:-2}}
-export IAER=${IAER_ENKF:-${IAER:-111}}
-export ICO2=${ICO2_ENKF:-${ICO2:-2}}
-export cdmbgwd=${cdmbgwd_ENKF:-${cdmbgwd:-"3.5,0.25"}}
-export dspheat=${dspheat_ENKF:-${dspheat:-".true."}}
-export shal_cnv=${shal_cnv_ENKF:-${shal_cnv:-".true."}}
-export FHZER=${FHZER_ENKF:-${FHZER:-6}}
-export FHCYC=${FHCYC_ENKF:-${FHCYC:-6}}
-
-# Set PREFIX_ATMINC to r when recentering on
-if [[ ${RECENTER_ENKF} = "YES" ]]; then
-   export PREFIX_ATMINC="r"
-fi
-
-# Ignore possible spelling error (nothing is misspelled)
-# shellcheck disable=SC2153
-GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}")
-declare -x gPDY="${GDATE:0:8}"
-declare -x gcyc="${GDATE:8:2}"
-
-################################################################################
-# Run forecast for ensemble member
-rc=0
-for imem in $(seq "${ENSBEG}" "${ENSEND}"); do
-
-   cd "${DATATOP}"
-
-   ENSMEM=$(printf %03i "${imem}")
-   export ENSMEM
-   memchar="mem${ENSMEM}"
-
-   echo "Processing MEMBER: ${ENSMEM}"
-
-   ra=0
-
-   skip_mem="NO"
-   if [[ -f ${EFCSGRP}.fail ]]; then
-      set +e
-      memstat=$(grep "MEMBER ${ENSMEM}" "${EFCSGRP}.fail" | grep -c "PASS")
-      set_strict
-      [[ ${memstat} -eq 1 ]] && skip_mem="YES"
-   fi
-
-   # Construct COM variables from templates (see config.com)
-   # Can't make these read-only because we are looping over members
-   MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \
-     COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_CONF
-
-   MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
-
-   if [[ ${DO_WAVE} == "YES" ]]; then
-     MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY
-     MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL
-   fi
-
-   if [[ ${DO_OCN} == "YES" ]]; then
-     MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_MED_RESTART COM_OCEAN_RESTART \
-       COM_OCEAN_INPUT COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS
-     MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL
-   fi
-
-   if [[ ${DO_ICE} == "YES" ]]; then
-     MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART
-     MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
-   fi
-
-   if [[ ${DO_AERO} == "YES" ]]; then
-     MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_CHEM_HISTORY
-   fi
-
-
-   if [[ ${skip_mem} = "NO" ]]; then
-
-      ra=0
-
-      export MEMBER=${imem}
-      export DATA="${DATATOP}/${memchar}"
-      if [[ -d ${DATA} ]]; then rm -rf "${DATA}"; fi
-      mkdir -p "${DATA}"
-      ${FORECASTSH}
-      ra=$?
-
-      # Notify a member forecast failed and abort
-      if [[ ${ra} -ne 0 ]]; then
-         err_exit "FATAL ERROR:  forecast of member ${ENSMEM} FAILED.  Aborting job"
-      fi
-
-      rc=$((rc+ra))
-
-   fi
-
-   if [[ ${SENDDBN} = YES ]]; then
-     fhr=${FHOUT}
-     while [[ ${fhr} -le ${FHMAX} ]]; do
-       FH3=$(printf %03i "${fhr}")
-       if (( fhr % 3 == 0 )); then
-         "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc"
-       fi
-       fhr=$((fhr+FHOUT))
-     done
-   fi
-
-   cd "${DATATOP}"
-
-   if [[ -s ${EFCSGRP} ]]; then
-       ${NCP} "${EFCSGRP}" log_old
-   fi
-   [[ -f log ]] && rm log
-   [[ -f log_new ]] && rm log_new
-   if [[ ${ra} -ne 0 ]]; then
-      echo "MEMBER ${ENSMEM} : FAIL" > log
-   else
-      echo "MEMBER ${ENSMEM} : PASS" > log
-   fi
-   if [[ -s log_old ]] ; then
-       cat log_old log > log_new
-   else
-       cat log > log_new
-   fi
-   ${NCP} log_new "${EFCSGRP}"
-
-done
-
-################################################################################
-# Echo status of ensemble group
-cd "${DATATOP}"
-echo "Status of ensemble members in group ${ENSGRP}:"
-cat "${EFCSGRP}"
-[[ -f ${EFCSGRP}.fail ]] && rm "${EFCSGRP}".fail
-
-################################################################################
-# If any members failed, error out
-export err=${rc}; err_chk
-
-################################################################################
-#  Postprocessing
-
-exit "${err}"
diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh
index 86ab9071a4..6e93284695 100755
--- a/scripts/exgdas_enkf_post.sh
+++ b/scripts/exgdas_enkf_post.sh
@@ -17,15 +17,11 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Directories.
 pwd=$(pwd)
 
-# Utilities
-NCP=${NCP:-"/bin/cp"}
-NLN=${NLN:-"/bin/ln -sf"}
-
 APRUN_EPOS=${APRUN_EPOS:-${APRUN:-""}}
 NTHREADS_EPOS=${NTHREADS_EPOS:-1}
 
@@ -34,11 +30,11 @@ SENDDBN=${SENDDBN:-"NO"}
 
 # Fix files
 LEVS=${LEVS:-64}
-HYBENSMOOTH=${HYBENSMOOTH:-$FIXgsi/global_hybens_smoothinfo.l${LEVS}.txt}
+HYBENSMOOTH=${HYBENSMOOTH:-${FIXgfs}/gsi/global_hybens_smoothinfo.l${LEVS}.txt}
 
 # Executables.
-GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x}
-GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x}
+GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECgfs}/getsigensmeanp_smooth.x}
+GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECgfs}/getsfcensmeanp.x}
 
 # Other variables.
 PREFIX=${PREFIX:-""}
@@ -46,10 +42,11 @@ FHMIN=${FHMIN_EPOS:-3}
 FHMAX=${FHMAX_EPOS:-9}
 FHOUT=${FHOUT_EPOS:-3}
 
-if [[ $CDUMP == "gfs" ]]; then
+if [[ "${RUN}" == "enkfgfs" ]]; then
    NMEM_ENS=${NMEM_ENS_GFS:-${NMEM_ENS:-30}}
+else
+   NMEM_ENS=${NMEM_ENS:-80}
 fi
-NMEM_ENS=${NMEM_ENS:-80}
 SMOOTH_ENKF=${SMOOTH_ENKF:-"NO"}
 ENKF_SPREAD=${ENKF_SPREAD:-"NO"}
 
@@ -69,7 +66,7 @@ export OMP_NUM_THREADS=$NTHREADS_EPOS
 # Forecast ensemble member files
 for imem in $(seq 1 $NMEM_ENS); do
    memchar="mem"$(printf %03i "${imem}")
-   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL
+   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl -x COM_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL
 
    for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
       fhrchar=$(printf %03i $fhr)
@@ -79,7 +76,7 @@ for imem in $(seq 1 $NMEM_ENS); do
 done
 
 # Forecast ensemble mean and smoothed files
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_STAT:COM_ATMOS_HISTORY_TMPL
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY_STAT:COM_ATMOS_HISTORY_TMPL
 if [[ ! -d "${COM_ATMOS_HISTORY_STAT}" ]]; then mkdir -p "${COM_ATMOS_HISTORY_STAT}"; fi
 
 for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
@@ -89,7 +86,7 @@ for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
    if [ $SMOOTH_ENKF = "YES" ]; then
       for imem in $(seq 1 $NMEM_ENS); do
          memchar="mem"$(printf %03i "${imem}")
-         MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY
+         MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} declare_from_tmpl -x COM_ATMOS_HISTORY
          ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhrchar}${ENKF_SUFFIX}.nc" "atmf${fhrchar}${ENKF_SUFFIX}_${memchar}"
       done
    fi
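
The post script now keys the ensemble size off ${RUN} instead of ${CDUMP} and moves the 80-member default into an explicit else branch rather than a trailing assignment. Since ${VAR:-default} only substitutes when the variable is unset or empty, the effect is easy to confirm with a tiny demonstration (values mirror the defaults in the hunk; nothing is assumed to be pre-exported):

    #!/usr/bin/env bash
    # ${VAR:-default} keeps an existing value and only fills in when the variable is unset or empty.
    for RUN in enkfgfs enkfgdas; do
      unset NMEM_ENS NMEM_ENS_GFS              # pretend the config exported nothing
      if [[ "${RUN}" == "enkfgfs" ]]; then
        NMEM_ENS=${NMEM_ENS_GFS:-${NMEM_ENS:-30}}
      else
        NMEM_ENS=${NMEM_ENS:-80}
      fi
      echo "${RUN}: NMEM_ENS=${NMEM_ENS}"      # enkfgfs -> 30, enkfgdas -> 80
    done
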
diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh
index 2ad624bcdb..d0018d1099 100755
--- a/scripts/exgdas_enkf_select_obs.sh
+++ b/scripts/exgdas_enkf_select_obs.sh
@@ -17,16 +17,13 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Directories.
 pwd=$(pwd)
 
-# Utilities
-export NLN=${NLN:-"/bin/ln -sf"}
-
 # Scripts.
-ANALYSISSH=${ANALYSISSH:-$HOMEgfs/scripts/exglobal_atmos_analysis.sh}
+ANALYSISSH=${ANALYSISSH:-${SCRgfs}/exglobal_atmos_analysis.sh}
 
 # Select obs
 export RUN_SELECT=${RUN_SELECT:-"YES"}
diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh
index 81d68fb9fe..2720dd5d5f 100755
--- a/scripts/exgdas_enkf_sfc.sh
+++ b/scripts/exgdas_enkf_sfc.sh
@@ -17,21 +17,20 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Directories.
 pwd=$(pwd)
 
 # Base variables
 DONST=${DONST:-"NO"}
+GSI_SOILANAL=${GSI_SOILANAL:-"NO"}
 DOSFCANL_ENKF=${DOSFCANL_ENKF:-"YES"}
 export CASE=${CASE:-384}
 ntiles=${ntiles:-6}
 
 # Utilities
-NCP=${NCP:-"/bin/cp -p"}
-NLN=${NLN:-"/bin/ln -sf"}
-NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 
 # Scripts
 
@@ -46,16 +45,22 @@ GPREFIX=${GPREFIX:-""}
 GPREFIX_ENS=${GPREFIX_ENS:-${GPREFIX}}
 
 # Variables
-NMEM_ENS=${NMEM_ENS:-80}
+NMEM_ENS_MAX=${NMEM_ENS:-80}
+if [ "${RUN}" = "enkfgfs" ]; then
+   NMEM_ENS=${NMEM_ENS_GFS:-30}
+   ec_offset=${NMEM_ENS_GFS_OFFSET:-20}
+   mem_offset=$((ec_offset * cyc/6))
+else
+   NMEM_ENS=${NMEM_ENS:-80}
+   mem_offset=0
+fi
 DOIAU=${DOIAU_ENKF:-"NO"}
 
 # Global_cycle stuff
-CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh}
-export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle}
+CYCLESH=${CYCLESH:-${USHgfs}/global_cycle.sh}
+export CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle}
 APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}}
 NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}}
-export FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
 export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"}
 export FHOUR=${FHOUR:-0}
 export DELTSFC=${DELTSFC:-6}
@@ -63,7 +68,6 @@ export DELTSFC=${DELTSFC:-6}
 APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}}
 NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}}
 
-
 ################################################################################
 # Preprocessing
 mkdata=NO
@@ -134,18 +138,24 @@ if [ $DOIAU = "YES" ]; then
         export TILE_NUM=$n
 
         for imem in $(seq 1 $NMEM_ENS); do
-
+            smem=$((imem + mem_offset))
+            if (( smem > NMEM_ENS_MAX )); then
+               smem=$((smem - NMEM_ENS_MAX))
+            fi
+            gmemchar="mem"$(printf %03i "$smem")
             cmem=$(printf %03i $imem)
             memchar="mem$cmem"
 
-            MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \
+            MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \
                 COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL
 
-            MEMDIR=${memchar} RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com \
+            MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \
                 COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL
 
-            [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}"
+            MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \
+                COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL
 
+            [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}"
             ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \
                 "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc"
             ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \
@@ -155,7 +165,12 @@ if [ $DOIAU = "YES" ]; then
             ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc"     "${DATA}/fngrid.${cmem}"
             ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}"
 
-        done
+            if [[ ${GSI_SOILANAL} = "YES" ]]; then
+                FHR=6
+                ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \
+                   "${DATA}/lnd_incr.${cmem}"
+            fi
+        done # ensembles
 
         CDATE="${PDY}${cyc}" ${CYCLESH}
         export err=$?; err_chk
@@ -170,14 +185,18 @@ if [ $DOSFCANL_ENKF = "YES" ]; then
         export TILE_NUM=$n
 
         for imem in $(seq 1 $NMEM_ENS); do
-
+            smem=$((imem + mem_offset))
+            if (( smem > NMEM_ENS_MAX )); then
+               smem=$((smem - NMEM_ENS_MAX))
+            fi
+            gmemchar="mem"$(printf %03i "$smem")
             cmem=$(printf %03i $imem)
             memchar="mem$cmem"
 
-            MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \
+            MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \
                 COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL
 
-            RUN="${GDUMP_ENS}" MEMDIR=${memchar} YMD=${gPDY} HH=${gcyc} generate_com \
+            RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \
                 COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL
 
             [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}"
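
When GSI_SOILANAL=YES, the surface job now links each member's 6-hour surface/soil increment into the run directory under a per-member name (lnd_incr.NNN) so the subsequent global_cycle call can see every member's increment for the current tile. A minimal sketch of that linking convention; the COM path, prefix, and member count are placeholders:

    #!/usr/bin/env bash
    # Sketch of the per-member soil-increment links made when GSI_SOILANAL=YES.
    GSI_SOILANAL="YES"
    NMEM_ENS=3                        # small value for illustration
    FHR=6
    APREFIX_ENS="enkfgdas.t06z."      # placeholder prefix
    DATA=./demo_run; mkdir -p "${DATA}"

    for imem in $(seq 1 "${NMEM_ENS}"); do
      cmem=$(printf %03i "${imem}")
      COM_ATMOS_ANALYSIS_MEM="${PWD}/COM/mem${cmem}/analysis"   # stands in for the template-derived path
      mkdir -p "${COM_ATMOS_ANALYSIS_MEM}"
      touch "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc"   # pretend increment file
      if [[ ${GSI_SOILANAL} = "YES" ]]; then
        ln -sf "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" "${DATA}/lnd_incr.${cmem}"
      fi
    done
    ls -l "${DATA}"
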
diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh
index 1f11026ac4..e924274d39 100755
--- a/scripts/exgdas_enkf_update.sh
+++ b/scripts/exgdas_enkf_update.sh
@@ -17,15 +17,13 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Directories.
 pwd=$(pwd)
 
 # Utilities
-NCP=${NCP:-"/bin/cp -p"}
-NLN=${NLN:-"/bin/ln -sf"}
-NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 USE_CFP=${USE_CFP:-"NO"}
 CFP_MP=${CFP_MP:-"NO"}
 nm=""
@@ -37,7 +35,7 @@ APRUN_ENKF=${APRUN_ENKF:-${APRUN:-""}}
 NTHREADS_ENKF=${NTHREADS_ENKF:-${NTHREADS:-1}}
 
 # Executables
-ENKFEXEC=${ENKFEXEC:-$HOMEgfs/exec/enkf.x}
+ENKFEXEC=${ENKFEXEC:-${EXECgfs}/enkf.x}
 
 # Cycling and forecast hour specific parameters
 CDATE=${CDATE:-"2001010100"}
@@ -56,7 +54,6 @@ ENKFSTAT=${ENKFSTAT:-${APREFIX}enkfstat}
 
 # Namelist parameters
 USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"NO"}
-NMEM_ENS=${NMEM_ENS:-80}
 NAM_ENKF=${NAM_ENKF:-""}
 SATOBS_ENKF=${SATOBS_ENKF:-""}
 OZOBS_ENKF=${OZOBS_ENKF:-""}
@@ -70,7 +67,7 @@ corrlength=${corrlength:-1250}
 lnsigcutoff=${lnsigcutoff:-2.5}
 analpertwt=${analpertwt:-0.85}
 readin_localization_enkf=${readin_localization_enkf:-".true."}
-reducedgrid=${reducedgrid:-".true."}
+reducedgrid=${reducedgrid:-".false."}
 letkf_flag=${letkf_flag:-".false."}
 getkf=${getkf:-".false."}
 denkf=${denkf:-".false."}
@@ -81,12 +78,19 @@ cnvw_option=${cnvw_option:-".false."}
 netcdf_diag=${netcdf_diag:-".true."}
 modelspace_vloc=${modelspace_vloc:-".false."} # if true, 'vlocal_eig.dat' is needed
 IAUFHRS_ENKF=${IAUFHRS_ENKF:-6}
-if [ $RUN = "enkfgfs" ]; then
+NMEM_ENS_MAX=${NMEM_ENS:-80}
+if [ "${RUN}" = "enkfgfs" ]; then
    DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"}
+   NMEM_ENS=${NMEM_ENS_GFS:-30}
+   ec_offset=${NMEM_ENS_GFS_OFFSET:-20}
+   mem_offset=$((ec_offset * cyc/6))
 else
    DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"}
+   NMEM_ENS=${NMEM_ENS:-80}
+   mem_offset=0
 fi
 INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"}
+GSI_SOILANAL=${GSI_SOILANAL:-"NO"}
 
 ################################################################################
 
@@ -105,14 +109,14 @@ else
 fi
 LATA_ENKF=${LATA_ENKF:-$LATB_ENKF}
 LONA_ENKF=${LONA_ENKF:-$LONB_ENKF}
-SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt}
-SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt}
-CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt}
-OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt}
-SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt}
-HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS_ENKF}.txt}
-ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt}
-VLOCALEIG=${VLOCALEIG:-${FIXgsi}/vlocal_eig_l${LEVS_ENKF}.dat}
+SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt}
+SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt}
+CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt}
+OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt}
+SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt}
+HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS_ENKF}.txt}
+ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS_ENKF}.txt}
+VLOCALEIG=${VLOCALEIG:-${FIXgfs}/gsi/vlocal_eig_l${LEVS_ENKF}.dat}
 ENKF_SUFFIX="s"
 [[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX=""
 
@@ -178,12 +182,17 @@ else
 fi
 nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g')
 for imem in $(seq 1 $NMEM_ENS); do
+   smem=$((imem + mem_offset))
+   if (( smem > NMEM_ENS_MAX )); then
+      smem=$((smem - NMEM_ENS_MAX))
+   fi
+   gmemchar="mem"$(printf %03i $smem)
    memchar="mem"$(printf %03i $imem)
 
-   MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \
+   MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -x \
       COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL
 
-   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \
+   MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl -x \
       COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL
 
    if [ $lobsdiag_forenkf = ".false." ]; then
@@ -203,6 +212,10 @@ for imem in $(seq 1 $NMEM_ENS); do
    for FHR in $nfhrs; do
       ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}.nc" \
          "sfg_${PDY}${cyc}_fhr0${FHR}_${memchar}"
+      if [ $GSI_SOILANAL = "YES" ]; then
+         ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}${ENKF_SUFFIX}.nc" \
+             "bfg_${PDY}${cyc}_fhr0${FHR}_${memchar}"
+      fi
       if [ $cnvw_option = ".true." ]; then
          ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}.nc" \
             "sfgsfc_${PDY}${cyc}_fhr0${FHR}_${memchar}"
@@ -224,6 +237,10 @@ for imem in $(seq 1 $NMEM_ENS); do
                "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}"
          fi
       fi
+      if [ $GSI_SOILANAL = "YES" ]; then
+          ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}sfci00${FHR}.nc" \
+           "sfcincr_${PDY}${cyc}_fhr0${FHR}_${memchar}"
+      fi
    done
 done
 
@@ -238,11 +255,11 @@ for FHR in $nfhrs; do
    fi
 done
 
-if [ $USE_CFP = "YES" ]; then
+if [[ $USE_CFP = "YES" ]]; then
    chmod 755 $DATA/mp_untar.sh
    ncmd=$(cat $DATA/mp_untar.sh | wc -l)
-   if [ $ncmd -gt 0 ]; then
-      ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max))
+   if [[ $ncmd -gt 0 ]]; then
+      ncmd_max=$((ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node))
       APRUNCFP=$(eval echo $APRUNCFP)
       $APRUNCFP $DATA/mp_untar.sh
       export err=$?; err_chk
@@ -398,8 +415,8 @@ cat stdout stderr > "${COM_ATMOS_ANALYSIS_STAT}/${ENKFSTAT}"
 
 ################################################################################
 #  Postprocessing
-cd $pwd
-[[ $mkdata = "YES" ]] && rm -rf $DATA
+cd "$pwd"
+[[ $mkdata = "YES" ]] && rm -rf "${DATA}"
 
 
-exit $err
+exit ${err}
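
The CFP hunk above caps the width of the command file at the per-node task limit with a bash arithmetic conditional, ncmd_max=$((ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node)). A standalone sketch of building and sizing such a command file; mp_untar.sh follows the naming in the hunk, while the work items and launcher line are placeholders:

    #!/usr/bin/env bash
    # Sketch: build a CFP-style command file and cap its width at the node limit.
    DATA=${DATA:-.}
    max_tasks_per_node=${max_tasks_per_node:-128}

    : > "${DATA}/mp_untar.sh"
    for memchar in mem001 mem002 mem003; do
      echo "tar -xvf diag_${memchar}.tar" >> "${DATA}/mp_untar.sh"   # placeholder work items
    done
    chmod 755 "${DATA}/mp_untar.sh"

    ncmd=$(wc -l < "${DATA}/mp_untar.sh")
    if (( ncmd > 0 )); then
      ncmd_max=$(( ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node ))
      echo "would launch: mpiexec -np ${ncmd_max} cfp ${DATA}/mp_untar.sh"   # APRUNCFP normally supplies this
    fi
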
diff --git a/scripts/exgdas_global_marine_analysis_letkf.py b/scripts/exgdas_global_marine_analysis_letkf.py
new file mode 100755
index 0000000000..37ca837889
--- /dev/null
+++ b/scripts/exgdas_global_marine_analysis_letkf.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# exgdas_global_marine_analysis_letkf.py
+# This script instantiates a MarineLETKF object
+# and runs the initialize, run, and finalize methods
+# which currently are stubs
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.marine_letkf import MarineLETKF
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the marine letkf task
+    MarineLetkf = MarineLETKF(config)
+    MarineLetkf.initialize()
+    MarineLetkf.run()
+    MarineLetkf.finalize()
diff --git a/scripts/exgfs_aero_init_aerosol.py b/scripts/exgfs_aero_init_aerosol.py
index 1c81880ca9..d098368202 100755
--- a/scripts/exgfs_aero_init_aerosol.py
+++ b/scripts/exgfs_aero_init_aerosol.py
@@ -14,7 +14,7 @@
 CDATE: 		Initial time in YYYYMMDDHH format
 STEP_GFS: 	Forecast cadence (frequency) in hours
 FHMAX_GFS: 	Forecast length in hours
-CDUMP: 		Forecast phase (gfs or gdas). Currently always expected to be gfs.
+RUN: 		Forecast phase (gfs or gdas). Currently always expected to be gfs.
 ROTDIR: 	Rotating (COM) directory
 USHgfs: 	Path to global-workflow `ush` directory
 PARMgfs: 	Path to global-workflow `parm` directory
@@ -41,14 +41,14 @@
 from functools import partial
 
 # Constants
-atm_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/model_data/atmos/input"       # Location of atmosphere ICs
+atm_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model_data/atmos/input"         # Location of atmosphere ICs
 atm_file_pattern = "{path}/gfs_data.{tile}.nc"                                # Atm IC file names
 atm_ctrl_pattern = "{path}/gfs_ctrl.nc"                                       # Atm IC control file name
-restart_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/model_data/atmos/restart"   # Location of restart files (time of previous run)
+restart_base_pattern = "{rot_dir}/{run}.%Y%m%d/%H/model_data/atmos/restart"   # Location of restart files (time of previous run)
 restart_file_pattern = "{file_base}/{timestamp}fv_core.res.{tile}.nc"         # Name of restart data files (time when restart is valid)
 tracer_file_pattern = "{file_base}/{timestamp}fv_tracer.res.{tile}.nc"        # Name of restart tracer files (time when restart is valid)
 dycore_file_pattern = "{file_base}/{timestamp}fv_core.res.nc"                 # Name of restart dycore file (time when restart is valid)
-tracer_list_file_pattern = "{parm_gfs}/ufs/gocart/gocart_tracer.list"               # Text list of tracer names to copy
+tracer_list_file_pattern = "{parm_gfs}/ufs/gocart/gocart_tracer.list"         # Text list of tracer names to copy
 merge_script_pattern = "{ush_gfs}/merge_fv3_aerosol_tile.py"
 n_tiles = 6
 max_lookback = 4                                                              # Maximum number of past cycles to look for for tracer data
@@ -68,7 +68,7 @@ def main() -> None:
     cdate = get_env_var("CDATE")
     incr = int(get_env_var('STEP_GFS'))
     fcst_length = int(get_env_var('FHMAX_GFS'))
-    cdump = get_env_var("CDUMP")
+    run = get_env_var("RUN")
     rot_dir = get_env_var("ROTDIR")
     ush_gfs = get_env_var("USHgfs")
     parm_gfs = get_env_var("PARMgfs")
@@ -86,7 +86,7 @@ def main() -> None:
             print(f'{var} = {f"{var}"}')
 
     atm_files, ctrl_files = get_atm_files(atm_source_path)
-    tracer_files, rest_files, core_files = get_restart_files(time, incr, max_lookback, fcst_length, rot_dir, cdump)
+    tracer_files, rest_files, core_files = get_restart_files(time, incr, max_lookback, fcst_length, rot_dir, run)
 
     if (tracer_files is not None):
         merge_tracers(merge_script, atm_files, tracer_files, rest_files, core_files[0], ctrl_files[0], tracer_list_file)
@@ -167,7 +167,7 @@ def get_atm_files(path: str) -> typing.List[typing.List[str]]:
     return file_list
 
 
-def get_restart_files(time: datetime, incr: int, max_lookback: int, fcst_length: int, rot_dir: str, cdump: str) -> typing.List[typing.List[str]]:
+def get_restart_files(time: datetime, incr: int, max_lookback: int, fcst_length: int, rot_dir: str, run: str) -> typing.List[typing.List[str]]:
     '''
     Determines the last cycle where all the necessary restart files are available. Ideally the immediate previous cycle
 
@@ -183,8 +183,8 @@ def get_restart_files(time: datetime, incr: int, max_lookback: int, fcst_length:
             Length of forecast in hours
     rot_dir : str
             Path to the ROTDIR (COM) directory
-    cdump : str
-            CDUMP of current forecast portion (currently should always be 'gfs')
+    run : str
+            RUN of current forecast portion (currently should always be 'gfs')
 
     Returns
     ----------
diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh
index 7546f3cabe..4959bbd8e8 100755
--- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh
+++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh
@@ -19,7 +19,7 @@
 # echo " "
 ###############################################################################
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 fcsthrs="$1"
 num=$#
@@ -38,25 +38,19 @@ fi
 cd "${DATA}" || exit 2
 
 # "Import" functions used in this script
-source "${HOMEgfs}/ush/product_functions.sh"
+source "${USHgfs}/product_functions.sh"
 
 ###############################################
 # Wait for the availability of the pgrb file
 ###############################################
-icnt=1
-while (( icnt < 1000 )); do
-   if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then
-      break
-   fi
-
-   sleep 10
-   icnt=$((icnt + 1))
-   if (( icnt >= 180 )); then
-      msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting"
-      err_exit "${msg}"
-      exit 5
-   fi
-done
+sleep_interval=10
+max_tries=180
+idxfile="${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx"
+if ! wait_for_file "${idxfile}" "${sleep_interval}" "${max_tries}"; then
+  msg="FATAL ERROR: No GFS pgrb2 file after waiting"
+  err_exit "${msg}"
+  exit 5
+fi
 
 ########################################
 
@@ -91,7 +85,7 @@ export opt28=' -new_grid_interpolation budget -fi '
 cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2${fcsthrs}"
 cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b${fcsthrs}"
 cat "tmpfile2${fcsthrs}" "tmpfile2b${fcsthrs}" > "tmpfile${fcsthrs}"
-${WGRIB2} "tmpfile${fcsthrs}" | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \
+${WGRIB2} "tmpfile${fcsthrs}" | grep -F -f "${PARMgfs}/product/gfs_awips_parmlist_g2" | \
    ${WGRIB2} -i -grib masterfile "tmpfile${fcsthrs}"
 export err=$?
 if [[ $err -ne 0 ]]; then
@@ -179,7 +173,7 @@ for GRID in conus ak prico pac 003; do
       export FORT31="awps_file_fi${fcsthrs}_${GRID}"
       export FORT51="grib2.awpgfs${fcsthrs}.${GRID}"
 
-      cp "${PARMwmo}/grib2_awpgfs${fcsthrs}.${GRID}" "parm_list"
+      cp "${PARMgfs}/wmo/grib2_awpgfs${fcsthrs}.${GRID}" "parm_list"
       if [[ ${DO_WAVE} != "YES" ]]; then
          # Remove wave field if not running wave model
          grep -vw "5WAVH" "parm_list" > "parm_list_temp"
@@ -213,7 +207,7 @@ for GRID in conus ak prico pac 003; do
       export FORT31="awps_file_fi${fcsthrs}_${GRID}"
       export FORT51="grib2.awpgfs_20km_${GRID}_f${fcsthrs}"
 
-      cp "${PARMwmo}/grib2_awpgfs_20km_${GRID}f${fcsthrs}" "parm_list"
+      cp "${PARMgfs}/wmo/grib2_awpgfs_20km_${GRID}f${fcsthrs}" "parm_list"
       if [[ ${DO_WAVE} != "YES" ]]; then
          # Remove wave field if not running wave model
          grep -vw "5WAVH" "parm_list" > "parm_list_temp"
diff --git a/scripts/exgfs_atmos_fbwind.sh b/scripts/exgfs_atmos_fbwind.sh
index 735a906bff..401ce51380 100755
--- a/scripts/exgfs_atmos_fbwind.sh
+++ b/scripts/exgfs_atmos_fbwind.sh
@@ -14,15 +14,15 @@
 # echo "         Nov 2019 - B Vuong  Removed WINTEMV bulletin (retired)"
 #####################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${HOMEgfs}/ush/preamble.sh"
 
-cd $DATA
+cd "${DATA}" || exit 2
 
 ######################
 # Set up Here Files.
 ######################
 
-job_name=$(echo $job|sed 's/[jpt]gfs/gfs/')
+outfile_name="${COMOUT}/${RUN}.atmos.t${cyc}z.fbwind.pacific.ascii"
 
 set +x
 echo " "
@@ -34,20 +34,17 @@ echo " "
 set_trace
 
 export pgm=bulls_fbwndgfs
-. prep_step
-
-for fhr in 006 012 024
-do
-
-  cp $COMIN/gfs.${cycle}.pgrb2.0p25.f${fhr}   tmp_pgrb2_0p25${fhr} 
-  cp $COMIN/gfs.${cycle}.pgrb2b.0p25.f${fhr}  tmp_pgrb2b_0p25${fhr} 
-  cat tmp_pgrb2_0p25${fhr} tmp_pgrb2b_0p25${fhr} > tmp0p25filef${fhr} 
-  $WGRIB2 tmp0p25filef${fhr} | grep  -F -f $PARMproduct/gfs_fbwnd_parmlist_g2 | $WGRIB2 -i -grib tmpfilef${fhr} tmp0p25filef${fhr}
-  $CNVGRIB -g21 tmpfilef${fhr} tmpfilef${fhr}.grib1
-  $GRBINDEX tmpfilef${fhr}.grib1 tmpfilef${fhr}.grib1i
-  mv tmpfilef${fhr}.grib1   gfs.t${cyc}z.grbf${fhr}_grb1
-  mv tmpfilef${fhr}.grib1i  gfs.t${cyc}z.grbif${fhr}_grb1
-
+source prep_step
+
+for fhr3 in 006 012 024; do
+  cp "${COMIN_ATMOS_GRIB_0p25}/gfs.${cycle}.pgrb2.0p25.f${fhr3}"   "tmp_pgrb2_0p25${fhr3}" 
+  cp "${COMIN_ATMOS_GRIB_0p25}/gfs.${cycle}.pgrb2b.0p25.f${fhr3}"  "tmp_pgrb2b_0p25${fhr3}"
+  cat "tmp_pgrb2_0p25${fhr3}" "tmp_pgrb2b_0p25${fhr3}" > "tmp0p25filef${fhr3}"
+  # shellcheck disable=SC2312
+  ${WGRIB2} "tmp0p25filef${fhr3}" | grep -F -f "${PARMgfs}/product/gfs_fbwnd_parmlist_g2" | \
+    ${WGRIB2} -i -grib "tmpfilef${fhr3}" "tmp0p25filef${fhr3}"
+  ${CNVGRIB} -g21 "tmpfilef${fhr3}" "gfs.t${cyc}z.grbf${fhr3}_grb1"
+  ${GRBINDEX} "gfs.t${cyc}z.grbf${fhr3}_grb1" "gfs.t${cyc}z.grbf${fhr3}_grb1.idx"
 done
 
 export FORT11="gfs.t${cyc}z.grbf006_grb1"
@@ -56,9 +53,9 @@ export FORT13="gfs.t${cyc}z.grbf024_grb1"
 
 #       GFS grib index files
 
-export FORT31="gfs.t${cyc}z.grbif006_grb1"
-export FORT32="gfs.t${cyc}z.grbif012_grb1"
-export FORT33="gfs.t${cyc}z.grbif024_grb1"
+export FORT31="gfs.t${cyc}z.grbf006_grb1.idx"
+export FORT32="gfs.t${cyc}z.grbf012_grb1.idx"
+export FORT33="gfs.t${cyc}z.grbf024_grb1.idx"
 
 #
 #   1280 byte transmission file
@@ -66,21 +63,11 @@ export FORT33="gfs.t${cyc}z.grbif024_grb1"
 
 export FORT51="tran.fbwnd_pacific"
 
-startmsg
+cp "${PARMgfs}/product/fbwnd_pacific.stnlist" fbwnd_pacific.stnlist
 
-$EXECgfs/fbwndgfs < $PARMproduct/fbwnd_pacific.stnlist >> $pgmout 2> errfile
+"${EXECgfs}/fbwndgfs.x" < fbwnd_pacific.stnlist >> "${pgmout}" 2> errfile
 export err=$?; err_chk
 
-
-cp tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name
-
-if test "$SENDDBN" = 'YES'
-then
-#    make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name
-   ${USHgfs}/make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name
-fi
-
-#####################################################################
-
+"${USHgfs}/make_ntc_bull.pl" WMOBH NONE KWNO NONE tran.fbwnd_pacific "${outfile_name}"
 
 ############################### END OF SCRIPT #######################
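
The rewritten fbwind loop builds each forecast hour's input by concatenating the pgrb2 and pgrb2b files, subsets it with wgrib2 against a parameter list, converts the subset to GRIB1 with cnvgrib, and indexes it with grbindex so the Fortran executable can read it through the fixed FORT11-FORT33 units (the FORT3x renames above simply point at the new .idx names). A hedged sketch of that per-hour pipeline outside the workflow environment; tool names match the diff, while input paths and the parameter list are placeholders:

    #!/usr/bin/env bash
    # Sketch of the subset -> GRIB1 -> index chain used above (paths are illustrative).
    WGRIB2=${WGRIB2:-wgrib2}
    CNVGRIB=${CNVGRIB:-cnvgrib}
    GRBINDEX=${GRBINDEX:-grbindex}
    parmlist="gfs_fbwnd_parmlist_g2"          # fixed strings matched against the wgrib2 inventory

    for fhr3 in 006 012 024; do
      cat "pgrb2.0p25.f${fhr3}" "pgrb2b.0p25.f${fhr3}" > "tmp0p25filef${fhr3}"
      ${WGRIB2} "tmp0p25filef${fhr3}" | grep -F -f "${parmlist}" | \
        ${WGRIB2} -i -grib "tmpfilef${fhr3}" "tmp0p25filef${fhr3}"
      ${CNVGRIB} -g21 "tmpfilef${fhr3}" "gfs.grbf${fhr3}_grb1"        # GRIB2 -> GRIB1
      ${GRBINDEX} "gfs.grbf${fhr3}_grb1" "gfs.grbf${fhr3}_grb1.idx"   # index read via FORT3x
    done
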
diff --git a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh
index 2dd7fa886a..f7e981c6b6 100755
--- a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh
+++ b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh
@@ -7,107 +7,79 @@
 # in the future, we should move it above somewhere else.
 ##############################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${HOMEgfs}/ush/preamble.sh"
 
-cd $DATA
+cd "${DATA}" || exit 1
 
-export NTS=$USHgempak/restore
+export NTS="${HOMEgfs}/gempak/ush/restore"
 
-if [ $MODEL = GDAS -o $MODEL = GFS ]
-then
-    case $MODEL in
-      GDAS) fcsthrs="00";;
-      GFS)  fcsthrs="00 12 24 36 48";;
+if [[ "${MODEL}" == GDAS ]] || [[ "${MODEL}" == GFS ]]; then
+    case "${MODEL}" in
+        GDAS) fcsthrs="0";;
+        GFS)  fcsthrs="0 12 24 36 48";;
+        *)
+            echo "FATAL ERROR: Unrecognized model type ${MODEL}"
+            exit 5
+            ;;
     esac
 
-    export fhr
-    for fhr in $fcsthrs
-    do
-        icnt=1
-        maxtries=180
-        export GRIBFILE=${COMIN}/${RUN}_${PDY}${cyc}f0${fhr}
-        while [ $icnt -lt 1000 ]
-        do
-          if [ -r ${COMIN}/${RUN}_${PDY}${cyc}f0${fhr} ] ; then
-            sleep 5
-            break
-          else
-            echo "The process is waiting ... ${GRIBFILE} file to proceed."
-            sleep 20
-            let "icnt=icnt+1"
-          fi
-          if [ $icnt -ge $maxtries ]
-          then
-            echo "ABORTING: after 1 hour of waiting for ${GRIBFILE} file at F$fhr to end."
-            export err=7 ; err_chk
-            exit $err
-          fi
-        done
-
-       cp ${COMIN}/${RUN}_${PDY}${cyc}f0${fhr} gem_grids${fhr}.gem
-
-#       if [ $cyc -eq 00 -o $cyc -eq 12 ]
-       #then
-          $USHgempak/gempak_${RUN}_f${fhr}_gif.sh
-       #fi
-
+    sleep_interval=20
+    max_tries=180
+    for fhr in ${fcsthrs}; do
+        fhr3=$(printf %03d "${fhr}")
+        export GRIBFILE=${COM_ATMOS_GEMPAK_1p00}/${RUN}_1p00_${PDY}${cyc}f${fhr3}
+        if ! wait_for_file "${GRIBFILE}" "${sleep_interval}" "${max_tries}" ; then
+            echo "FATAL ERROR: ${GRIBFILE} not found after ${max_tries} iterations"
+            exit 10
+        fi
+
+        cp "${GRIBFILE}" "gem_grids${fhr3}.gem"
+        export fhr3
+        if (( fhr == 0 )); then
+            "${HOMEgfs}/gempak/ush/gempak_${RUN}_f000_gif.sh"
+        else
+            "${HOMEgfs}/gempak/ush/gempak_${RUN}_fhhh_gif.sh"
+        fi
     done
 fi
 
-####################################################################################
-# echo "-----------------------------------------------------------------------------"
-# echo "GFS MAG postprocessing script exmag_sigman_skew_k_gfs_gif_ncdc_skew_t.sh "
-# echo "-----------------------------------------------------------------------------"
-# echo "History: Mar 2012 added to processing for enhanced MAG skew_t"
-# echo "2012-03-11 Mabe -- reworked script to add significant level "
-# echo "  data to existing mandatory level data in a new file"
-# echo "2013-04-24 Mabe -- Reworked to remove unneeded output with "
-# echo "  conversion to WCOSS"
-# Add ms to filename to make it different since it has both mandatory
-# and significant level data      $COMOUT/${RUN}.${cycle}.msupperair
-#                             $COMOUT/${RUN}.${cycle}.msupperairtble
-#####################################################################################
-
-cd $DATA
-
-export RSHPDY=$(echo $PDY | cut -c5-)$(echo $PDY | cut -c3-4)
-
-cp $HOMEgfs/gempak/dictionaries/sonde.land.tbl .
-cp $HOMEgfs/gempak/dictionaries/metar.tbl .
+cd "${DATA}" || exit 1
+
+export RSHPDY="${PDY:4:4}${PDY:2:2}"
+
+cp "${HOMEgfs}/gempak/dictionaries/sonde.land.tbl" sonde.land.tbl
+cp "${HOMEgfs}/gempak/dictionaries/metar.tbl" metar.tbl
 sort -k 2n,2 metar.tbl > metar_stnm.tbl
-cp $COMINobsproc/${model}.$cycle.adpupa.tm00.bufr_d fort.40
-export err=$?
-if [[ $err -ne 0 ]] ; then
-   echo " File ${model}.$cycle.adpupa.tm00.bufr_d does not exist."
-   exit $err
+cp "${COM_OBS}/${model}.${cycle}.adpupa.tm00.bufr_d" fort.40
+err=$?
+if (( err != 0 )) ; then
+   echo "FATAL ERROR: File ${model}.${cycle}.adpupa.tm00.bufr_d could not be copied (does it exist?)."
+   exit "${err}"
 fi
-# $RDBFMSUA  >> $pgmout 2> errfile
-${UTILgfs}/exec/rdbfmsua >> $pgmout 2> errfile
 
+"${HOMEgfs}/exec/rdbfmsua.x" >> "${pgmout}" 2> errfile
 err=$?;export err ;err_chk
 
+# shellcheck disable=SC2012,SC2155
 export filesize=$( ls -l rdbfmsua.out | awk '{print $5}' )
 
 ################################################################
 #   only run script if rdbfmsua.out contained upper air data.
 ################################################################
 
-if [ $filesize -gt 40 ]
-then
-
-    cp rdbfmsua.out $COMOUT/${RUN}.${cycle}.msupperair
-    cp sonde.idsms.tbl $COMOUT/${RUN}.${cycle}.msupperairtble
-    if [ $SENDDBN = "YES" ]; then
-        $DBNROOT/bin/dbn_alert DATA MSUPPER_AIR $job $COMOUT/${RUN}.${cycle}.msupperair
-        $DBNROOT/bin/dbn_alert DATA MSUPPER_AIRTBL $job $COMOUT/${RUN}.${cycle}.msupperairtble
+if (( filesize > 40 )); then
+    cp rdbfmsua.out "${COM_ATMOS_GEMPAK_UPPER_AIR}/${RUN}.${cycle}.msupperair"
+    cp sonde.idsms.tbl "${COM_ATMOS_GEMPAK_UPPER_AIR}/${RUN}.${cycle}.msupperairtble"
+    if [[ ${SENDDBN} = "YES" ]]; then
+        "${DBNROOT}/bin/dbn_alert" DATA MSUPPER_AIR "${job}" "${COM_ATMOS_GEMPAK_UPPER_AIR}/${RUN}.${cycle}.msupperair"
+        "${DBNROOT}/bin/dbn_alert" DATA MSUPPER_AIRTBL "${job}" "${COM_ATMOS_GEMPAK_UPPER_AIR}/${RUN}.${cycle}.msupperairtble"
     fi
-
 fi
 
 ############################################################
 
-if [ -e "$pgmout" ] ; then
-   cat $pgmout
+if [[ -e "${pgmout}" ]] ; then
+   cat "${pgmout}"
 fi
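
The RSHPDY line above replaces the old cut pipeline with bash substring expansion: with PDY in YYYYMMDD form, ${PDY:4:4} is MMDD and ${PDY:2:2} is the two-digit year, reproducing the MMDDYY string the cut commands built. A quick check with an illustrative date:

    #!/usr/bin/env bash
    # ${var:offset:length} substring expansion replacing the old cut calls.
    PDY=20240516                    # YYYYMMDD (illustrative)
    RSHPDY="${PDY:4:4}${PDY:2:2}"   # MMDD + YY
    echo "${RSHPDY}"                # -> 051624
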
 
 
diff --git a/scripts/exgfs_atmos_gempak_meta.sh b/scripts/exgfs_atmos_gempak_meta.sh
index 04f4f1fc5c..6ae8c77cfb 100755
--- a/scripts/exgfs_atmos_gempak_meta.sh
+++ b/scripts/exgfs_atmos_gempak_meta.sh
@@ -1,138 +1,91 @@
 #! /usr/bin/env bash
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${HOMEgfs}/ush/preamble.sh"
 
-cd $DATA
-
-GEMGRD1=${RUN}_${PDY}${cyc}f
-#find out what fcst hr to start processing
-fhr=$fhend
+GEMGRD1="${RUN}_1p00_${PDY}${cyc}f"
 
 export numproc=23
 
-while [ $fhr -ge $fhbeg ] ; do
-  fhr=$(printf "%03d" $fhr)
-  ls -l $COMIN/$GEMGRD1${fhr}
-  err1=$?
-  if [ $err1 -eq 0 -o $fhr -eq $fhbeg ] ; then
+# Find the last hour available
+for (( fhr = fhend; fhr >= fhbeg; fhr = fhr - fhinc )) ; do
+  fhr3=$(printf "%03d" "${fhr}")
+  if [[ -r "${COM_ATMOS_GEMPAK_1p00}/${GEMGRD1}${fhr3}" ]]; then
     break
   fi
-  fhr=$(expr $fhr - $fhinc)
 done
 
-maxtries=180
+sleep_interval=20
+max_tries=180
 first_time=0
 do_all=0
 
 #loop through and process needed forecast hours
-while [ $fhr -le $fhend ]
-do
-   #
-   # First check to see if this is a rerun.  If so make all Meta files
-   if [ $fhr -gt 126 -a $first_time -eq 0 ] ; then
-     do_all=1
-   fi
-   first_time=1
-
-   if [ $fhr -eq 120 ] ; then
-      fhr=126
-   fi
-   icnt=1
-
-   while [ $icnt -lt 1000 ]
-   do
-      ls -l $COMIN/$GEMGRD1${fhr}
-      err1=$?
-      if [ $err1 -eq 0 ] ; then
-         break
-      else
-         sleep 20
-         let "icnt= icnt + 1"
-      fi
-      if [ $icnt -ge $maxtries ]
-      then
-         echo "ABORTING after 1 hour of waiting for gempak grid F$fhr to end."
-         export err=7 ; err_chk
-         exit $err
-      fi
-   done
-
-   export fhr
-
-   ########################################################
-   # Create a script to be poe'd
-   #
-   #  Note:  The number of scripts to be run MUST match the number
-   #  of total_tasks set in the ecf script, or the job will fail.
-   #
-#   if [ -f $DATA/poescript ]; then
-      rm $DATA/poescript
-#   fi
-
-   fhr=$(printf "%02d" $fhr)
-
-   if [ $do_all -eq 1 ] ; then
-     do_all=0
-     awk '{print $1}' $FIXgempak/gfs_meta > $DATA/tmpscript
-   else
-     #
-     #     Do not try to grep out 12, it will grab the 12 from 126.
-     #     This will work as long as we don't need 12 fhr metafiles
-     #
-     if [ $fhr -ne 12 ] ; then
-       grep $fhr $FIXgempak/gfs_meta |awk -F" [0-9]" '{print $1}' > $DATA/tmpscript
-     fi
-   fi
-
-   for script in $(cat $DATA/tmpscript)
-   do
-     eval "echo $script" >> $DATA/poescript
-   done
-
-   num=$(cat $DATA/poescript |wc -l)
-
-   while [ $num -lt $numproc ] ; do
-      echo "hostname" >>poescript
-      num=$(expr $num + 1)
-   done
-
-   chmod 775 $DATA/poescript
-   cat $DATA/poescript
-   export MP_PGMMODEL=mpmd
-   export MP_CMDFILE=$DATA/poescript
-
-#  If this is the final fcst hour, alert the
-#  file to all centers.
-#
-   if [ 10#$fhr -ge $fhend ] ; then
-      export DBN_ALERT_TYPE=GFS_METAFILE_LAST
-   fi
-
-   export fend=$fhr
-
-  sleep 20
-  ntasks=${NTASKS_META:-$(cat $DATA/poescript | wc -l)}
-  ptile=${PTILE_META:-4}
-  threads=${NTHREADS_META:-1}
-  export OMP_NUM_THREADS=$threads
-  APRUN="mpiexec -l -n $ntasks -ppn $ntasks --cpu-bind verbose,core cfp"
-
-  APRUN_METACFP=${APRUN_METACFP:-$APRUN}
-  APRUNCFP=$(eval echo $APRUN_METACFP)
-
-  $APRUNCFP $DATA/poescript
-  export err=$?; err_chk
+while (( fhr <= fhend )); do
+  #
+  # First check to see if this is a rerun.  If so make all Meta files
+  if (( fhr > 126 )) && (( first_time == 0 )); then
+    do_all=1
+  fi
+  first_time=1
 
-      fhr=$(printf "%03d" $fhr)
-      if [ $fhr -eq 126 ] ; then
-        let fhr=fhr+6
-      else
-	let fhr=fhr+fhinc
-      fi
-done
+  if (( fhr == 120 )); then
+    fhr=126
+  fi
 
-#####################################################################
+  fhr3=$(printf "%03d" "${fhr}")
+  gempak_file="${COM_ATMOS_GEMPAK_1p00}/${GEMGRD1}${fhr3}"
+  if ! wait_for_file "${gempak_file}" "${sleep_interval}" "${max_tries}"; then
+    echo "FATAL ERROR: gempak grid file ${gempak_file} not available after maximum wait time."
+    exit 7
+  fi
+
+  export fhr
 
+  ########################################################
+  # Create a script to be poe'd
+  #
+  #  Note:  The number of scripts to be run MUST match the number
+  #  of total_tasks set in the ecf script, or the job will fail.
+  #
+  if [[ -f poescript ]]; then
+    rm poescript
+  fi
+
+  fhr3=$(printf "%03d" "${fhr}")
+
+  if (( do_all == 1 )) ; then
+    do_all=0
+    # shellcheck disable=SC2312
+    awk '{print $1}' "${HOMEgfs}/gempak/fix/gfs_meta" | envsubst > "poescript"
+  else
+    #
+    #    Do not try to grep out 12, it will grab the 12 from 126.
+    #    This will work as long as we don't need 12 fhr metafiles
+    #
+    if (( fhr != 12 )) ; then
+      # shellcheck disable=SC2312
+      grep "${fhr}" "${HOMEgfs}/gempak/fix/gfs_meta" | awk -F" [0-9]" '{print $1}' | envsubst > "poescript"
+    fi
+  fi
+
+  #  If this is the final fcst hour, alert the
+  #  file to all centers.
+  #
+  if (( fhr >= fhend )) ; then
+    export DBN_ALERT_TYPE=GFS_METAFILE_LAST
+  fi
+
+  export fend=${fhr}
+
+  cat poescript
+
+  "${HOMEgfs}/ush/run_mpmd.sh" poescript
+  export err=$?; err_chk
+
+  if (( fhr == 126 )) ; then
+    fhr=$((fhr + 6))
+  else
+    fhr=$((fhr + fhinc))
+  fi
+done
 
 exit
-#
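
The meta job now assembles its MPMD command file by grepping the matching entries out of the gempak/fix/gfs_meta table, stripping the trailing hour lists with awk, expanding embedded variables with envsubst, and handing the result to run_mpmd.sh. A small sketch of that assembly step; the table contents below are invented, and HOMEgfs is only set so envsubst has something to expand:

    #!/usr/bin/env bash
    # Sketch: build an MPMD command file ("poescript") for one forecast hour.
    export HOMEgfs=${HOMEgfs:-/path/to/global-workflow}   # envsubst expands exported variables only
    fhr=24

    printf '%s\n' \
      '${HOMEgfs}/gempak/ush/gfs_meta_us.sh 24 48 96' \
      '${HOMEgfs}/gempak/ush/gfs_meta_sa.sh 24 48' \
      '${HOMEgfs}/gempak/ush/gfs_meta_trop.sh 96' > gfs_meta_demo   # invented entries

    # Keep entries that mention this hour, drop the trailing hour list,
    # and expand ${...} so poescript holds runnable commands for the MPMD launcher.
    grep "${fhr}" gfs_meta_demo | awk -F" [0-9]" '{print $1}' | envsubst > poescript
    cat poescript
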
diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh
index 583593fef8..86b0eea795 100755
--- a/scripts/exgfs_atmos_goes_nawips.sh
+++ b/scripts/exgfs_atmos_goes_nawips.sh
@@ -1,41 +1,37 @@
 #! /usr/bin/env bash
 
 ###################################################################
-# echo "----------------------------------------------------"
 # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids"
-# echo "----------------------------------------------------"
-# echo "History: Mar 2000 - First implementation of this new script."
-# echo "S Lilly: May 2008 - add logic to make sure that all of the "
-# echo "                    data produced from the restricted ECMWF"
-# echo "                    data on the CCS is properly protected."
-# echo "C. Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac."
-#####################################################################
-
-source "$HOMEgfs/ush/preamble.sh"
-
-cd $DATA
-
-cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl
-cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl
-cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl
-cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl
-
-#
-# NAGRIB_TABLE=$FIXgempak/nagrib.tbl
-NAGRIB=$GEMEXE/nagrib2
-#
-
-entry=$(grep "^$RUN2 " $NAGRIB_TABLE | awk 'index($1,"#") != 1 {print $0}')
-
-if [ "$entry" != "" ] ; then
-  cpyfil=$(echo $entry  | awk 'BEGIN {FS="|"} {print $2}')
-  garea=$(echo $entry   | awk 'BEGIN {FS="|"} {print $3}')
-  gbtbls=$(echo $entry  | awk 'BEGIN {FS="|"} {print $4}')
-  maxgrd=$(echo $entry  | awk 'BEGIN {FS="|"} {print $5}')
-  kxky=$(echo $entry    | awk 'BEGIN {FS="|"} {print $6}')
-  grdarea=$(echo $entry | awk 'BEGIN {FS="|"} {print $7}')
-  proj=$(echo $entry    | awk 'BEGIN {FS="|"} {print $8}')
-  output=$(echo $entry  | awk 'BEGIN {FS="|"} {print $9}')
+###################################################################
+
+source "${USHgfs}/preamble.sh"
+
+cd "${DATA}" || exit 1
+fhr3=$1
+
+# "Import" functions used in this script
+source "${USHgfs}/product_functions.sh"
+
+for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do
+  cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \
+    { echo "FATAL ERROR: ${table} is missing" && exit 2 ; }
+done
+
+NAGRIB_TABLE="${HOMEgfs}/gempak/fix/nagrib.tbl"
+NAGRIB="${GEMEXE}/nagrib2"
+
+# shellcheck disable=SC2312
+entry=$(grep "^${RUN2} " "${NAGRIB_TABLE}" | awk 'index($1,"#") != 1 {print $0}')
+
+if [[ "${entry}" != "" ]] ; then
+  cpyfil=$(echo "${entry}"  | awk 'BEGIN {FS="|"} {print $2}')
+  garea=$(echo "${entry}"   | awk 'BEGIN {FS="|"} {print $3}')
+  gbtbls=$(echo "${entry}"  | awk 'BEGIN {FS="|"} {print $4}')
+  maxgrd=$(echo "${entry}"  | awk 'BEGIN {FS="|"} {print $5}')
+  kxky=$(echo "${entry}"    | awk 'BEGIN {FS="|"} {print $6}')
+  grdarea=$(echo "${entry}" | awk 'BEGIN {FS="|"} {print $7}')
+  proj=$(echo "${entry}"    | awk 'BEGIN {FS="|"} {print $8}')
+  output=$(echo "${entry}"  | awk 'BEGIN {FS="|"} {print $9}')
 else
   cpyfil=gds
   garea=dset
@@ -48,74 +44,49 @@ else
 fi  
 pdsext=no
 
-maxtries=180
-fhcnt=$fstart
-while [ $fhcnt -le $fend ] ; do
-  fhr=$(printf "%03d" $fhcnt)
-  fhcnt3=$(expr $fhr % 3)
-
-  fhr3=$(printf "03d" $fhcnt)
-  GRIBIN=$COMIN/${model}.${cycle}.${GRIB}${fhr}${EXT}
-  GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3}
-
-  GRIBIN_chk=$GRIBIN
-
-  icnt=1
-  while [ $icnt -lt 1000 ]
-  do
-    if [ -r $GRIBIN_chk ] ; then
-      break
-    else
-      sleep 20
-      let "icnt=icnt+1"
-    fi
-    if [ $icnt -ge $maxtries ]
-    then
-      echo "ABORTING after 1 hour of waiting for F$fhr to end."
-      export err=7 ; err_chk
-      exit $err
-    fi
-  done
-
-  cp $GRIBIN grib$fhr
-
-  export pgm="nagrib_nc F$fhr"
-  startmsg
-
-   $NAGRIB << EOF
-   GBFILE   = grib$fhr
-   INDXFL   = 
-   GDOUTF   = $GEMGRD
-   PROJ     = $proj
-   GRDAREA  = $grdarea
-   KXKY     = $kxky
-   MAXGRD   = $maxgrd
-   CPYFIL   = $cpyfil
-   GAREA    = $garea
-   OUTPUT   = $output
-   GBTBLS   = $gbtbls
-   GBDIAG   = 
-   PDSEXT   = $pdsext
-  l
-  r
-EOF
-  export err=$?;err_chk
 
-  $GEMEXE/gpend
 
-  cp $GEMGRD $COMOUT/.$GEMGRD
-  mv $COMOUT/.$GEMGRD $COMOUT/$GEMGRD
-  if [ $SENDDBN = "YES" ] ; then
-      $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \
-			     $COMOUT/$GEMGRD
-  else
-      echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####"
-  fi
+GEMGRD="${RUN2}_${PDY}${cyc}f${fhr3}"
+GRIBIN="${COM_ATMOS_GOES}/${model}.${cycle}.${GRIB}${fhr3}${EXT}"
+GRIBIN_chk="${GRIBIN}"
+
+if [[ ! -r "${GRIBIN_chk}" ]]; then
+  echo "FATAL ERROR: GRIB index file ${GRIBIN_chk} not found!"
+  export err=7 ; err_chk
+  exit "${err}"
+fi
+
+cp "${GRIBIN}" "grib${fhr3}"
+
+export pgm="nagrib_nc F${fhr3}"
+startmsg
+
+${NAGRIB} << EOF
+GBFILE   = grib${fhr3}
+INDXFL   = 
+GDOUTF   = ${GEMGRD}
+PROJ     = ${proj}
+GRDAREA  = ${grdarea}
+KXKY     = ${kxky}
+MAXGRD   = ${maxgrd}
+CPYFIL   = ${cpyfil}
+GAREA    = ${garea}
+OUTPUT   = ${output}
+GBTBLS   = ${gbtbls}
+GBDIAG   = 
+PDSEXT   = ${pdsext}
+l
+r
+EOF
 
-  let fhcnt=fhcnt+finc
-done
+export err=$?; err_chk
 
-#####################################################################
+cpfs "${GEMGRD}" "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}"
+if [[ ${SENDDBN} == "YES" ]] ; then
+  "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+    "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}"
+fi
 
+"${GEMEXE}/gpend"
 
 ############################### END OF SCRIPT #######################
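
The nagrib.tbl lookup above selects the non-comment row whose leading field matches ${RUN2} and then pulls each pipe-delimited column out with its own awk call. The same parse can be written as a single read with IFS set to the pipe character, which makes the column-to-NAGRIB-parameter mapping easier to audit; the row below is invented for illustration (real entries live in gempak/fix/nagrib.tbl):

    #!/usr/bin/env bash
    # Sketch: parse one pipe-delimited nagrib.tbl-style row in a single read.
    entry='gfs_0p25|gds|dset|g2varsncep1.tbl|4999|#|#|#|T'   # invented example row
    IFS='|' read -r run2 cpyfil garea gbtbls maxgrd kxky grdarea proj output <<< "${entry}"
    echo "run2=${run2} cpyfil=${cpyfil} garea=${garea} maxgrd=${maxgrd} output=${output}"

Either way, the fallback defaults in the else branch above still apply when no matching row is found.
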
diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh
index a43c279ae6..63f5518b54 100755
--- a/scripts/exgfs_atmos_grib2_special_npoess.sh
+++ b/scripts/exgfs_atmos_grib2_special_npoess.sh
@@ -7,9 +7,9 @@
 # echo "-----------------------------------------------------"
 #####################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
-cd $DATA
+cd "${DATA}" || exit 2
 
 ############################################################
 #  Define Variables:
@@ -80,17 +80,17 @@ fi
 ##############################################################################
 # Specify Forecast Hour Range F000 - F024 for GFS_NPOESS_PGRB2_0P5DEG
 ##############################################################################
-export SHOUR=000
-export FHOUR=024
-export FHINC=003
-if [[ "${FHOUR}" -gt "${FHMAX_GFS}" ]]; then
+export SHOUR=0
+export FHOUR=24
+export FHINC=3
+if (( FHOUR > FHMAX_GFS )); then
    export FHOUR="${FHMAX_GFS}"
 fi
 
 ############################################################
 # Loop Through the Post Forecast Files 
 ############################################################
-for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
+for (( fhr=SHOUR; fhr <= FHOUR; fhr = fhr + FHINC )); do
 
    fhr3=$(printf "%03d" "${fhr}")
 
@@ -99,34 +99,22 @@ for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
    # existence of the restart files
    ###############################
    export pgm="postcheck"
-   ic=1
-   while (( ic <= SLEEP_LOOP_MAX )); do
-      if [[ -f "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}.idx" ]]; then
-         break
-      else
-         ic=$((ic + 1))
-         sleep "${SLEEP_INT}"
-      fi
-      ###############################
-      # If we reach this point assume
-      # fcst job never reached restart
-      # period and error exit
-      ###############################
-      if (( ic == SLEEP_LOOP_MAX )); then
-         echo "FATAL ERROR: 0p50 grib file not available after max sleep time"
-         export err=9
-         err_chk || exit "${err}"
-      fi
-   done
+   grib_file="${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}.idx"
+   if ! wait_for_file "${grib_file}" "${SLEEP_INT}" "${SLEEP_LOOP_MAX}"; then
+      echo "FATAL ERROR: 0p50 grib file not available after max sleep time"
+      export err=9
+      err_chk || exit "${err}"
+   fi
 
    ######################################################################
    # Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024  #
    ######################################################################
-   paramlist=${PARMproduct}/global_npoess_paramlist_g2
+   paramlist="${PARMgfs}/product/global_npoess_paramlist_g2"
    cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2.0p50.f${fhr3}" tmpfile2
    cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}" tmpfile2b
    cat tmpfile2 tmpfile2b > tmpfile
-   ${WGRIB2} tmpfile | grep -F -f ${paramlist} | ${WGRIB2} -i -grib  pgb2file tmpfile
+   # shellcheck disable=SC2312
+   ${WGRIB2} tmpfile | grep -F -f "${paramlist}" | ${WGRIB2} -i -grib  pgb2file tmpfile
    export err=$?; err_chk
 
    cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess"
@@ -135,8 +123,7 @@ for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
        "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGBNPOESS "${job}" \
 				  "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess"
    else
-       msg="File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net."
-       postmsg "${msg}" || echo "${msg}"
+       echo "File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net."
    fi
    echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.halfdeg.npoess"
    rm tmpfile pgb2file
@@ -146,18 +133,15 @@ done
 ################################################################
 # Specify Forecast Hour Range F000 - F180 for GOESSIMPGRB files 
 ################################################################
-export SHOUR=000
-export FHOUR=180
-export FHINC=003
-if [[ "${FHOUR}" -gt "${FHMAX_GFS}" ]]; then
-   export FHOUR="${FHMAX_GFS}"
-fi
+export SHOUR=${FHMIN_GFS}
+export FHOUR=${FHMAX_GOES}
+export FHINC=${FHOUT_GOES}
 
 #################################
 # Process GFS PGRB2_SPECIAL_POST
 #################################
 
-for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
+for (( fhr=SHOUR; fhr <= FHOUR; fhr = fhr + FHINC )); do
 
    fhr3=$(printf "%03d" "${fhr}")
 
@@ -165,38 +149,26 @@ for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
    # Start Looping for the 
    # existence of the restart files
    ###############################
-   set +x
    export pgm="postcheck"
-   ic=1
-   while (( ic <= SLEEP_LOOP_MAX )); do
-      if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}.idx" ]]; then
-         break
-      else
-         ic=$((ic + 1))
-         sleep "${SLEEP_INT}"
-      fi
-      ###############################
-      # If we reach this point assume
-      # fcst job never reached restart
-      # period and error exit
-      ###############################
-      if (( ic == SLEEP_LOOP_MAX )); then
-         echo "FATAL ERROR: Special goes grib file not available after max sleep time"
-         export err=9
-         err_chk || exit "${err}"
-      fi
-   done
-   set_trace
+   # grib_file="${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.goesmasterf${fhr3}.grb2"
+   grib_file="${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.special.grb2f${fhr3}"
+   if ! wait_for_file "${grib_file}" "${SLEEP_INT}" "${SLEEP_LOOP_MAX}"; then
+      echo "FATAL ERROR: GOES master grib file ${grib_file} not available after max sleep time"
+      export err=9
+      err_chk || exit "${err}"
+   fi
    ###############################
    # Put restart files into /nwges 
    # for backup to start Model Fcst
    ###############################
-   cp "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}" masterfile
+   cp "${grib_file}" masterfile
    export grid0p25="latlon 0:1440:0.25 90:721:-0.25"
+   # shellcheck disable=SC2086,SC2248
    ${WGRIB2} masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \
       ${opt27} ${opt28} -new_grid ${grid0p25} pgb2file
 
    export gridconus="lambert:253.0:50.0:50.0 214.5:349:32463.0 1.0:277:32463.0"
+   # shellcheck disable=SC2086,SC2248
    ${WGRIB2} masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \
       ${opt27} ${opt28} -new_grid ${gridconus} pgb2file2
 
diff --git a/scripts/exgfs_atmos_grib_awips.sh b/scripts/exgfs_atmos_grib_awips.sh
deleted file mode 100755
index 037b4ce191..0000000000
--- a/scripts/exgfs_atmos_grib_awips.sh
+++ /dev/null
@@ -1,135 +0,0 @@
-#! /usr/bin/env bash
-
-######################################################################
-#  UTILITY SCRIPT NAME :  exgfs_grib_awips.sh
-#         DATE WRITTEN :  10/04/2004
-#
-#  Abstract:  This utility script produces the  GFS AWIPS GRIB
-#
-#     Input:  1 arguments are passed to this script.
-#             1st argument - Forecast Hour - format of 2I
-#
-#####################################################################
-# echo "------------------------------------------------"
-# echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing"
-# echo "------------------------------------------------"
-# echo "History: OCT 2004 - First implementation of this new script."
-# echo "         JUN 2014 - Modified to remove process for AWIPS in GRIB2"
-# echo "                    to script exgfs_grib_awips_g2.sh and this "
-# echo "                    script only process AWIPS GRIB1 (211 and 225)"
-# echo "         AUG 2015 - Modified for WCOSS phase2"
-# echo "         FEB 2019 - Removed grid 225"
-#####################################################################
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-fcsthrs="$1"
-num=$#
-job_name=${job/[jpt]gfs/gfs}
-
-if (( num != 1 )); then
-   echo ""
-   echo " FATAL ERROR: Incorrect number of arguments "
-   echo ""
-   echo ""
-   echo "Usage: $0  \${fcsthrs} (3 digits) "
-   echo ""
-   exit 16
-fi
-
-cd "${DATA}/awips_g1" || exit 2
-
-# "Import" functions used in this script
-source "${HOMEgfs}/ush/product_functions.sh"
-
-###############################################
-# Wait for the availability of the pgrb file
-###############################################
-icnt=1
-while (( icnt < 1000 )); do
-   if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then
-      break
-   fi
-
-   sleep 10
-   icnt=$((icnt + 1))
-   if (( icnt >= 180 )); then
-      msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting"
-      err_exit "${msg}"
-      exit 5
-   fi
-done
-
-echo " ------------------------------------------"
-echo " BEGIN MAKING GFS GRIB1 AWIPS PRODUCTS"
-echo " ------------------------------------------"
-
-set +x
-echo " "
-echo "###############################################"
-echo " Process GFS GRIB1 AWIP PRODUCTS (211) "
-echo "###############################################"
-echo " "
-set_trace
-
-cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2"
-cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b"
-cat tmpfile2 tmpfile2b > tmpfile
-${WGRIB2} tmpfile | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \
-   ${WGRIB2} -i -grib masterfile tmpfile
-scale_dec masterfile
-${CNVGRIB} -g21 masterfile masterfile.grib1
-
-ln -s masterfile.grib1 fort.11
-
-"${HOMEgfs}/exec/overgridid.x" << EOF
-255
-EOF
-
-mv fort.51 "master.grbf${fcsthrs}"
-rm fort.11
-
-${GRBINDEX} "master.grbf${fcsthrs}" "master.grbif${fcsthrs}"
-
-###############################################################
-#    Process GFS GRIB1 AWIP GRIDS 211 PRODUCTS
-###############################################################
-
-DBNALERT_TYPE=GRIB_LOW
-
-startmsg
-
-# GRID=211 out to 240 hours:
-
-export GRID=211
-export FORT11="master.grbf${fcsthrs}"
-export FORT31="master.grbif${fcsthrs}"
-export FORT51="xtrn.awpgfs${fcsthrs}.${GRID}"
-#   $MKGFSAWPS < $PARMwmo/grib_awpgfs${fcsthrs}.${GRID} parm=KWBC >> $pgmout 2>errfile
-"${HOMEgfs}/exec/mkgfsawps.x" < "${PARMwmo}/grib_awpgfs${fcsthrs}.${GRID}" parm=KWBC >> "${pgmout}" 2>errfile
-export err=$?; err_chk 
-##############################
-# Post Files to ${COM_ATMOS_WMO}
-##############################
-
-cp "xtrn.awpgfs${fcsthrs}.${GRID}" "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}"
-
-##############################
-# Distribute Data
-##############################
-
-if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]] ; then
-    "${DBNROOT}/bin/dbn_alert" "${DBNALERT_TYPE}" "${NET}" "${job}" \
-			       "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}"
-else
-    echo "File xtrn.awpgfs${fcsthrs}.${GRID}.${job_name} not posted to db_net."
-fi
-
-if [[ -e "${pgmout}" ]] ; then
-   cat "${pgmout}"
-fi
-
-###############################################################################
-
-
-############## END OF SCRIPT #######################
diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh
index ebb509d392..9cf1969f65 100755
--- a/scripts/exgfs_atmos_nawips.sh
+++ b/scripts/exgfs_atmos_nawips.sh
@@ -1,16 +1,10 @@
 #! /usr/bin/env bash
 
 ###################################################################
-# echo "----------------------------------------------------"
 # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids"
-# echo "----------------------------------------------------"
-# echo "History: Mar 2000 - First implementation of this new script."
-# echo "S Lilly: May 2008 - add logic to make sure that all of the "
-# echo "                    data produced from the restricted ECMWF"
-# echo "                    data on the CCS is properly protected."
-#####################################################################
+###################################################################
 
-source "${HOMEgfs}/ush/preamble.sh" "${2}"
+source "${USHgfs}/preamble.sh" "${2}"
 
 #### If EMC GFS PARA runs hourly file are not available, The ILPOST
 #### will set to 3 hour in EMC GFS PARA.
@@ -18,21 +12,19 @@ source "${HOMEgfs}/ush/preamble.sh" "${2}"
 export ILPOST=${ILPOST:-1}
 
 cd "${DATA}" || exit 1
-RUN2=$1
-fend=$2
+grid=$1
+fhr3=$2
 DBN_ALERT_TYPE=$3
 destination=$4
 
-DATA_RUN="${DATA}/${RUN2}"
+DATA_RUN="${DATA}/${grid}"
 mkdir -p "${DATA_RUN}"
 cd "${DATA_RUN}" || exit 1
 
 # "Import" functions used in this script
-source "${HOMEgfs}/ush/product_functions.sh"
+source "${USHgfs}/product_functions.sh"
 
-#
 NAGRIB="${GEMEXE}/nagrib2"
-#
 
 cpyfil=gds
 garea=dset
@@ -44,90 +36,68 @@ proj=
 output=T
 pdsext=no
 
-maxtries=360
-fhcnt=${fstart}
-while (( fhcnt <= fend )) ; do
-
-  if mkdir "lock.${fhcnt}" ; then
-    cd "lock.${fhcnt}" || exit 1
-    cp "${FIXgempak}/g2varswmo2.tbl" "g2varswmo2.tbl"
-    cp "${FIXgempak}/g2vcrdwmo2.tbl" "g2vcrdwmo2.tbl"
-    cp "${FIXgempak}/g2varsncep1.tbl" "g2varsncep1.tbl"
-    cp "${FIXgempak}/g2vcrdncep1.tbl" "g2vcrdncep1.tbl"
-
-    fhr=$(printf "%03d" "${fhcnt}")
-
-    GEMGRD="${RUN2}_${PDY}${cyc}f${fhr}"
-
-    # Set type of Interpolation for WGRIB2
-    export opt1=' -set_grib_type same -new_grid_winds earth '
-    export opt1uv=' -set_grib_type same -new_grid_winds grid '
-    export opt21=' -new_grid_interpolation bilinear -if '
-    export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):"
-    export opt23=' -new_grid_interpolation neighbor -fi '
-    export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if '
-    export opt25=":(APCP|ACPCP|PRATE|CPRAT):"
-    export opt26=' -set_grib_max_bits 25 -fi -if '
-    export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):"
-    export opt28=' -new_grid_interpolation budget -fi '
-
-    case ${RUN2} in
-      # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km?
-      'gfs_0p50' | 'gfs_0p25') res=${RUN2: -4};;
-      *) res="1p00";;
-    esac
-
-    source_var="COM_ATMOS_GRIB_${res}"
-    export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}"
-    GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}.idx"
-
-    icnt=1
-    while (( icnt < 1000 )); do
-      if [[ -r "${GRIBIN_chk}" ]] ; then
-        # File available, wait 5 seconds then proceed
-        sleep 5
-        break
-      else
-        # File not available yet, wait 10 seconds and try again
-        echo "The process is waiting ... ${GRIBIN_chk} file to proceed."
-        sleep 10
-        icnt=$((icnt+1))
-      fi
-      if (( icnt >= maxtries )); then
-        echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr} to end."
-        export err=7 ; err_chk
-        exit "${err}"
-      fi
-    done
-
-    case "${RUN2}" in
-      gfs35_pac)
-        export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312'
-        # shellcheck disable=SC2086,SC2248
-        "${WGRIB2}" "${GRIBIN}" ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs35_pac} "grib${fhr}"
-        trim_rh "grib${fhr}"
-        ;;
-      gfs35_atl)
-        export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312'
-        # shellcheck disable=SC2086,SC2248
-        "${WGRIB2}" "${GRIBIN}" ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs35_atl} "grib${fhr}"
-        trim_rh "grib${fhr}"
-        ;;
-      gfs40)
-        export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0'
-        # shellcheck disable=SC2086,SC2248
-        "${WGRIB2}" "${GRIBIN}" ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs40} "grib${fhr}"
-        trim_rh "grib${fhr}"
-        ;;
-     *)
-        cp "${GRIBIN}" "grib${fhr}"
-    esac
-
-    export pgm="nagrib2 F${fhr}"
-    startmsg
-
-    ${NAGRIB} << EOF
-GBFILE   = grib${fhr}
+sleep_interval=10
+max_tries=360
+
+
+mkdir -p "lock.${fhr3}"
+cd "lock.${fhr3}" || exit 1
+
+for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do
+  cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \
+     { echo "FATAL ERROR: ${table} is missing"; exit 2; }
+done
+
+GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}"
+
+# Set type of Interpolation for WGRIB2
+export opt1=' -set_grib_type same -new_grid_winds earth '
+export opt1uv=' -set_grib_type same -new_grid_winds grid '
+export opt21=' -new_grid_interpolation bilinear -if '
+export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):"
+export opt23=' -new_grid_interpolation neighbor -fi '
+export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if '
+export opt25=":(APCP|ACPCP|PRATE|CPRAT):"
+export opt26=' -set_grib_max_bits 25 -fi -if '
+export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):"
+export opt28=' -new_grid_interpolation budget -fi '
+
+case ${grid} in
+  # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km?
+  '0p50' | '0p25') grid_in=${grid};;
+  *) grid_in="1p00";;
+esac
+
+source_var="COM_ATMOS_GRIB_${grid_in}"
+export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}"
+GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}.idx"
+
+if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then
+  echo "FATAL ERROR: ${GRIBIN_chk} not found after 1 hour of waiting at F${fhr3}"
+  export err=7 ; err_chk
+  exit "${err}"
+fi
+
+case "${grid}" in
+  35km_pac) grid_spec='latlon 130.0:416:0.312 75.125:186:-0.312';;
+  35km_atl) grid_spec='latlon 230.0:480:0.312 75.125:242:-0.312';;
+  40km)     grid_spec='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0';;
+  *)        grid_spec='';;
+esac
+
+if [[ "${grid_spec}" != "" ]]; then
+  # shellcheck disable=SC2086,SC2248
+  "${WGRIB2}" "${GRIBIN}" ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${grid_spec} "grib${fhr3}"
+  trim_rh "grib${fhr3}"
+else
+  cp "${GRIBIN}" "grib${fhr3}"
+fi
+
+export pgm="nagrib2 F${fhr3}"
+startmsg
+
+${NAGRIB} << EOF
+GBFILE   = grib${fhr3}
 INDXFL   = 
 GDOUTF   = ${GEMGRD}
 PROJ     = ${proj}
@@ -143,32 +113,16 @@ PDSEXT   = ${pdsext}
 l
 r
 EOF
-    export err=$?;err_chk
-
-    cpfs "${GEMGRD}" "${destination}/${GEMGRD}"
-    if [[ ${SENDDBN} == "YES" ]] ; then
-        "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
-				   "${destination}/${GEMGRD}"
-    fi
-    cd "${DATA_RUN}" || exit 1
-  else
-    if (( fhcnt <= 240 )) ; then
-    	if (( fhcnt < 276 )) && [[ "${RUN2}" = "gfs_0p50" ]] ; then
-    	    fhcnt=$((fhcnt+6))
-    	else
-    	    fhcnt=$((fhcnt+12))
-    	fi
-    elif ((fhcnt < 120)) && [[ "${RUN2}" = "gfs_0p25" ]] ; then
-      ####    let fhcnt=fhcnt+1
-    	fhcnt=$((hcnt + ILPOST))
-    else
-      fhcnt=$((ILPOST > finc ? fhcnt+ILPOST : fhcnt+finc ))
-    fi
-  fi
-done
 
-"${GEMEXE}/gpend"
-#####################################################################
+export err=$?;err_chk
 
+cpfs "${GEMGRD}" "${destination}/${GEMGRD}"
+if [[ ${SENDDBN} == "YES" ]] ; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \
+      "${destination}/${GEMGRD}"
+fi
+cd "${DATA_RUN}" || exit 1
+
+"${GEMEXE}/gpend"
 
 ############################### END OF SCRIPT #######################
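Editor's note: the reworked exgfs_atmos_nawips.sh now handles a single grid and forecast hour per invocation (positional arguments: grid, fhr3, DBN alert type, destination) instead of looping internally. A hypothetical per-task call is sketched below; the driving GEMPAK job is not shown in this diff, and the alert-type and destination values are placeholders, not taken from the source.

# Hypothetical invocation (placeholder values, for illustration only)
grid="0p25"      # also accepts 0p50, 1p00, 35km_pac, 35km_atl, 40km
fhr3="024"       # three-digit forecast hour
"${HOMEgfs}/scripts/exgfs_atmos_nawips.sh" "${grid}" "${fhr3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK}"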
diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh
index 368f001ed0..caf5443a50 100755
--- a/scripts/exgfs_atmos_postsnd.sh
+++ b/scripts/exgfs_atmos_postsnd.sh
@@ -20,7 +20,7 @@
 #   9) 2019-12-18       Guang Ping Lou generalizing to reading in NetCDF or nemsio
 ################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 cd $DATA
 
@@ -44,12 +44,14 @@ export NINT3=${FHOUT_GFS:-3}
 
 rm -f -r "${COM_ATMOS_BUFR}"
 mkdir -p "${COM_ATMOS_BUFR}"
-GETDIM="${HOMEgfs}/ush/getncdimlen"
+GETDIM="${USHgfs}/getncdimlen"
 LEVS=$(${GETDIM} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf000.${atmfm}" pfull)
 declare -x LEVS
 
 ### Loop for the hour and wait for the sigma and surface flux file:
 export FSTART=$STARTHOUR
+sleep_interval=10
+max_tries=360
 #
 while [ $FSTART -lt $ENDHOUR ]
 do
@@ -69,29 +71,18 @@ export FINT=$NINT1
        export MAKEBUFR=YES
    fi
 
-   ic=0
-   while [ $ic -lt 1000 ]; do
-      if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}" ]]; then
-          sleep 10
-          ic=$(expr $ic + 1)
-      else
-          break
-      fi
-
-      if [ $ic -ge 360 ]
-      then
-         err_exit "COULD NOT LOCATE logf$FEND file AFTER 1 HOUR"
-      fi
-   done
+   filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}"
+   if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then
+     err_exit "FATAL ERROR: logf${FEND} not found after waiting $((sleep_interval * ( max_tries - 1) )) secs"
+   fi
 
 ## 1-hourly output before $NEND1, 3-hourly output after
-   if [ $FEND -gt $NEND1 ]; then
+   if [[ $((10#$FEND)) -gt $((10#$NEND1)) ]]; then
      export FINT=$NINT3
    fi
-##   $USHbufrsnd/gfs_bufr.sh
-   $USHbufrsnd/gfs_bufr.sh
+   ${USHgfs}/gfs_bufr.sh
   
-   export FSTART=$FEND
+   export FSTART="${FEND}"
 done
 
 ##############################################################
@@ -114,8 +105,8 @@ fi
 # add appropriate WMO Headers.
 ########################################
 rm -rf poe_col
-for (( m = 1; m <10 ; m++ )); do
-    echo "sh ${USHbufrsnd}/gfs_sndp.sh ${m} " >> poe_col
+for (( m = 1; m <= NUM_SND_COLLECTIVES ; m++ )); do
+    echo "sh ${USHgfs}/gfs_sndp.sh ${m} " >> poe_col
 done
 
 if [[ ${CFP_MP:-"NO"} == "YES" ]]; then
@@ -129,7 +120,7 @@ chmod +x cmdfile
 
 ${APRUN_POSTSNDCFP} cmdfile
 
-sh "${USHbufrsnd}/gfs_bfr2gpk.sh"
+sh "${USHgfs}/gfs_bfr2gpk.sh"
 
 
 ############## END OF SCRIPT #######################
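Editor's note on the $((10#$FEND)) change in the postsnd hunk above: forecast hours are zero-padded strings, and bash arithmetic treats a leading zero as octal, so hours such as 08 and 09 would otherwise raise an error. A minimal illustration:

FEND=008
# (( FEND > 6 ))       # would fail: "008" is parsed as an invalid octal constant
(( 10#${FEND} > 6 ))   # works: 10# forces base-10, so this evaluates 8 > 6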
diff --git a/scripts/exgfs_atmos_wafs_blending_0p25.sh b/scripts/exgfs_atmos_wafs_blending_0p25.sh
new file mode 100755
index 0000000000..293325185e
--- /dev/null
+++ b/scripts/exgfs_atmos_wafs_blending_0p25.sh
@@ -0,0 +1,298 @@
+#!/bin/ksh
+################################################################################
+####  UNIX Script Documentation Block
+#                      .                                             .
+# Script name:         exgfs_atmos_wafs_blending_0p25.sh (copied from exgfs_atmos_wafs_blending.sh)
+# Script name:         exgfs_atmos_wafs_blending_0p25.sh (copied from exgfs_atmos_wafs_blending.sh)
+# Script description:  This script looks for US and UK WAFS GRIB2 products at 1/4 deg,
+# waits for a specified period of time, and then runs $USHgfs/wafs_blending_0p25.sh
+# if both WAFS data sets are available.  Otherwise, the job aborts with an error message.
+#
+# Author:        Y Mao       Org: EMC         Date: 2020-04-02
+#
+#
+# Script history log:
+# 2020-04-02 Y Mao
+# Oct 2021 - Remove jlogfile
+# 2022-05-25 | Y Mao | Add ICAO new milestone Nov 2023
+
+set -x
+echo "JOB $job HAS BEGUN"
+export SEND_AWC_US_ALERT=NO
+export SEND_AWC_UK_ALERT=NO
+export SEND_US_WAFS=NO
+export SEND_UK_WAFS=NO
+
+cd $DATA
+export SLEEP_LOOP_MAX=`expr $SLEEP_TIME / $SLEEP_INT`
+
+echo "start blending US and UK WAFS products at 1/4 degree for " $cyc " z cycle"
+export ffhr=$SHOUR
+
+export ic_uk=1
+
+while test $ffhr -le $EHOUR
+do
+
+##########################
+# look for US WAFS data
+##########################
+
+     export ic=1
+     while [ $ic -le $SLEEP_LOOP_MAX ]
+     do 
+       if [ -s ${COMINus}/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 ] ; then
+          break
+       fi
+       if [ $ic -eq $SLEEP_LOOP_MAX ] ; then
+          echo "US WAFS GRIB2 file $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 not found after waiting over $SLEEP_TIME seconds; continuing with UK data only"
+	  SEND_UK_WAFS=YES
+	  break
+       else
+	   ic=`expr $ic + 1`
+	   sleep $SLEEP_INT
+       fi
+     done
+
+##########################
+# look for UK WAFS data.
+##########################
+
+     SLEEP_LOOP_MAX_UK=$SLEEP_LOOP_MAX
+     
+    #  export ic=1
+     while [ $ic_uk -le $SLEEP_LOOP_MAX_UK ]
+     do
+       # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb
+       ukfiles=`ls $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 | wc -l`
+       if [ $ukfiles -ge 3 ] ; then
+           break
+       fi
+
+       if [ $ic_uk -eq $SLEEP_LOOP_MAX_UK ] ; then
+          echo "UK WAFS GRIB2 files $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 not found after waiting over $SLEEP_TIME seconds; continuing with US data only"
+          export SEND_US_WAFS=YES
+	  break
+       else
+          ic_uk=`expr $ic_uk + 1`
+          sleep $SLEEP_INT
+       fi
+     done
+
+##########################
+# If both UK and US data are missing.
+##########################
+
+     if [ $SEND_UK_WAFS = 'YES' -a $SEND_US_WAFS = 'YES' ] ; then
+	 SEND_US_WAFS=NO
+	 SEND_UK_WAFS=NO
+	 echo "BOTH UK and US data are missing, no blended product for $PDY$cyc$ffhr"
+	 export err=1; err_chk
+	 continue
+     fi
+ 
+##########################
+# Blending or unblended
+##########################
+
+     if [ $SEND_US_WAFS = 'YES' ] ; then
+	 echo "turning back on dbn alert for unblended US WAFS product"
+     elif [ $SEND_UK_WAFS = 'YES' ] ; then
+	 echo "turning back on dbn alert for unblended UK WAFS product"
+	 # retrieve UK products
+	 # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb
+	 cat $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 > EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2
+     else # elif [ $SEND_US_WAFS = "NO" -a $SEND_UK_WAFS = "NO" ] ; then
+	 # retrieve UK products
+	 # Three(3) unblended UK files for each cycle+fhour: icing, turb, cb
+	 cat $COMINuk/EGRR_WAFS_0p25_*_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 > EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2
+
+	 # pick up US data
+	 cp ${COMINus}/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 .
+
+	 # run blending code
+	 export pgm=wafs_blending_0p25.x
+	 . prep_step
+
+	 startmsg
+	 $EXECgfs/$pgm gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 \
+                              EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2 \
+                              0p25_blended_${PDY}${cyc}f${ffhr}.grib2 > f${ffhr}.out
+
+	 err1=$?
+	 if test "$err1" -ne 0
+	 then
+	     echo "WAFS blending 0p25 program failed at ${PDY}${cyc}F${ffhr}, turning back on dbn alert for unblended US WAFS product"
+	     SEND_US_WAFS=YES
+	 fi
+     fi
+
+##########################
+# Data dissemination
+##########################
+
+     if [ $SEND_US_WAFS = "YES" ] ; then
+
+	 ##############################################################################################
+	 #
+	 #  check whether unblended US WAFS products are being sent due to a missing UK WAFS GRIB2 file or a WAFS blending failure
+	 #  (Alert once for all forecast hours)
+	 #
+	 if [ $SEND_AWC_US_ALERT = "NO" ] ; then
+	     echo "WARNING! No UK WAFS GRIB2 0P25 file for WAFS blending. Send alert message to AWC ......"
+	     make_NTC_file.pl NOXX10 KKCI $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/wifs_0p25_admin_msg
+	     make_NTC_file.pl NOXX10 KWBC $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/iscs_0p25_admin_msg
+	     if [ $SENDDBN_NTC = "YES" ] ; then
+		 $DBNROOT/bin/dbn_alert NTC_LOW WAFS  $job $PCOM/wifs_0p25_admin_msg
+		 $DBNROOT/bin/dbn_alert NTC_LOW WAFS  $job $PCOM/iscs_0p25_admin_msg
+	     fi
+
+             if [ $envir != prod ]; then
+		 export maillist='nco.spa@noaa.gov'
+             fi
+             export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'}
+             export subject="WARNING! No UK WAFS GRIB2 0P25 file for WAFS blending, $PDY t${cyc}z $job"
+             echo "*************************************************************" > mailmsg
+             echo "*** WARNING! No UK WAFS GRIB2 0P25 file for WAFS blending ***" >> mailmsg
+             echo "*************************************************************" >> mailmsg
+             echo >> mailmsg
+             echo "Send alert message to AWC ...... " >> mailmsg
+             echo >> mailmsg
+             cat mailmsg > $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_usonly.emailbody
+             cat $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_usonly.emailbody | mail.py -s "$subject" $maillist -v
+
+	     export SEND_AWC_US_ALERT=YES
+	 fi
+	 ##############################################################################################
+	 #
+	 #   Distribute US WAFS unblend Data to NCEP FTP Server (WOC) and TOC
+	 #
+	 echo "alerting the unblended US WAFS products - $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2 "
+	 echo "and $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2.idx "
+
+	 if [ $SENDDBN = "YES" ] ; then
+	     $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2 $job $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2
+	     $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2_WIDX $job $COMINus/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2.idx
+	 fi
+
+#	 if [ $SENDDBN_NTC = "YES" ] ; then
+#	     $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr}.grib2
+#	 fi
+
+
+	 export SEND_US_WAFS=NO
+
+     elif [ $SEND_UK_WAFS = "YES" ] ; then
+	 ##############################################################################################
+	 #
+	 #  check whether unblended UK WAFS products are being sent due to a missing US WAFS GRIB2 file
+	 #  (Alert once for all forecast hours)
+	 #
+	 if [ $SEND_AWC_UK_ALERT = "NO" ] ; then
+	     echo "WARNING: No US WAFS GRIB2 0P25 file for WAFS blending. Send alert message to AWC ......"
+	     make_NTC_file.pl NOXX10 KKCI $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/wifs_0p25_admin_msg
+	     make_NTC_file.pl NOXX10 KWBC $PDY$cyc NONE $FIXgfs/wafs_blending_0p25_admin_msg $PCOM/iscs_0p25_admin_msg
+	     if [ $SENDDBN_NTC = "YES" ] ; then
+		 $DBNROOT/bin/dbn_alert NTC_LOW WAFS  $job $PCOM/wifs_0p25_admin_msg
+		 $DBNROOT/bin/dbn_alert NTC_LOW WAFS  $job $PCOM/iscs_0p25_admin_msg
+	     fi
+
+             if [ $envir != prod ]; then
+                 export maillist='nco.spa@noaa.gov'
+             fi
+             export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'}
+             export subject="WARNING! No US WAFS GRIB2 0P25 file for WAFS blending, $PDY t${cyc}z $job"
+             echo "*************************************************************" > mailmsg
+             echo "*** WARNING! No US WAFS GRIB2 0P25 file for WAFS blending ***" >> mailmsg
+             echo "*************************************************************" >> mailmsg
+             echo >> mailmsg
+             echo "Send alert message to AWC ...... " >> mailmsg
+             echo >> mailmsg
+             cat mailmsg > $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_ukonly.emailbody
+             cat $COMOUT/${RUN}.t${cyc}z.wafs_blend_0p25_ukonly.emailbody | mail.py -s "$subject" $maillist -v
+
+	     export SEND_AWC_UK_ALERT=YES
+	 fi
+	 ##############################################################################################
+	 #
+	 #   Distribute UK WAFS unblend Data to NCEP FTP Server (WOC) and TOC
+	 #
+	 echo "alerting the unblended UK WAFS products - EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2"
+
+	 if [ $SENDDBN = "YES" ] ; then
+	     $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_UKMET_0P25_UBL_GB2 $job EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2
+	 fi
+
+#	 if [ $SENDDBN_NTC = "YES" ] ; then
+#	     $DBNROOT/bin/dbn_alert NTC_LOW $NET $job EGRR_WAFS_0p25_unblended_${PDY}_${cyc}z_t${ffhr}.grib2
+#	 fi
+	 export SEND_UK_WAFS=NO
+
+
+     else
+	 ##############################################################################################
+	 #
+	 # TOCGRIB2 Processing WAFS Blending GRIB2 (Icing, CB, GTG)
+
+	 # As in August 2020, no WMO header is needed for WAFS data at 1/4 deg
+	 ## . prep_step
+	 ## export pgm=$TOCGRIB2
+	 ## startmsg
+
+	 ## export FORT11=0p25_blended_${PDY}${cyc}f${ffhr}.grib2
+	 ## export FORT31=" "
+	 ## export FORT51=grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr}
+
+	 ## $TOCGRIB2 <  $FIXgfs/grib2_blended_wafs_wifs_f${ffhr}.0p25 >> $pgmout 2> errfile
+
+	 ## err=$?;export err ;err_chk
+	 ## echo " error from tocgrib=",$err
+
+	 ##############################################################################################
+	 #
+	 #   Distribute US WAFS unblend Data to NCEP FTP Server (WOC) and TOC
+	 #
+	 if [ $SENDCOM = YES ]; then
+	     cp 0p25_blended_${PDY}${cyc}f${ffhr}.grib2 $COMOUT/WAFS_0p25_blended_${PDY}${cyc}f${ffhr}.grib2
+	     ## cp grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr}  $PCOM/grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr}
+	 fi
+
+	 if [ $SENDDBN_NTC = "YES" ] ; then
+	     #   Distribute Data to NCEP FTP Server (WOC) and TOC
+	     echo "No WMO header yet"
+	     ## $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.WAFS_0p25_blended_f${ffhr}
+	 fi
+
+	 if [ $SENDDBN = "YES" ] ; then
+	     $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_BL_GB2 $job $COMOUT/WAFS_0p25_blended_${PDY}${cyc}f${ffhr}.grib2
+	 fi 
+     fi
+
+##########################
+# Next loop
+##########################
+
+     echo "$PDY$cyc$ffhr" > $COMOUT/${RUN}.t${cyc}z.control.wafsblending_0p25
+
+     if [ $FHOUT_GFS -eq 3 ] ; then
+	 FHINC=03
+     else
+	 if [ $ffhr -lt 24 ] ; then
+	     FHINC=01
+	 else
+	     FHINC=03
+	 fi
+     fi
+
+     ffhr=`expr $ffhr + $FHINC`
+     if test $ffhr -lt 10
+     then
+         ffhr=0${ffhr}
+     fi
+
+done
+################################################################################
+
+exit 0
+#
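Editor's note on the forecast-hour loop above: ffhr must keep at least two digits because it is embedded in the unblended file names, which is why the script re-pads it after each expr increment. A purely illustrative one-liner using the printf/base-10 idiom already used elsewhere in these scripts (not part of the diff) would be:

# Illustrative alternative to the expr increment plus re-padding above
ffhr=$(printf "%02d" $(( 10#${ffhr} + 10#${FHINC} )))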
diff --git a/scripts/exgfs_atmos_wafs_gcip.sh b/scripts/exgfs_atmos_wafs_gcip.sh
new file mode 100755
index 0000000000..98e367e5f9
--- /dev/null
+++ b/scripts/exgfs_atmos_wafs_gcip.sh
@@ -0,0 +1,242 @@
+#!/bin/ksh
+######################################################################
+#  UTILITY SCRIPT NAME :  exgfs_atmos_wafs_gcip.sh
+#         DATE WRITTEN :  01/28/2015
+#
+#  Abstract:  This utility script produces the WAFS GCIP. 
+#
+#            GCIP runs f00 f03 for each cycle, 4 times/day,
+#            to make the output valid every 3 hours
+#
+# History:  01/28/2015
+#         - GFS post master file as first guess
+#              /com/prod/gfs.YYYYMMDD
+#         - Nesdis composite global satellite data 
+#              /dcom (ftp?)
+#         - Metar/ships/lightning/pireps
+#              ksh /nwprod/ush/dumpjb YYYYMMDDHH hours output >/dev/null
+#         - Radar data over CONUS
+#              /com/hourly/prod/radar.YYYYMMDD/refd3d.tHHz.grbf00
+#         - output of current icing potential
+#####################################################################
+echo "-----------------------------------------------------"
+echo "JGFS_ATMOS_WAFS_GCIP at 00Z/06Z/12Z/18Z GFS postprocessing"
+echo "-----------------------------------------------------"
+echo "History: 2015 - First implementation of this new script."
+echo "Oct 2021 - Remove jlogfile"
+echo " "
+#####################################################################
+
+set -xa
+
+# Set up working dir for parallel runs based on ffhr
+ffhr=$1
+DATA=$DATA/$ffhr
+mkdir -p $DATA
+cd $DATA
+# Overwrite TMPDIR for dumpjb
+export TMPDIR=$DATA
+
+SLEEP_LOOP_MAX=`expr $SLEEP_TIME / $SLEEP_INT`
+
+configFile=gcip.config
+
+echo 'before preparing data' `date`
+
+# valid time. no worry, it won't be across to another date
+vhour=$(( $ffhr + $cyc ))
+vhour="$(printf "%02d" $(( 10#$vhour )) )"
+
+########################################################
+# Preparing data
+
+if [ $RUN = "gfs" ] ; then
+
+  # model data
+  masterFile=$COMINgfs/gfs.t${cyc}z.master.grb2f$ffhr
+
+  # check the availability of model file
+  icnt=1
+  while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do
+      if [ -s $masterFile ] ; then
+	  break
+      fi
+      sleep $SLEEP_INT
+      icnt=$((icnt + 1))
+      if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then
+          msg="ABORTING after $SLEEP_TIME seconds of waiting for gfs master file!"
+          err_exit $msg
+      fi
+  done
+
+  cpreq $PARMgfs/wafs_gcip_gfs.cfg $configFile
+
+  modelFile=modelfile.grb
+#  ${NLN} $masterFile $modelFile
+  $WGRIB2 $masterFile | egrep ":HGT:|:VVEL:|:CLMR:|:TMP:|:SPFH:|:RWMR:|:SNMR:|:GRLE:|:ICMR:|:RH:" | egrep "00 mb:|25 mb:|50 mb:|75 mb:|:HGT:surface" | $WGRIB2 -i $masterFile -grib $modelFile
+
+  # metar / ships / lightning / pireps
+  # dumped data files' suffix is ".ibm"
+  obsfiles="metar ships ltngsr pirep"
+  for obsfile in $obsfiles ; do 
+#      ksh $USHobsproc_dump/dumpjb ${PDY}${vhour} 1.5 $obsfile >/dev/null
+      ksh $DUMPJB ${PDY}${vhour} 1.5 $obsfile 
+  done
+  metarFile=metar.ibm
+  shipFile=ships.ibm
+  lightningFile=ltngsr.ibm
+  pirepFile=pirep.ibm
+
+  satFiles=""
+  channels="VIS SIR LIR SSR"
+  # If one channel is missing, satFiles will be empty
+  for channel in $channels ; do
+      satFile=GLOBCOMP$channel.${PDY}${vhour}
+      if [[ $COMINsat == *ftp:* ]] ; then
+	  curl -O $COMINsat/$satFile
+      else
+
+        # check the availability of satellite data file
+	icnt=1
+	while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do
+	  if [ -s $COMINsat/$satFile ] ; then
+	    break
+	  fi
+	  sleep $SLEEP_INT
+	  icnt=$((icnt + 1))
+	  if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then
+            msg="GCIP at ${vhour}z ABORTING after $SLEEP_TIME seconds of waiting for satellite $channel file!"
+            echo "$msg"
+            rc=1
+            echo $msg >> $COMOUT/${RUN}.gcip.log
+            
+            if [ $envir != prod ]; then
+              export maillist='nco.spa@noaa.gov'
+            fi
+            export maillist=${maillist:-'nco.spa@noaa.gov,ncep.sos@noaa.gov'}
+
+            export subject="Missing GLOBCOMPVIS Satellite Data for $PDY t${cyc}z $job"
+            echo "*************************************************************" > mailmsg
+            echo "*** WARNING !! COULD NOT FIND GLOBCOMPVIS Satellite Data  *** " >> mailmsg
+            echo "*************************************************************" >> mailmsg
+            echo >> mailmsg
+            echo "One or more GLOBCOMPVIS Satellite Data files are missing, including " >> mailmsg
+            echo "   $COMINsat/$satFile " >> mailmsg
+            echo >> mailmsg
+            echo "$job will exit gracefully" >> mailmsg
+            cat mailmsg > $COMOUT/${RUN}.t${cyc}z.gcip.emailbody
+            cat $COMOUT/${RUN}.t${cyc}z.gcip.emailbody | mail.py -s "$subject" $maillist -v
+
+            exit $rc
+	  fi
+	done
+
+	cp $COMINsat/$satFile .
+      fi
+      if [[ -s $satFile ]] ; then
+	  satFiles="$satFiles $satFile"
+      else
+	  satFiles=""
+	  break
+      fi
+  done
+
+    # radar data
+    sourceRadar=$COMINradar/refd3d.t${vhour}z.grb2f00
+
+    # check the availability of radar data file
+    icnt=1
+    while [ $icnt -lt $SLEEP_LOOP_MAX ] ; do
+	if [ -s $sourceRadar ] ; then
+	    break
+	fi
+	sleep $SLEEP_INT
+	icnt=$((icnt + 1))
+	if [ $icnt -ge $SLEEP_LOOP_MAX ] ; then
+            echo "WARNING: radar data is not available after $SLEEP_TIME seconds of waiting!"
+	fi
+    done
+
+    radarFile=radarFile.grb
+    if [ -s $sourceRadar ] ; then
+      cp $sourceRadar $radarFile
+    fi
+
+  fi # RUN model name
+
+########################################################
+# Composite gcip command options
+
+outputfile=gfs.t${vhour}z.gcip.f00.grib2
+
+cmdoptions="-t ${PDY}${vhour} -c $configFile -model $modelFile"
+if [[ -s $metarFile ]] ; then
+    cmdoptions="$cmdoptions -metar $metarFile"
+else
+    err_exit "There are no METAR observations."
+fi
+if [[ -s $shipFile ]] ; then
+    cmdoptions="$cmdoptions -ship $shipFile"
+fi
+# empty if a channel data is missing
+if [[ -n $satFiles ]] ; then
+    cmdoptions="$cmdoptions -sat $satFiles"
+else
+    err_exit "Satellite data are not available or complete."
+fi
+if [[ -s $lightningFile ]] ; then
+    cmdoptions="$cmdoptions -lightning $lightningFile"
+fi
+if [[ -s $pirepFile ]] ; then
+    cmdoptions="$cmdoptions -pirep $pirepFile"
+fi
+if [[ -s $radarFile ]] ; then
+    cmdoptions="$cmdoptions -radar $radarFile"
+fi
+cmdoptions="$cmdoptions -o $outputfile"
+
+#######################################################
+# Run GCIP
+
+echo 'after preparing data' `date`
+
+export pgm=wafs_gcip.x
+
+cpreq $FIXgfs/gcip_near_ir_refl.table near_ir_refl.table
+
+startmsg
+$EXECgfs/$pgm >> $pgmout $cmdoptions 2> errfile &
+wait
+export err=$?; err_chk
+
+
+if [[ -s $outputfile ]] ; then
+    ############################## 
+    # Post Files to COM
+    ##############################
+    if [ $SENDCOM = "YES" ] ; then
+      cp $outputfile $COMOUT/$outputfile
+      if [ $SENDDBN = "YES" ] ; then
+	# $DBNROOT/bin/dbn_alert GFS_WAFS GCIP $job $COMOUT/$outputfile
+#alert removed in v15.0	$DBNROOT/bin/dbn_alert MODEL GFS_WAFS_GCIP $job $COMOUT/$outputfile
+	  :
+      fi
+    fi
+else
+    err_exit "Output $outputfile was not generated"
+fi
+
+
+################################################################################
+# GOOD RUN
+set +x
+echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GCIP.SH COMPLETED NORMALLY ON THE IBM"
+set -x
+################################################################################
+
+exit 0
+
+############## END OF SCRIPT #######################
+
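Editor's note: with every optional input present, the cmdoptions string assembled above yields a single wafs_gcip.x invocation along the lines sketched below. The file names follow the script; the exact set of -sat files depends on which channels were actually found, so this is illustrative only.

# Example of the fully-assembled command (built dynamically by the script)
wafs_gcip.x -t ${PDY}${vhour} -c gcip.config -model modelfile.grb \
    -metar metar.ibm -ship ships.ibm \
    -sat GLOBCOMPVIS.${PDY}${vhour} GLOBCOMPSIR.${PDY}${vhour} GLOBCOMPLIR.${PDY}${vhour} GLOBCOMPSSR.${PDY}${vhour} \
    -lightning ltngsr.ibm -pirep pirep.ibm -radar radarFile.grb \
    -o gfs.t${vhour}z.gcip.f00.grib2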
diff --git a/scripts/exgfs_atmos_wafs_grib.sh b/scripts/exgfs_atmos_wafs_grib.sh
new file mode 100755
index 0000000000..e81f0e99da
--- /dev/null
+++ b/scripts/exgfs_atmos_wafs_grib.sh
@@ -0,0 +1,146 @@
+#!/bin/sh
+######################################################################
+#  UTILITY SCRIPT NAME :  exgfs_atmos_wafs_grib.sh
+#         DATE WRITTEN :  10/04/2004
+#
+#  Abstract:  This utility script produces the  WAFS GRIB
+#
+#     Input:  1 arguments are passed to this script.
+#             1st argument - Forecast Hour - format of 2I
+#
+#     Logic:   If we are processing forecast hours 12-30, we have the
+#              added variable of the a or b in the process accordingly.
+#              For the other forecast hours, the a or b is dropped.
+#
+#####################################################################
+echo "------------------------------------------------"
+echo "JWAFS_00/06/12/18 GFS postprocessing"
+echo "------------------------------------------------"
+echo "History: OCT 2004 - First implementation of this new script."
+echo "         Aug 2015 - Modified for Phase II"
+echo "         Dec 2015 - Modified for input model data in Grib2"
+echo "         Oct 2021 - Remove jlogfile"
+echo " "
+#####################################################################
+set +x
+fcsthrs_list="$1"
+num=$#
+
+if test "$num" -ge 1
+then
+   echo " Appropriate number of arguments were passed"
+   set -x
+   export DBNALERT_TYPE=${DBNALERT_TYPE:-GRIB}
+#   export job=${job:-interactive}
+else
+   echo ""
+   echo "Usage: exgfs_atmos_wafs_grib.sh  \$fcsthrs "
+   echo ""
+   exit 16
+fi
+
+cd $DATA
+
+set -x
+
+# To fix bugzilla 628 ( removing 'j' ahead of $job )
+export jobsuffix=gfs_atmos_wafs_f${fcsthrs}_$cyc
+
+###############################################
+# Wait for the availability of the pgrib file
+###############################################
+# file name and forecast hour of GFS model data in Grib2 are 3 digits
+export fcsthrs000="$(printf "%03d" $(( 10#$fcsthrs )) )"
+icnt=1
+while [ $icnt -lt 1000 ]
+do
+#  if [ -s $COMIN/${RUN}.${cycle}.pgrbf$fcsthrs ]
+  if [ -s $COMIN/${RUN}.${cycle}.pgrb2.1p00.f$fcsthrs000 ]
+  then
+     break
+  fi
+
+  sleep 10
+  icnt=$((icnt + 1))
+  if [ $icnt -ge 180 ]
+  then
+      msg="ABORTING after 30 min of waiting for the pgrib file!"
+      err_exit $msg
+  fi
+done
+
+########################################
+echo "HAS BEGUN!"
+########################################
+
+echo " ------------------------------------------"
+echo " BEGIN MAKING GFS WAFS PRODUCTS"
+echo " ------------------------------------------"
+
+####################################################
+#
+#    GFS WAFS PRODUCTS MUST RUN IN CERTAIN ORDER
+#    BY REQUIREMENT FROM FAA.
+#    PLEASE DO NOT ALTER ORDER OF PROCESSING WAFS
+#    PRODUCTS CONSULTING WITH MR. BRENT GORDON.
+#
+####################################################
+
+set +x
+echo " "
+echo "#####################################"
+echo " Process GRIB WAFS PRODUCTS (mkwafs)"
+echo " FORECAST HOURS 00 - 72."
+echo "#####################################"
+echo " "
+set -x
+
+if test $fcsthrs -eq 0
+then
+    echo "  "
+fi
+
+#    If we are processing forecast hours 12-30, we have the
+#    added variable of the a or b in the process.
+#    For the other forecast hours, the a or b is dropped.
+
+if test $fcsthrs -ge 12 -a $fcsthrs -le 24
+then
+    sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} a
+fi
+
+if test $fcsthrs -eq 30
+then
+    sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} a
+    for fcsthrs in 12 18 24 30
+    do
+       sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} b
+    done
+    sh $USHgfs/wafs_mkgbl.sh 00 x
+    sh $USHgfs/wafs_mkgbl.sh 06 x
+fi
+
+if test $fcsthrs -gt 30 -a $fcsthrs -le 48
+then
+    sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} x
+fi
+
+if test $fcsthrs -eq 60 -o $fcsthrs -eq 72
+then
+    sh $USHgfs/wafs_mkgbl.sh ${fcsthrs} x
+fi
+
+################################################################################
+# GOOD RUN
+set +x
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB.SH COMPLETED NORMALLY ON THE IBM"
+set -x
+################################################################################
+
+echo "HAS COMPLETED NORMALLY!"
+
+exit 0
+
+############## END OF SCRIPT #######################
diff --git a/scripts/exgfs_atmos_wafs_grib2.sh b/scripts/exgfs_atmos_wafs_grib2.sh
new file mode 100755
index 0000000000..4631a10d8c
--- /dev/null
+++ b/scripts/exgfs_atmos_wafs_grib2.sh
@@ -0,0 +1,227 @@
+#!/bin/sh
+######################################################################
+#  UTILITY SCRIPT NAME :  exgfs_atmos_wafs_grib2.sh
+#         DATE WRITTEN :  07/15/2009
+#
+#  Abstract:  This utility script produces the WAFS GRIB2. The output 
+#             GRIB files are posted on NCEP ftp server and the grib2 files
+#             are pushed via dbnet to TOC to WAFS (ICSC).  
+#             This is a joint project of WAFC London and WAFC Washington.
+#
+#             We are processing WAFS grib2 for fcsthrs from 06 - 36 
+#             with 3-hour time increment.
+#
+# History:  08/20/2014
+#              - ingest master file in grib2 (or grib1 if grib2 fails)
+#              - output of icng tcld cat cb are in grib2
+#           02/21/2020
+#              - Prepare unblended icing severity and GTG turbulence
+#                for blending at 0.25 degree
+#           02/22/2022
+#              - Add grib2 data requested by FAA
+#              - Stop generating grib1 data for WAFS
+#####################################################################
+echo "-----------------------------------------------------"
+echo "JGFS_ATMOS_WAFS_GRIB2 at 00Z/06Z/12Z/18Z GFS postprocessing"
+echo "-----------------------------------------------------"
+echo "History: AUGUST  2009 - First implementation of this new script."
+echo "Oct 2021 - Remove jlogfile"
+echo "Feb 2022 - Add FAA data, stop grib1 data"
+echo " "
+#####################################################################
+
+set -x
+
+fcsthrs=$1
+
+DATA=$DATA/$fcsthrs
+mkdir -p $DATA
+cd $DATA
+
+##########################################################
+# Wait for the availability of the gfs master pgrib file
+##########################################################
+# file name and forecast hour of GFS model data in Grib2 are 3 digits
+export fcsthrs000="$(printf "%03d" $(( 10#$fcsthrs )) )"
+
+# 2D data
+master2=$COMIN/${RUN}.${cycle}.master.grb2f${fcsthrs000}
+master2i=$COMIN/${RUN}.${cycle}.master.grb2if${fcsthrs000}
+# 3D data
+wafs2=$COMIN/${RUN}.${cycle}.wafs.grb2f${fcsthrs000}
+wafs2i=$COMIN/${RUN}.${cycle}.wafs.grb2f${fcsthrs000}.idx
+# 3D data (on ICAO standard level)
+icao2=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${fcsthrs000}
+icao2i=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${fcsthrs000}.idx
+
+icnt=1
+while [ $icnt -lt 1000 ]
+do
+    if [[ -s $master2i && -s $wafs2i ]] ; then
+      break
+    fi
+
+    sleep 10
+    icnt=$((icnt + 1))
+    if [ $icnt -ge 180 ] ;    then
+        msg="ABORTING after 30 min of waiting for the gfs master and wafs file!"
+        err_exit $msg
+    fi
+done
+
+########################################
+echo "HAS BEGUN!"
+########################################
+
+echo " ------------------------------------------"
+echo " BEGIN MAKING GFS WAFS GRIB2 PRODUCTS"
+echo " ------------------------------------------"
+
+set +x
+echo " "
+echo "#####################################"
+echo "      Process GRIB WAFS PRODUCTS     "
+echo " FORECAST HOURS 06 - 36."
+echo "#####################################"
+echo " "
+set -x
+
+
+if [ $fcsthrs -le 36 -a $fcsthrs -gt 0 ] ; then
+    wafs_timewindow=yes
+else
+    wafs_timewindow=no
+fi
+
+#---------------------------
+# 1) Grib2 data for FAA
+#---------------------------
+$WGRIB2 $master2 | grep -F -f $FIXgfs/grib2_gfs_awf_master.list | $WGRIB2 -i $master2 -grib tmpfile_gfsf${fcsthrs}
+# F006 master file has two records of 0-6 hour APCP and ACPCP each, keep only one
+# FAA APCP ACPCP: included every 6 forecast hours (0, 48], every 12 forecast hours [48, 72] (controlled by $FIXgfs/grib2_gfs_awf_master3d.list)
+if [ $fcsthrs -eq 6 ] ; then
+    $WGRIB2 tmpfile_gfsf${fcsthrs} -not "(APCP|ACPCP)" -grib tmp.grb2
+    $WGRIB2 tmpfile_gfsf${fcsthrs} -match APCP -append -grib tmp.grb2 -quit
+    $WGRIB2 tmpfile_gfsf${fcsthrs} -match ACPCP -append -grib tmp.grb2 -quit
+    mv tmp.grb2 tmpfile_gfsf${fcsthrs}
+fi
+# U V will have the same grid message number by using -ncep_uv.
+# U V will have a different grid message number without -ncep_uv.
+$WGRIB2 tmpfile_gfsf${fcsthrs} \
+                      -set master_table 6 \
+                      -new_grid_winds earth -set_grib_type jpeg \
+                      -new_grid_interpolation bilinear -if ":(UGRD|VGRD):max wind" -new_grid_interpolation neighbor -fi \
+                      -new_grid latlon 0:288:1.25 90:145:-1.25 gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2
+$WGRIB2 -s gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 > gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx
+
+# For FAA, add WMO header. The header is different from WAFS
+export pgm=$TOCGRIB2
+. prep_step
+startmsg
+export FORT11=gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2
+export FORT31=" "
+export FORT51=grib2.t${cyc}z.awf_grbf${fcsthrs}.45
+$TOCGRIB2 <  $FIXgfs/grib2_gfs_awff${fcsthrs}.45 >> $pgmout 2> errfile
+err=$?;export err ;err_chk
+echo " error from tocgrib2=$err"
+
+if [ $wafs_timewindow = 'yes' ] ; then
+#---------------------------
+# 2) traditional WAFS fields
+#---------------------------
+    # 3D data from $wafs2, on exact model pressure levels
+    $WGRIB2 $wafs2 | grep -F -f $FIXgfs/grib2_gfs_wafs_wafsmaster.list | $WGRIB2 -i $wafs2 -grib tmpfile_gfsf${fcsthrs}
+    # 2D data from $master2
+    tail -5 $FIXgfs/grib2_gfs_wafs_wafsmaster.list > grib2_gfs_wafs_wafsmaster.list.2D
+    $WGRIB2 $master2 | grep -F -f grib2_gfs_wafs_wafsmaster.list.2D | $WGRIB2 -i $master2 -grib tmpfile_gfsf${fcsthrs}.2D
+    # Complete list of WAFS data
+    cat tmpfile_gfsf${fcsthrs}.2D >> tmpfile_gfsf${fcsthrs}
+    # WMO header
+    cp $FIXgfs/grib2_gfs_wafsf${fcsthrs}.45 wafs_wmo_header45
+    # U V will have the same grid message number by using -ncep_uv.
+    # U V will have a different grid message number without -ncep_uv.
+    $WGRIB2 tmpfile_gfsf${fcsthrs} \
+            -set master_table 6 \
+            -new_grid_winds earth -set_grib_type jpeg \
+            -new_grid_interpolation bilinear -if ":(UGRD|VGRD):max wind" -new_grid_interpolation neighbor -fi \
+            -new_grid latlon 0:288:1.25 90:145:-1.25 gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2
+    $WGRIB2 -s gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 > gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx
+
+    # For WAFS, add WMO header. Processing WAFS GRIB2 grid 45 for ISCS and WIFS
+    export pgm=$TOCGRIB2
+    . prep_step
+    startmsg
+    export FORT11=gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2
+    export FORT31=" "
+    export FORT51=grib2.t${cyc}z.wafs_grbf${fcsthrs}.45
+    $TOCGRIB2 < wafs_wmo_header45 >> $pgmout 2> errfile
+    err=$?;export err ;err_chk
+    echo " error from tocgrib2=$err"
+
+fi # wafs_timewindow
+
+if [ $SENDCOM = "YES" ] ; then
+
+    ##############################
+    # Post Files to COM
+    ##############################
+
+    # FAA data
+    mv gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2 $COMOUT/gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2
+    mv gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx $COMOUT/gfs.t${cyc}z.awf_grb45f${fcsthrs}.grib2.idx
+
+    # WAFS data
+    if [ $wafs_timewindow = 'yes' ] ; then
+	mv gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2 $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2
+	mv gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2.idx
+    fi
+
+    ##############################
+    # Post Files to PCOM
+    ##############################
+
+    mv grib2.t${cyc}z.awf_grbf${fcsthrs}.45  $PCOM/grib2.t${cyc}z.awf_grbf${fcsthrs}.45
+
+    if [ $wafs_timewindow = 'yes' ] ; then
+	mv grib2.t${cyc}z.wafs_grbf${fcsthrs}.45  $PCOM/grib2.t${cyc}z.wafs_grbf${fcsthrs}.45
+    fi
+fi
+
+######################
+# Distribute Data
+######################
+
+if [ $SENDDBN = "YES" ] ; then
+
+#  
+#    Distribute Data to WOC
+#  
+    if [ $wafs_timewindow = 'yes' ] ; then
+	$DBNROOT/bin/dbn_alert MODEL GFS_WAFS_1P25_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_grb45f${fcsthrs}.grib2
+#
+#       Distribute Data to TOC TO WIFS FTP SERVER (AWC)
+#
+	$DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.wafs_grbf${fcsthrs}.45
+    fi
+#
+#   Distribute data to FAA
+#
+    $DBNROOT/bin/dbn_alert NTC_LOW $NET $job $PCOM/grib2.t${cyc}z.awf_grbf${fcsthrs}.45
+
+
+fi
+
+################################################################################
+# GOOD RUN
+set +x
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2.SH COMPLETED NORMALLY ON THE IBM"
+set -x
+################################################################################
+
+echo "HAS COMPLETED NORMALLY!"
+
+exit 0
+
+############## END OF SCRIPT #######################
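Editor's note: the record-extraction idiom used throughout these WAFS scripts pipes a wgrib2 inventory through a fixed-string parameter list and back into wgrib2 with -i. A minimal standalone sketch of the pattern (file names here are placeholders):

# Keep only the records whose inventory lines match entries in params.list
${WGRIB2} input.grb2 | grep -F -f params.list | ${WGRIB2} -i input.grb2 -grib subset.grb2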
diff --git a/scripts/exgfs_atmos_wafs_grib2_0p25.sh b/scripts/exgfs_atmos_wafs_grib2_0p25.sh
new file mode 100755
index 0000000000..ec53966430
--- /dev/null
+++ b/scripts/exgfs_atmos_wafs_grib2_0p25.sh
@@ -0,0 +1,200 @@
+#!/bin/sh
+######################################################################
+#  UTILITY SCRIPT NAME :  exgfs_atmos_wafs_grib2_0p25.sh
+#         DATE WRITTEN :  03/20/2020
+#
+#  Abstract:  This utility script produces the WAFS GRIB2 at 0.25 degree.
+#             The output GRIB files are posted on NCEP ftp server and the
+#             grib2 files are pushed via dbnet to TOC to WAFS (ICSC).  
+#             This is a joint project of WAFC London and WAFC Washington.
+#
+#             We are processing WAFS grib2 for ffhr:
+#             hourly: 006 - 024
+#             3 hour: 027 - 048
+#             6 hour: 054 - 120 (for U/V/T/RH, not for turbulence/icing/CB)
+#
+# History:  
+#####################################################################
+echo "-----------------------------------------------------"
+echo "JGFS_ATMOS_WAFS_GRIB2_0P25 at 00Z/06Z/12Z/18Z GFS postprocessing"
+echo "-----------------------------------------------------"
+echo "History: MARCH  2020 - First implementation of this new script."
+echo "Oct 2021 - Remove jlogfile"
+echo "Aug 2022 - ffhr expanded from 36 to 120"
+echo " "
+#####################################################################
+
+cd $DATA
+
+set -x
+
+
+ffhr=$1
+export ffhr="$(printf "%03d" $(( 10#$ffhr )) )"
+export ffhr2="$(printf "%02d" $(( 10#$ffhr )) )"
+
+DATA=$DATA/$ffhr
+mkdir -p $DATA
+cd $DATA
+
+
+if [ $ffhr -le 48 ] ; then
+    hazard_timewindow=yes
+else
+    hazard_timewindow=no
+fi
+
+
+##########################################################
+# Wait for the availability of the gfs WAFS file
+##########################################################
+
+# 3D data (on new ICAO model pressure levels) and 2D data (CB)
+wafs2=$COMIN/${RUN}.${cycle}.wafs.grb2f${ffhr}
+wafs2i=$COMIN/${RUN}.${cycle}.wafs.grb2f${ffhr}.idx
+
+# 2D data from master file (U/V/H on max wind level, T/H at tropopause)
+master2=$COMIN/${RUN}.${cycle}.master.grb2f${ffhr}
+
+# 3D data (on standard atmospheric pressure levels)
+# Up to fhour=48
+# Will be removed in GFS.v17
+icao2=$COMIN/${RUN}.${cycle}.wafs_icao.grb2f${ffhr}
+  
+icnt=1
+while [ $icnt -lt 1000 ]
+do
+    if [[ -s $wafs2i ]] ; then
+      break
+    fi
+
+    sleep 10
+    icnt=$((icnt + 1))
+    if [ $icnt -ge 180 ] ;    then
+        msg="ABORTING after 30 min of waiting for the gfs wafs file!"
+        err_exit $msg
+    fi
+done
+
+
+########################################
+echo "HAS BEGUN!"
+########################################
+
+echo " ------------------------------------------"
+echo " BEGIN MAKING GFS WAFS GRIB2 0.25 DEG PRODUCTS"
+echo " ------------------------------------------"
+
+set +x
+echo " "
+echo "#####################################"
+echo "      Process GRIB2 WAFS 0.25 DEG PRODUCTS     "
+echo "#####################################"
+echo " "
+set -x
+
+opt1=' -set_grib_type same -new_grid_winds earth '
+opt21=' -new_grid_interpolation bilinear  -if '
+opt22="(:ICESEV|parm=37):"
+opt23=' -new_grid_interpolation neighbor -fi '
+opt24=' -set_bitmap 1 -set_grib_max_bits 16 '
+opt25=":(UGRD|VGRD):max wind"
+newgrid="latlon 0:1440:0.25 90:721:-0.25"
+
+# WAFS 3D data
+$WGRIB2 $wafs2 $opt1 $opt21 $opt22 $opt23 $opt24 -new_grid $newgrid tmp_wafs_0p25.grb2
+# Master 2D data
+$WGRIB2 $master2 | grep -F -f $FIXgfs/grib2_0p25_gfs_master2d.list \
+    | $WGRIB2 -i $master2 -set master_table 25 -grib tmp_master.grb2
+$WGRIB2 tmp_master.grb2 $opt1 $opt21 ":(UGRD|VGRD):max wind" $opt23 $opt24 -new_grid $newgrid tmp_master_0p25.grb2
+
+#---------------------------
+# Product 1: WAFS u/v/t/rh gfs.tHHz.wafs_0p25.fFFF.grib2
+#---------------------------
+$WGRIB2 tmp_wafs_0p25.grb2 | egrep "UGRD|VGRD|TMP|HGT|RH" \
+    | $WGRIB2 -i tmp_wafs_0p25.grb2 -set master_table 25 -grib tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+cat tmp_master_0p25.grb2 >> tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+# Convert template 5 to 5.40
+#$WGRIB2 tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 -set_grib_type jpeg -grib_out gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+mv tmp.gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+$WGRIB2 -s gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 > gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx
+
+if [ $hazard_timewindow = 'yes' ] ; then
+#---------------------------
+# Product 2: For AWC and Delta airline: EDPARM CAT MWT ICESEV CB  gfs.tHHz.awf_0p25.fFFF.grib2
+#---------------------------
+    criteria1=":EDPARM:|:ICESEV:|parm=37:"
+    criteria2=":CATEDR:|:MWTURB:"
+    criteria3=":CBHE:|:ICAHT:"
+    $WGRIB2 tmp_wafs_0p25.grb2 | egrep "${criteria1}|$criteria2|$criteria3" \
+	| $WGRIB2 -i tmp_wafs_0p25.grb2 -grib gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2
+    $WGRIB2 -s gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 > gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx
+
+#---------------------------
+# Product 3: WAFS unblended EDPARM, ICESEV, CB (No CAT MWT) gfs.tHHz.wafs_0p25_unblended.fFF.grib2
+#---------------------------
+    $WGRIB2 tmp_wafs_0p25.grb2 | grep -F -f $FIXgfs/grib2_0p25_gfs_hazard.list \
+	| $WGRIB2 -i tmp_wafs_0p25.grb2 -set master_table 25 -grib tmp_wafs_0p25.grb2.forblend
+
+    # Convert template 5 to 5.40
+    #$WGRIB2 tmp_wafs_0p25.grb2.forblend -set_grib_type jpeg -grib_out gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2
+    mv tmp_wafs_0p25.grb2.forblend gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2
+    $WGRIB2 -s gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 > gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx
+fi
+
+if [ $SENDCOM = "YES" ] ; then
+
+   ##############################
+   # Post Files to COM
+   ##############################
+
+    mv gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2 $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+    mv gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2.idx
+
+   if [ $hazard_timewindow = 'yes' ] ; then
+       mv gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2 $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2
+       mv gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2.idx
+       
+       mv gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2 $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2
+       mv gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2.idx
+   fi
+
+   #############################
+   # Post Files to PCOM
+   ##############################
+   ## mv gfs.t${cyc}z.wafs_0p25_unblended_wifs.f${ffhr2}.grib2 $PCOM/gfs.t${cyc}z.wafs_0p25_unblended_wifs.f${ffhr2}.grib2
+fi
+
+
+if [ $SENDDBN = "YES" ] ; then
+   ######################
+   # Distribute Data
+   ######################
+
+    if [ $hazard_timewindow = 'yes' ] ; then
+	# Hazard WAFS data (ICESEV EDR CAT MWT on 100mb to 1000mb or on new ICAO 2023 levels) sent to AWC and to NOMADS for US stakeholders
+	$DBNROOT/bin/dbn_alert MODEL GFS_AWF_0P25_GB2 $job $COMOUT/gfs.t${cyc}z.awf_0p25.f${ffhr}.grib2
+
+	# Unblended US WAFS data sent to UK for blending, to the same server as 1.25 deg unblended data: wmo/grib2.tCCz.wafs_grb_wifsfFF.45
+	$DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_UBL_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_0p25_unblended.f${ffhr2}.grib2
+    fi
+
+    # WAFS U/V/T/RH data sent to the same server as the unblended data as above
+    $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_0P25_GB2 $job $COMOUT/gfs.t${cyc}z.wafs_0p25.f${ffhr}.grib2
+
+fi
+
+################################################################################
+# GOOD RUN
+set +x
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM"
+echo "**************JOB EXGFS_ATMOS_WAFS_GRIB2_0P25.SH COMPLETED NORMALLY ON THE IBM"
+set -x
+################################################################################
+
+echo "HAS COMPLETED NORMALLY!"
+
+exit 0
+
+############## END OF SCRIPT #######################
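
The product subsetting in this script relies on the wgrib2 inventory-filter idiom: list the inventory of a GRIB2 file, keep the records whose inventory lines match a pattern, and pipe that inventory back into wgrib2 with -i to extract just those records. A minimal sketch of the idiom, assuming ${WGRIB2} points at the wgrib2 executable and tmp_wafs_0p25.grb2 is present in the working directory (the output name subset.grib2 is only illustrative):

    # Keep only the EDPARM/CAT/MWT records and write a matching .idx sidecar.
    criteria=":EDPARM:|:CATEDR:|:MWTURB:"
    ${WGRIB2} tmp_wafs_0p25.grb2 | grep -E "${criteria}" \
      | ${WGRIB2} -i tmp_wafs_0p25.grb2 -grib subset.grib2
    ${WGRIB2} -s subset.grib2 > subset.grib2.idx
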
diff --git a/scripts/exgfs_pmgr.sh b/scripts/exgfs_pmgr.sh
index a417bbed55..c3b9a5befa 100755
--- a/scripts/exgfs_pmgr.sh
+++ b/scripts/exgfs_pmgr.sh
@@ -6,7 +6,7 @@
 #  This script monitors the progress of the gfs_fcst job
 #
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 hour=00
 TEND=384
diff --git a/scripts/exgfs_prdgen_manager.sh b/scripts/exgfs_prdgen_manager.sh
index 7d0a95696b..01e8c58c87 100755
--- a/scripts/exgfs_prdgen_manager.sh
+++ b/scripts/exgfs_prdgen_manager.sh
@@ -6,7 +6,7 @@
 #  This script monitors the progress of the gfs_fcst job
 #
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 hour=00
 TEND=384
diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh
index ce903a2284..17e6cec042 100755
--- a/scripts/exgfs_wave_init.sh
+++ b/scripts/exgfs_wave_init.sh
@@ -26,7 +26,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -83,27 +83,27 @@ source "${HOMEgfs}/ush/preamble.sh"
   grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ')
 
   for grdID in ${grdALL}; do
-    if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+    if [[ -f "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
       set +x
-      echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+      echo " Mod def file for ${grdID} found in ${COMOUT_WAVE_PREP}. copying ...."
       set_trace
-      cp "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+      cp "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
 
     else
       set +x
-      echo " Mod def file for ${grdID} not found in ${COM_WAVE_PREP}. Setting up to generate ..."
+      echo " Mod def file for ${grdID} not found in ${COMOUT_WAVE_PREP}. Setting up to generate ..."
       echo ' '
       set_trace
-      if [ -f $FIXwave/ww3_grid.inp.$grdID ]
+      if [ -f ${FIXgfs}/wave/ww3_grid.inp.$grdID ]
       then
-        cp $FIXwave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID
+        cp ${FIXgfs}/wave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID
       fi
 
       if [ -f ww3_grid.inp.$grdID ]
       then
         set +x
         echo ' '
-        echo "   ww3_grid.inp.$grdID copied ($FIXwave/ww3_grid.inp.$grdID)."
+        echo "   ww3_grid.inp.$grdID copied (${FIXgfs}/wave/ww3_grid.inp.$grdID)."
         echo ' '
         set_trace
       else
@@ -118,11 +118,17 @@ source "${HOMEgfs}/ush/preamble.sh"
         err=2;export err;${errchk}
       fi
 
-      [[ ! -d "${COM_WAVE_PREP}" ]] && mkdir -m 775 -p "${COM_WAVE_PREP}"
+
+      if [ -f ${FIXgfs}/wave/${grdID}.msh ]
+      then
+        cp "${FIXgfs}/wave/${grdID}.msh" "${grdID}.msh"
+      fi
+      #TODO: add an error check here for unstructured grids, which require a ${grdID}.msh file
+
       if [ ${CFP_MP:-"NO"} = "YES" ]; then
-        echo "$nmoddef $USHwave/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
+        echo "$nmoddef ${USHgfs}/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
       else
-        echo "$USHwave/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
+        echo "${USHgfs}/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
       fi
 
       nmoddef=$(expr $nmoddef + 1)
@@ -166,7 +172,7 @@ source "${HOMEgfs}/ush/preamble.sh"
       exit=$?
     fi
 
-    if [ "$exit" != '0' ]
+    if [[ "$exit" != '0' ]]
     then
       set +x
       echo ' '
@@ -183,7 +189,7 @@ source "${HOMEgfs}/ush/preamble.sh"
 # 1.a.3 File check
 
   for grdID in ${grdALL}; do
-    if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+    if [[ -f "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
       set +x
       echo ' '
       echo " mod_def.$grdID succesfully created/copied "
@@ -195,9 +201,9 @@ source "${HOMEgfs}/ush/preamble.sh"
       echo '********************************************** '
       echo '*** FATAL ERROR : NO MODEL DEFINITION FILE *** '
       echo '********************************************** '
-      echo "                                grdID = $grdID"
+      echo "                                grdID = ${grdID}"
       echo ' '
-      sed "s/^/$grdID.out : /g"  $grdID.out
+      sed "s/^/${grdID}.out : /g"  "${grdID}.out"
       set_trace
       err=3;export err;${errchk}
     fi
@@ -206,10 +212,10 @@ source "${HOMEgfs}/ush/preamble.sh"
 # Copy to other members if needed
 if (( NMEM_ENS > 0 )); then
   for mem in $(seq -f "%03g" 1 "${NMEM_ENS}"); do
-    MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com COM_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL
-    mkdir -p "${COM_WAVE_PREP_MEM}"
+    MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL
+    mkdir -p "${COMOUT_WAVE_PREP_MEM}"
     for grdID in ${grdALL}; do
-      ${NLN} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COM_WAVE_PREP_MEM}/"
+      ${NLN} "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COMOUT_WAVE_PREP_MEM}/${RUN}wave.mod_def.${grdID}"
     done
   done
 fi
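
The mod_def generation above uses the command-file (MPMD) pattern shared by the wave scripts: each serial task is written as one line of a command file, and when CFP_MP=YES every line is prefixed with a rank number before the file is handed to the MPMD launcher. A minimal sketch of building such a command file, with a hypothetical grid list:

    # Hypothetical grid list; each line of cmdfile becomes one MPMD task.
    grdALL="glo_025 ao_9km"
    nmoddef=0
    rm -f cmdfile; touch cmdfile; chmod 744 cmdfile
    for grdID in ${grdALL}; do
      if [[ "${CFP_MP:-NO}" == "YES" ]]; then
        echo "${nmoddef} ${USHgfs}/wave_grid_moddef.sh ${grdID} > ${grdID}.out 2>&1" >> cmdfile
      else
        echo "${USHgfs}/wave_grid_moddef.sh ${grdID} > ${grdID}.out 2>&1" >> cmdfile
      fi
      nmoddef=$((nmoddef + 1))
    done
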
diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh
index 63690ff1b0..949425cbc1 100755
--- a/scripts/exgfs_wave_nawips.sh
+++ b/scripts/exgfs_wave_nawips.sh
@@ -11,7 +11,7 @@
 #  March-2020 Roberto.Padilla@noaa.gov                                   
 #####################################################################
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #export grids=${grids:-'glo_30m at_10m ep_10m wc_10m ao_9km'} #Interpolated grids
 export grids=${grids:-'glo_30m'}  #Native grids
@@ -24,7 +24,6 @@ export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3}
 export maxtries=${maxtries:-720}
 export cycle=${cycle:-t${cyc}z}
 export GEMwave=${GEMwave:-${HOMEgfs}/gempak}
-export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave}
 export DATA=${DATA:-${DATAROOT:?}/${jobid}}
 if [ ! -d ${DATA} ];then
   mkdir -p ${DATA}
@@ -45,6 +44,7 @@ pdsext=no
 g2tbls=g2varswmo2.tbl
 NAGRIB=nagrib2
 
+sleep_interval=20
 maxtries=15
 fhcnt=${fstart}
 while [ ${fhcnt} -le ${FHMAX_WAV} ]; do
@@ -73,28 +73,11 @@ while [ ${fhcnt} -le ${FHMAX_WAV} ]; do
     esac
     GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdIDin}.f${fhr}.grib2"
     GRIBIN_chk=${GRIBIN}.idx
-
-    icnt=1
-    while [ ${icnt} -lt 1000 ]; do
-      if [ -r ${GRIBIN_chk} ] ; then
-        break
-      else
-        let "icnt=icnt+1"
-        sleep 20
-      fi
-      if [ ${icnt} -ge ${maxtries} ]; then
-        msg="ABORTING after 5 minutes of waiting for ${GRIBIN}."
-        echo ' '
-        echo '**************************** '
-        echo '*** ERROR : NO GRIB FILE *** '
-        echo '**************************** '
-        echo ' '
-        echo ${msg}
-        set_trace
-        echo "${RUNwave} ${grdID} ${fhr} prdgen ${date} ${cycle} : GRIB file missing." >> ${wavelog}
-        err=1;export err;${errchk} || exit ${err}
-      fi
-    done
+    if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${maxtries}"; then
+      echo "FATAL ERROR: ${GRIBIN_chk} not found after waiting $((sleep_interval * ( max_tries - 1))) secs"
+      echo "${RUNwave} ${grdID} ${fhr} prdgen ${date} ${cycle} : GRIB file missing." >> "${wavelog}"
+      err=1;export err;"${errchk}" || exit "${err}"
+    fi
 
     #if [ "$grdIDin" = "global.0p25" && "$grid" = "glo_30m" ]; then
     if [ "${grdIDin}" = "global.0p25" ]; then
diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh
index af362b1c45..b0cca34bd1 100755
--- a/scripts/exgfs_wave_post_gridded_sbs.sh
+++ b/scripts/exgfs_wave_post_gridded_sbs.sh
@@ -20,6 +20,8 @@
 # 2020-06-10  J-Henrique Alves: Porting to R&D machine Hera
 # 2020-07-31  Jessica Meixner: Removing points, now gridded data only
 #
+# COM inputs:
+#  - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}
+#  - ${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}
+#
 # $Id$
 #
 # Attributes:
@@ -30,7 +32,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -103,12 +105,12 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 1.a.1 Copy model definition files
   for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do
-    if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+    if [[ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
       set +x
-      echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+      echo " Mod def file for ${grdID} found in ${COMIN_WAVE_PREP}. copying ...."
       set_trace
 
-      cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+      cp -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
     fi
   done
 
@@ -139,9 +141,9 @@ source "$HOMEgfs/ush/preamble.sh"
   then
     for intGRD in $waveinterpGRD
     do
-      if [ -f $PARMwave/${intGRD}_interp.inp.tmpl ]
+      if [ -f ${PARMgfs}/wave/${intGRD}_interp.inp.tmpl ]
       then
-        cp -f $PARMwave/${intGRD}_interp.inp.tmpl ${intGRD}_interp.inp.tmpl
+        cp -f ${PARMgfs}/wave/${intGRD}_interp.inp.tmpl ${intGRD}_interp.inp.tmpl
       fi
 
       if [ -f ${intGRD}_interp.inp.tmpl ]
@@ -168,9 +170,9 @@ source "$HOMEgfs/ush/preamble.sh"
   then
     for grbGRD in $waveinterpGRD $wavepostGRD
     do
-      if [ -f $PARMwave/ww3_grib2.${grbGRD}.inp.tmpl ]
+      if [ -f ${PARMgfs}/wave/ww3_grib2.${grbGRD}.inp.tmpl ]
       then
-        cp -f $PARMwave/ww3_grib2.${grbGRD}.inp.tmpl ww3_grib2.${grbGRD}.inp.tmpl
+        cp -f ${PARMgfs}/wave/ww3_grib2.${grbGRD}.inp.tmpl ww3_grib2.${grbGRD}.inp.tmpl
       fi
 
       if [ -f ww3_grib2.${grbGRD}.inp.tmpl ]
@@ -231,6 +233,7 @@ source "$HOMEgfs/ush/preamble.sh"
     fhr=$FHMIN_WAV
   fi
   fhrg=$fhr
+  sleep_interval=10
   iwaitmax=120 # Maximum loop cycles for waiting until wave component output file is ready (fails after max)
   while [ $fhr -le $FHMAX_WAV ]; do
 
@@ -253,33 +256,27 @@ source "$HOMEgfs/ush/preamble.sh"
     export GRDIDATA=${DATA}/output_$YMDHMS
 
 # Gridded data (main part, needs to be run side-by-side with the forecast)
-
+    
     if [ $fhr = $fhrg ]
     then
-      iwait=0
-      for wavGRD in ${waveGRD} ; do
-        gfile=${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}
-        while [ ! -s ${gfile} ]; do sleep 10; let iwait=iwait+1; done
-        if [ $iwait -eq $iwaitmax ]; then
-          echo '*************************************************** '
-          echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID "
-          echo '*************************************************** '
-          echo ' '
-          set_trace
+      for wavGRD in ${waveGRD}; do
+        gfile="${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}"
+        if ! wait_for_file "${gfile}" "${sleep_interval}" "${iwaitmax}"; then
+          echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.${grdID}"
           echo "${WAV_MOD_TAG} post ${grdID} ${PDY} ${cycle} : field output missing."
-          err=3; export err;${errchk}
-          exit $err
+          err=3; export err; "${errchk}"
+          exit "${err}"
         fi
-        ln -s ${gfile} ./out_grd.${wavGRD}
+        ${NLN} "${gfile}" "./out_grd.${wavGRD}"
       done
-
+      
       if [ "$DOGRI_WAV" = 'YES' ]
       then
         nigrd=1
         for grdID in $waveinterpGRD
         do
           ymdh_int=$($NDATE -${WAVHINDH} $ymdh); dt_int=3600.; n_int=9999 ;
-          echo "$USHwave/wave_grid_interp_sbs.sh $grdID $ymdh_int $dt_int $n_int > grint_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd}
+          echo "${USHgfs}/wave_grid_interp_sbs.sh $grdID $ymdh_int $dt_int $n_int > grint_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd}
           if [ "$DOGRB_WAV" = 'YES' ]
           then
             gribFL=\'$(echo ${OUTPARS_WAV})\'
@@ -287,6 +284,7 @@ source "$HOMEgfs/ush/preamble.sh"
               glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11 ;;
               reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11 ;;
               glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11 ;;
+              glo_100) GRDNAME='global' ; GRDRES=1p00 ; GRIDNR=255  ; MODNR=11 ;;
               glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255  ; MODNR=11 ;;
               glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255  ; MODNR=11 ;;
               glo_30mxt) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255  ; MODNR=11 ;;
@@ -296,7 +294,7 @@ source "$HOMEgfs/ush/preamble.sh"
               wc_10m) GRDNAME='wcoast' ; GRDRES=0p16 ; GRIDNR=255  ; MODNR=11   ;;
               ak_10m) GRDNAME='alaska' ; GRDRES=0p16 ; GRIDNR=255  ; MODNR=11   ;;
             esac
-            echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd}
+            echo "${USHgfs}/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd}
           fi
           echo "${GRIBDATA}/${fcmdigrd}.${nigrd}" >> ${fcmdnow}
           chmod 744 ${fcmdigrd}.${nigrd}
@@ -321,11 +319,12 @@ source "$HOMEgfs/ush/preamble.sh"
               glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11   ;;
               reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11   ;;
               glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255  ; MODNR=11 ;;
-              glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255  ; MODNR=11 ;;
+              glo_100) GRDNAME='global' ; GRDRES=1p00 ; GRIDNR=255  ; MODNR=11 ;;
+              glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255  ; MODNR=11 ;;
               glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255  ; MODNR=11 ;;
               gwes_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255  ; MODNR=10 ;;
           esac
-          echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdnow}
+          echo "${USHgfs}/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdnow}
         done
       fi
 
@@ -407,7 +406,7 @@ source "$HOMEgfs/ush/preamble.sh"
       ENSTAG=""
       if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi
       gribchk="${RUN}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2"
-      if [ ! -s ${COM_WAVE_GRID}/${gribchk} ]; then
+      if [ ! -s ${COMOUT_WAVE_GRID}/${gribchk} ]; then
         set +x
         echo ' '
         echo '********************************************'
diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh
index a7aa957564..0b8874f3fb 100755
--- a/scripts/exgfs_wave_post_pnt.sh
+++ b/scripts/exgfs_wave_post_pnt.sh
@@ -22,6 +22,10 @@
 # 2020-07-30  Jessica Meixner: Points only - no gridded data
 # 2020-09-29  Jessica Meixner: optimized by changing loop structures
 #
+# COM inputs:
+#  - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}
+#  - ${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}
+#
 # $Id$
 #
 # Attributes:
@@ -32,7 +36,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -40,7 +44,7 @@ source "$HOMEgfs/ush/preamble.sh"
 
   # Set wave model ID tag to include member number
   # if ensemble; waveMEMB var empty in deterministic
-  export WAV_MOD_TAG=${CDUMP}wave${waveMEMB}
+  export WAV_MOD_TAG=${RUN}wave${waveMEMB}
 
   echo "HAS BEGUN on $(hostname)"
   echo "Starting WAVE PNT POSTPROCESSOR SCRIPT for $WAV_MOD_TAG"
@@ -117,12 +121,12 @@ source "$HOMEgfs/ush/preamble.sh"
 # Copy model definition files
   iloop=0
   for grdID in ${waveuoutpGRD}; do
-    if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+    if [[ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
       set +x
-      echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+      echo " Mod def file for ${grdID} found in ${COMIN_WAVE_PREP}. copying ...."
       set_trace
 
-      cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+      cp -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
       iloop=$((iloop + 1))
     fi
   done
@@ -151,12 +155,16 @@ source "$HOMEgfs/ush/preamble.sh"
 
   rm -f buoy.loc
 
-  if [ -f $PARMwave/wave_${NET}.buoys ]
+  if [ -f ${PARMgfs}/wave/wave_${NET}.buoys ]
   then
-    cp -f $PARMwave/wave_${NET}.buoys buoy.loc.temp
+    cp -f ${PARMgfs}/wave/wave_${NET}.buoys buoy.loc.temp
     if [ "$DOBNDPNT_WAV" = YES ]; then
       #only do boundary points
-      sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc
+      sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc || {
+          echo "WARNING: No boundary points found in buoy file ${PARMgfs}/wave/wave_${NET}.buoys"
+          echo "         Ending job without doing anything."
+          exit 0
+        }
     else
       #exclude boundary points
       sed -n '/^\$.*/!p' buoy.loc.temp | grep -v IBP > buoy.loc
@@ -166,7 +174,7 @@ source "$HOMEgfs/ush/preamble.sh"
   if [ -s buoy.loc ]
   then
     set +x
-    echo "   buoy.loc and buoy.ibp copied and processed ($PARMwave/wave_${NET}.buoys)."
+    echo "   buoy.loc and buoy.ibp copied and processed (${PARMgfs}/wave/wave_${NET}.buoys)."
     set_trace
   else
     set +x
@@ -184,9 +192,9 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 1.d Input template files
 
-  if [ -f $PARMwave/ww3_outp_spec.inp.tmpl ]
+  if [ -f ${PARMgfs}/wave/ww3_outp_spec.inp.tmpl ]
   then
-    cp -f $PARMwave/ww3_outp_spec.inp.tmpl ww3_outp_spec.inp.tmpl
+    cp -f ${PARMgfs}/wave/ww3_outp_spec.inp.tmpl ww3_outp_spec.inp.tmpl
   fi
 
   if [ -f ww3_outp_spec.inp.tmpl ]
@@ -207,9 +215,9 @@ source "$HOMEgfs/ush/preamble.sh"
     DOBLL_WAV='NO'
   fi
 
-  if [ -f $PARMwave/ww3_outp_bull.inp.tmpl ]
+  if [ -f ${PARMgfs}/wave/ww3_outp_bull.inp.tmpl ]
   then
-    cp -f $PARMwave/ww3_outp_bull.inp.tmpl ww3_outp_bull.inp.tmpl
+    cp -f ${PARMgfs}/wave/ww3_outp_bull.inp.tmpl ww3_outp_bull.inp.tmpl
   fi
 
   if [ -f ww3_outp_bull.inp.tmpl ]
@@ -243,10 +251,10 @@ source "$HOMEgfs/ush/preamble.sh"
         -e "s/FORMAT/F/g" \
                                ww3_outp_spec.inp.tmpl > ww3_outp.inp
 
-    ln -s mod_def.$waveuoutpGRD mod_def.ww3
+    ${NLN} mod_def.$waveuoutpGRD mod_def.ww3
     HMS="${cyc}0000"
-    if [[ -f "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then
-      ln -s "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \
+    if [[ -f "${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then
+      ${NLN} "${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \
         "./out_pnt.${waveuoutpGRD}"
     else
       echo '*************************************************** '
@@ -259,10 +267,10 @@ source "$HOMEgfs/ush/preamble.sh"
     fi
 
     rm -f buoy_tmp.loc buoy_log.ww3 ww3_oup.inp
-    ln -fs ./out_pnt.${waveuoutpGRD} ./out_pnt.ww3
-    ln -fs ./mod_def.${waveuoutpGRD} ./mod_def.ww3
+    ${NLN} ./out_pnt.${waveuoutpGRD} ./out_pnt.ww3
+    ${NLN} ./mod_def.${waveuoutpGRD} ./mod_def.ww3
     export pgm=ww3_outp;. prep_step
-    $EXECwave/ww3_outp > buoy_lst.loc 2>&1
+    ${EXECgfs}/ww3_outp > buoy_lst.loc 2>&1
     export err=$?;err_chk
 
 
@@ -285,14 +293,14 @@ source "$HOMEgfs/ush/preamble.sh"
     fi
 
 # Create new buoy_log.ww3
-    cat buoy.loc | awk '{print $3}' | sed 's/'\''//g' > ibp_tags
+    awk '{print $3}' buoy.loc | sed 's/'\''//g' > ibp_tags
     grep -F -f ibp_tags buoy_log.ww3 > buoy_log.tmp
     rm -f buoy_log.dat
     mv buoy_log.tmp buoy_log.dat
 
     grep -F -f ibp_tags buoy_lst.loc > buoy_tmp1.loc
     #sed    '$d' buoy_tmp1.loc > buoy_tmp2.loc
-    buoys=$(awk '{ print $1 }' buoy_tmp1.loc)
+    awk '{ print $1 }' buoy_tmp1.loc > buoy_lst.txt
     Nb=$(wc buoy_tmp1.loc | awk '{ print $1 }')
     rm -f buoy_tmp1.loc
 
@@ -345,6 +353,8 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 1.a.2 Loop over forecast time to generate post files
   fhr=$FHMIN_WAV
+  # Generate sed-safe paths (with "/" escaped) for the sed replacement strings below
+  escaped_USHgfs="${USHgfs//\//\\\/}"
   while [ $fhr -le $FHMAX_WAV_PNT ]; do
 
     echo "   Creating the wave point scripts at : $(date)"
@@ -361,13 +371,14 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # Create instances of directories for spec and gridded output
     export SPECDATA=${DATA}/output_$YMDHMS
+    escaped_SPECDATA="${SPECDATA//\//\\\/}"
     export BULLDATA=${DATA}/output_$YMDHMS
     cp $DATA/mod_def.${waveuoutpGRD} mod_def.${waveuoutpGRD}
 
-    pfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}"
+    pfile="${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}"
     if [ -f  ${pfile} ]
     then
-      ln -fs ${pfile} ./out_pnt.${waveuoutpGRD}
+      ${NLN} ${pfile} ./out_pnt.${waveuoutpGRD}
     else
       echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.$waveuoutpGRD.${YMD}.${HMS} "
       echo ' '
@@ -381,19 +392,15 @@ source "$HOMEgfs/ush/preamble.sh"
     if [ "$DOSPC_WAV" = 'YES' ]
     then
       export dtspec=3600.
-      for buoy in $buoys
-      do
-        echo "$USHwave/wave_outp_spec.sh $buoy $ymdh spec $SPECDATA > $SPECDATA/spec_$buoy.out 2>&1" >> tmpcmdfile.$FH3
-      done
+      # Construct the wave_outp_spec (spec) command to run on each buoy in buoy_lst.txt
+      sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_spec.sh \1 ${ymdh} spec ${escaped_SPECDATA} > ${escaped_SPECDATA}\/spec_\1.out 2>\&1/" buoy_lst.txt >> "tmpcmdfile.${FH3}"
     fi
 
     if [ "$DOBLL_WAV" = 'YES' ]
     then
       export dtspec=3600.
-      for buoy in $buoys
-      do
-        echo "$USHwave/wave_outp_spec.sh $buoy $ymdh bull $SPECDATA > $SPECDATA/bull_$buoy.out 2>&1" >> tmpcmdfile.$FH3
-      done
+      # Construct the wave_outp_spec (bull) command to run on each buoy in buoy_lst.txt
+      sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_spec.sh \1 ${ymdh} bull ${escaped_SPECDATA} > ${escaped_SPECDATA}\/bull_\1.out 2>\&1/" buoy_lst.txt >> "tmpcmdfile.${FH3}"
     fi
 
     split -n l/1/10  tmpcmdfile.$FH3 > cmdfile.${FH3}.01
@@ -499,27 +506,24 @@ source "$HOMEgfs/ush/preamble.sh"
 
   cd $DATA
 
-  echo "Before create cmdfile for cat bouy : $(date)"
-  rm -f cmdfile.bouy
-  touch cmdfile.bouy
-  chmod 744 cmdfile.bouy
+  echo "Before create cmdfile for cat buoy : $(date)"
+  rm -f cmdfile.buoy
+  touch cmdfile.buoy
+  chmod 744 cmdfile.buoy
   CATOUTDIR=${DATA}/pnt_cat_out
+  escaped_CATOUTDIR="${CATOUTDIR//\//\\\/}"
   mkdir -p ${CATOUTDIR}
 
   if [ "$DOSPC_WAV" = 'YES' ]
   then
-    for buoy in $buoys
-    do
-      echo "$USHwave/wave_outp_cat.sh $buoy $FHMAX_WAV_PNT spec > ${CATOUTDIR}/spec_cat_$buoy.out 2>&1" >> cmdfile.bouy
-    done
+    # Construct wave_outp_cat (spec) call for each buoy in buoy_lst.txt
+    sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_cat.sh \1 ${FHMAX_WAV_PNT} spec > ${escaped_CATOUTDIR}\/spec_cat_\1.out 2>\&1/" buoy_lst.txt >> cmdfile.buoy
   fi
 
   if [ "$DOBLL_WAV" = 'YES' ]
   then
-    for buoy in $buoys
-    do
-      echo "$USHwave/wave_outp_cat.sh $buoy $FHMAX_WAV_PNT bull > ${CATOUTDIR}/bull_cat_$buoy.out 2>&1" >> cmdfile.bouy
-    done
+    # Construct wave_outp_cat (bull) call for each buoy in buoy_lst.txt
+    sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_cat.sh \1 ${FHMAX_WAV_PNT} bull > ${escaped_CATOUTDIR}\/bull_cat_\1.out 2>\&1/" buoy_lst.txt >> cmdfile.buoy
   fi
 
   if [ ${CFP_MP:-"NO"} = "YES" ]; then
@@ -527,18 +531,18 @@ source "$HOMEgfs/ush/preamble.sh"
     ifile=0
     iline=1
     ifirst='yes'
-    nlines=$( wc -l cmdfile.bouy | awk '{print $1}' )
+    nlines=$( wc -l < cmdfile.buoy)
     while [ $iline -le $nlines ]; do
-      line=$( sed -n ''$iline'p' cmdfile.bouy )
+      line=$( sed -n ''$iline'p' cmdfile.buoy )
       if [ -z "$line" ]; then
         break
       else
         if [ "$ifirst" = 'yes' ]; then
-          echo "#!/bin/sh" > cmdfile.bouy.$nfile
-          echo "$nfile cmdfile.bouy.$nfile" >> cmdmprogbouy
-          chmod 744 cmdfile.bouy.$nfile
+          echo "#!/bin/sh" > cmdfile.buoy.$nfile
+          echo "$nfile cmdfile.buoy.$nfile" >> cmdmprogbuoy
+          chmod 744 cmdfile.buoy.$nfile
         fi
-        echo $line >> cmdfile.bouy.$nfile
+        echo $line >> cmdfile.buoy.$nfile
         nfile=$(( nfile + 1 ))
         if [ $nfile -eq $NTASKS ]; then
           nfile=0
@@ -549,7 +553,7 @@ source "$HOMEgfs/ush/preamble.sh"
     done
   fi
 
-  wavenproc=$(wc -l cmdfile.bouy | awk '{print $1}')
+  wavenproc=$(wc -l < cmdfile.buoy)
   wavenproc=$(echo $((${wavenproc}<${NTASKS}?${wavenproc}:${NTASKS})))
 
   set +x
@@ -562,9 +566,11 @@ source "$HOMEgfs/ush/preamble.sh"
   if [ "$wavenproc" -gt '1' ]
   then
     if [ ${CFP_MP:-"NO"} = "YES" ]; then
-      ${wavempexec} -n ${wavenproc} ${wave_mpmd} cmdmprogbouy
+      # shellcheck disable=SC2086
+      ${wavempexec} -n "${wavenproc}" ${wave_mpmd} cmdmprogbuoy
     else
-      ${wavempexec} ${wavenproc} ${wave_mpmd} cmdfile.bouy
+      # shellcheck disable=SC2086
+      ${wavempexec} "${wavenproc}" ${wave_mpmd} cmdfile.buoy
     fi
     exit=$?
   else
@@ -610,43 +616,43 @@ source "$HOMEgfs/ush/preamble.sh"
   if [ ${CFP_MP:-"NO"} = "YES" ] && [ "$DOBLL_WAV" = "YES" ]; then
     if [ "$DOBNDPNT_WAV" = YES ]; then
       if [ "$DOSPC_WAV" = YES ]; then
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG ibp $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibp $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
       fi
       if [ "$DOBLL_WAV" = YES ]; then
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG ibpbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibpbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG ibpcbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibpcbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
       fi
     else
       if [ "$DOSPC_WAV" = YES ]; then
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG spec $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG spec $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
       fi
       if [ "$DOBLL_WAV" = YES ]; then
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG bull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG bull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
-        echo "$nm $USHwave/wave_tar.sh $WAV_MOD_TAG cbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "$nm ${USHgfs}/wave_tar.sh $WAV_MOD_TAG cbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
         nm=$(( nm + 1 ))
       fi
     fi
   else
     if [ "$DOBNDPNT_WAV" = YES ]; then
       if [ "$DOSPC_WAV" = YES ]; then
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG ibp $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibp $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
       fi
       if [ "$DOBLL_WAV" = YES ]; then
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG ibpbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG ibpcbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibpbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG ibpcbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
       fi
     else
       if [ "$DOSPC_WAV" = YES ]; then
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG spec $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG spec $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
       fi
       if [ "$DOBLL_WAV" = YES ]; then
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG bull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
-        echo "$USHwave/wave_tar.sh $WAV_MOD_TAG cbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG bull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
+        echo "${USHgfs}/wave_tar.sh $WAV_MOD_TAG cbull $Nb > ${WAV_MOD_TAG}_spec_tar.out 2>&1 "   >> cmdtarfile
       fi
     fi
   fi
@@ -693,6 +699,6 @@ source "$HOMEgfs/ush/preamble.sh"
 # 4.  Ending output
 
 
-exit $exit_code
+exit "${exit_code}"
 
 # End of MWW3 point postprocessor script ---------------------------------------- #
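
In the hunks above, the per-buoy loops over ${buoys} are replaced by a single sed transform over buoy_lst.txt. Because the replacement text contains paths, the slashes in ${USHgfs} and the output directories are escaped first (the escaped_* variables). A minimal sketch of the same transform with hypothetical paths and buoy IDs:

    # Hypothetical inputs for illustration only.
    USHgfs=/path/to/ush
    SPECDATA=/path/to/output
    ymdh=2021032500
    printf '%s\n' 46029 51001 > buoy_lst.txt

    # Escape "/" so the paths survive the s/.../.../ replacement, then turn each
    # buoy ID into a full wave_outp_spec.sh command line (the "&" is escaped
    # because sed would otherwise expand it to the whole match).
    escaped_USHgfs="${USHgfs//\//\\\/}"
    escaped_SPECDATA="${SPECDATA//\//\\\/}"
    sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_spec.sh \1 ${ymdh} spec ${escaped_SPECDATA} > ${escaped_SPECDATA}\/spec_\1.out 2>\&1/" buoy_lst.txt

Using a different sed delimiter (for example s|...|...|) would avoid the slash escaping entirely; the & would still need to be escaped.
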
diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh
index 2e6cb2071b..5f5b2c531e 100755
--- a/scripts/exgfs_wave_prdgen_bulls.sh
+++ b/scripts/exgfs_wave_prdgen_bulls.sh
@@ -8,6 +8,10 @@
 # Remarks :                                                                   #
 # - Supplemental error output is written to the gfswave_prdgbulls.log file.   #
 #                                                                             #
+# COM inputs:                                                                 #
+#  - ${COMIN_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar                      #
+# COM outputs:                                                                #
+#  - ${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}                         #
 #                                                                             #
 # Origination  : 05/02/2007                                                   #
 # Last update  : 08/20/2020                                                   # 
@@ -18,7 +22,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -29,12 +33,6 @@ source "$HOMEgfs/ush/preamble.sh"
  export cycle=${cycle:-t${cyc}z}
  export pgmout=OUTPUT.$$
  export DATA=${DATA:-${DATAROOT:?}/${job}.$$}
- #export CODEwave=${CODEwave:-${PACKAGEROOT}/${NET}_code.${wave_code_ver}/${code_pkg}}
- export EXECwave=${EXECwave:-$HOMEgfs/exec}
- export FIXwave=${FIXwave:-$HOMEgfs/fix}
- export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave}
- export USHwave=${USHwave:-$HOMEgfs/ush}
- #export EXECcode=${EXECcode:-CODEwave/exec}
 
  mkdir -p $DATA
  cd $DATA
@@ -58,11 +56,11 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 1.  Get necessary files
  set +x
- echo "   Copying bulletins from ${COM_WAVE_STATION}"
+ echo "   Copying bulletins from ${COMIN_WAVE_STATION}"
  set_trace
 
 # 1.a Link the input file and untar it
- BullIn="${COM_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar"
+ BullIn="${COMIN_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar"
  if [ -f $BullIn ]; then
    cp $BullIn cbull.tar
  else
@@ -117,8 +115,8 @@ source "$HOMEgfs/ush/preamble.sh"
   echo '   --------------------------'
   echo ' '
 # 1.c Get the data cards
- if [ -f $PARMwave/bull_awips_gfswave ]; then
-   cp $PARMwave/bull_awips_gfswave awipsbull.data
+ if [ -f ${PARMgfs}/wave/bull_awips_gfswave ]; then
+   cp ${PARMgfs}/wave/bull_awips_gfswave awipsbull.data
  else
    msg="ABNORMAL EXIT: NO AWIPS BULLETIN HEADER DATA FILE"
    set +x
@@ -176,7 +174,7 @@ source "$HOMEgfs/ush/preamble.sh"
    set_trace
    
    formbul.pl -d "${headr}" -f "${fname}" -j "${job}" -m "${RUNwave}" \
-              -p "${COM_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1
+              -p "${COMOUT_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1
    OK=$?
 
    if [ "$OK" != '0' ] || [ ! -f $oname ]; then
@@ -202,15 +200,15 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 3. Send output files to the proper destination
 set_trace
-cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
+cp "awipsbull.${cycle}.${RUNwave}" "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
 if [ "$SENDDBN_NTC" = YES ]; then
     make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" "${DATA}/awipsbull.${cycle}.${RUNwave}" \
-		     "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
+		     "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
 else
     if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then
 	echo "Making NTC bulletin for parallel environment, but do not alert."
 	(export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \
-					     "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}")
+					     "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}")
     fi
 fi
 
diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh
index b0cbc124ce..9111c81273 100755
--- a/scripts/exgfs_wave_prdgen_gridded.sh
+++ b/scripts/exgfs_wave_prdgen_gridded.sh
@@ -8,6 +8,11 @@
 # Remarks :                                                                   #
 # - Supplemental error output is written to the wave.log file.                #
 #                                                                             #
+# COM inputs:                                                                 #
+#  - ${COMIN_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2            #
+#                                                                             #
+# COM outputs:                                                                #
+#  - ${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}             #
 #                                                                             #
 # Origination  : 05/02/2007                                                   #
 # Last update  : 10/08/2020                                                   # 
@@ -19,7 +24,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -31,9 +36,6 @@ source "$HOMEgfs/ush/preamble.sh"
  export FHOUT_WAV=${FHOUT_WAV:-6}         #from 72 to 180 inc=6
  export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3}
  export maxtries=720
- export FIXwave=${FIXwave:-$HOMEgfs/fix/wave}
- export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave}
- export USHwave=${USHwave:-$HOMEgfs/ush}
  export cyc=${cyc:-00}
  export cycle=${cycle:-t${cyc}z}
  export pgmout=OUTPUT.$$
@@ -99,31 +101,14 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m}
      #
 
      GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2"
-     GRIBIN_chk=$GRIBIN.idx
-
-     icnt=1
-     while [ $icnt -lt 1000 ]; do
-       if [ -r $GRIBIN_chk ] ; then
-         break
-       else
-         echo "Waiting for input file: $GRIBIN"
-         let "icnt=icnt+1"
-         sleep 5
-       fi
-       if [ $icnt -ge $maxtries ]; then
-         msg="ABNORMAL EXIT: NO GRIB FILE FOR GRID $GRIBIN"
-         echo ' '
-         echo '**************************** '
-         echo '*** ERROR : NO GRIB FILE *** '
-         echo '**************************** '
-         echo ' '
-         echo $msg
-         set_trace
-         echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog
-         err=1;export err;${errchk} || exit ${err}
-       fi
-     done
-
+     GRIBIN_chk="${GRIBIN}.idx"
+     sleep_interval=5
+     max_tries=1000
+     if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then
+       echo "FATAL ERROR: ${GRIBIN_chk} not found after waiting $((sleep_interval * ( max_tries - 1))) secs"
+       echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog
+       err=1;export err;${errchk} || exit ${err}
+     fi
      GRIBOUT=$RUNwave.$cycle.$grdID.f${fhr}.clipped.grib2
 
      iparam=1
@@ -154,13 +139,13 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m}
      GRIBIN=$RUNwave.$cycle.$grdID.f${fhr}.clipped.grib2
      GRIBIN_chk=$GRIBIN.idx
 
-     ln -s $GRIBIN gribfile.$grdID.f${fhr}
+     ${NLN} $GRIBIN gribfile.$grdID.f${fhr}
 
      #
 # 1.d Input template files
-     parmfile=$PARMwave/grib2_${RUNwave}.$grdOut.f${fhr}
+     parmfile=${PARMgfs}/wave/grib2_${RUNwave}.$grdOut.f${fhr}
      if [ -f $parmfile ]; then
-       ln -s $parmfile awipsgrb.$grdID.f${fhr}
+       ${NLN} $parmfile awipsgrb.$grdID.f${fhr}
      else
        echo '*** ERROR : NO template  grib2_${RUNwave}.$grdID.f${fhr} *** '
        echo "$RUNwave $grdID $fhr prdgen $date $cycle : GRIB template file missing." >> $wavelog
@@ -235,16 +220,16 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m}
      #set_trace
      #set +x
      echo "      Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}"
-     echo "          in ${COM_WAVE_WMO}"
+     echo "          in ${COMOUT_WAVE_WMO}"
      #set_trace
-     cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
+     cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
      #set +x
 
 
      if [ "$SENDDBN" = 'YES' ]
      then
        echo "      Sending $AWIPSGRB.$grdID.f${fhr} to DBRUN."
-       "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
+       "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
      fi
      rm -f $AWIPSGRB.$grdID.f${fhr} tocgrib2.out
    done # For grids
diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh
index be006c1c85..f83ead2c22 100755
--- a/scripts/exgfs_wave_prep.sh
+++ b/scripts/exgfs_wave_prep.sh
@@ -17,13 +17,20 @@
 # Remarks :                                                                   #
 # - For non-fatal errors output is written to the wave.log file.              #
 #                                                                             #
+# COM inputs:                                                                 #
+#  - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}                           #
+#  - ${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f#HHH_prog.nc        #
+#                                                                             #
+# COM outputs:                                                                #
+#  - ${COMOUT_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur                 #
+#                                                                             #
 #  Update record :                                                            #
 #                                                                             #
 # - Origination:                                               01-Mar-2007    #
 #                                                                             #
 # Update log                                                                  #
 # Mar2007 HTolman - Added NCO note on resources on mist/dew                   #
-# Apr2007 HTolman - Renaming mod_def files in $FIX_wave.                      #
+# Apr2007 HTolman - Renaming mod_def files in ${FIXgfs}/wave.                 #
 # Mar2011 AChawla - Migrating to a vertical structure                         #
 # Nov2012 JHAlves - Transitioning to WCOSS                                    #
 # Apr2019 JHAlves - Transitioning to GEFS workflow                            #
@@ -40,7 +47,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -162,12 +169,12 @@ source "$HOMEgfs/ush/preamble.sh"
 
   for grdID in $grdINP $waveGRD
   do
-    if [ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]
+    if [ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]
     then
       set +x
-      echo " Mod def file for $grdID found in ${COM_WAVE_PREP}. copying ...."
+      echo " Mod def file for $grdID found in ${COMIN_WAVE_PREP}. copying ...."
       set_trace
-      cp ${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID
+      cp ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID
 
     else
       set +x
@@ -207,16 +214,16 @@ source "$HOMEgfs/ush/preamble.sh"
               ;;
      esac 
 
-     if [ -f $PARMwave/ww3_prnc.${type}.$grdID.inp.tmpl ]
+     if [ -f ${PARMgfs}/wave/ww3_prnc.${type}.$grdID.inp.tmpl ]
      then
-       cp $PARMwave/ww3_prnc.${type}.$grdID.inp.tmpl .
+       cp ${PARMgfs}/wave/ww3_prnc.${type}.$grdID.inp.tmpl .
      fi
 
      if [ -f ww3_prnc.${type}.$grdID.inp.tmpl ]
      then
        set +x
        echo ' '
-       echo "   ww3_prnc.${type}.$grdID.inp.tmpl copied ($PARMwave)."
+       echo "   ww3_prnc.${type}.$grdID.inp.tmpl copied (${PARMgfs}/wave)."
        echo ' '
        set_trace
      else
@@ -247,7 +254,7 @@ source "$HOMEgfs/ush/preamble.sh"
     if [ "${RUNMEM}" = "-1" ] || [ "${WW3ICEIENS}" = "T" ] || [ "$waveMEMB" = "00" ]
     then
 
-      $USHwave/wave_prnc_ice.sh > wave_prnc_ice.out 
+      ${USHgfs}/wave_prnc_ice.sh > wave_prnc_ice.out
       ERR=$?
     
       if [ -d ice ]
@@ -322,19 +329,19 @@ source "$HOMEgfs/ush/preamble.sh"
       ymdh_rtofs=$ymdh_beg
 
       if [  "$FHMAX_WAV_CUR" -le 72 ]; then 
-        rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc"
-        rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc"
-        rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc"
+        rtofsfile1="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc"
+        rtofsfile2="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc"
+        rtofsfile3="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc"
         if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ]; then 
            #Needed current files are not available, so use RTOFS from previous day 
            export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8)
         fi 
       else
-        rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc"
-        rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc"
-        rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc"
-        rtofsfile4="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc"
-        rtofsfile5="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc"
+        rtofsfile1="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc"
+        rtofsfile2="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc"
+        rtofsfile3="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc"
+        rtofsfile4="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc"
+        rtofsfile5="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc"
         if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ] ||
             [ ! -f $rtofsfile4 ] || [ ! -f $rtofsfile5 ]; then
             #Needed current files are not available, so use RTOFS from previous day 
@@ -360,8 +367,8 @@ source "$HOMEgfs/ush/preamble.sh"
         fhr_rtofs=$(${NHOUR} ${ymdh_rtofs} ${RPDY}00)
         fh3_rtofs=$(printf "%03d" "${fhr_rtofs#0}")
 
-        curfile1h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
-        curfile3h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
+        curfile1h=${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
+        curfile3h=${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
 
         if [ -s ${curfile1h} ]  && [ "${FLGHF}" = "T" ] ; then
           curfile=${curfile1h}
@@ -389,10 +396,10 @@ source "$HOMEgfs/ush/preamble.sh"
         fi
 
         if [ ${CFP_MP:-"NO"} = "YES" ]; then
-          echo "$nm $USHwave/wave_prnc_cur.sh $ymdh_rtofs $curfile $fhr_rtofs $FLGFIRST > cur_$ymdh_rtofs.out 2>&1" >> cmdfile
+          echo "$nm ${USHgfs}/wave_prnc_cur.sh $ymdh_rtofs $curfile $fhr_rtofs $FLGFIRST > cur_$ymdh_rtofs.out 2>&1" >> cmdfile
           nm=$(expr $nm + 1)
         else
-          echo "$USHwave/wave_prnc_cur.sh $ymdh_rtofs $curfile $fhr_rtofs $FLGFIRST > cur_$ymdh_rtofs.out 2>&1" >> cmdfile
+          echo "${USHgfs}/wave_prnc_cur.sh $ymdh_rtofs $curfile $fhr_rtofs $FLGFIRST > cur_$ymdh_rtofs.out 2>&1" >> cmdfile
         fi
 
         if [ "${FLGFIRST}" = "T" ] ; then
@@ -465,7 +472,7 @@ source "$HOMEgfs/ush/preamble.sh"
         cat $file >> cur.${WAVECUR_FID}
       done
 
-      cp -f cur.${WAVECUR_FID} ${COM_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur 
+      cp -f cur.${WAVECUR_FID} ${COMOUT_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur 
 
     else
       echo ' '
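
The RTOFS hunks above fall back to the previous day's rtofs_glo_2ds files when the current day's files are missing, by rolling ${RPDY} back 24 hours with NDATE. A minimal sketch of that rollback, assuming NDATE is the prod_util utility that shifts a YYYYMMDDHH timestamp by a signed number of hours (values below are hypothetical):

    # Hypothetical values; NDATE takes YYYYMMDDHH and returns YYYYMMDDHH.
    RPDY=20240525
    WAVECUR_DID=${WAVECUR_DID:-rtofs}
    rtofsfile1="${COMIN_RTOFS:-/path/to/com/rtofs}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc"
    if [[ ! -f "${rtofsfile1}" ]]; then
      # Current-day file missing: use RTOFS from the previous day instead.
      RPDY=$(${NDATE} -24 "${RPDY}00" | cut -c1-8)   # e.g. 20240525 -> 20240524
    fi
    echo "Using RTOFS files from ${WAVECUR_DID}.${RPDY}"
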
diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py
new file mode 100755
index 0000000000..ec8154317f
--- /dev/null
+++ b/scripts/exglobal_archive.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+
+import os
+
+from pygfs.task.archive import Archive
+from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, chdir, logit
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the Archive object
+    archive = Archive(config)
+
+    # Pull out all the configuration keys needed to run the rest of archive steps
+    keys = ['ATARDIR', 'current_cycle', 'FHMIN', 'FHMAX', 'FHOUT', 'RUN', 'PDY',
+            'DO_VERFRAD', 'DO_VMINMON', 'DO_VERFOZN', 'DO_ICE', 'DO_AERO', 'DO_PREP_OBS_AERO',
+            'PARMgfs', 'DO_OCN', 'DO_WAVE', 'WRITE_DOPOST', 'PSLOT', 'HPSSARCH', 'DO_MOS',
+            'DO_JEDISNOWDA', 'LOCALARCH', 'REALTIME', 'ROTDIR', 'ARCH_WARMICFREQ',
+            'ARCH_FCSTICFREQ', 'ARCH_CYC', 'assim_freq', 'ARCDIR', 'SDATE',
+            'FHMIN_GFS', 'FHMAX_GFS', 'FHOUT_GFS', 'ARCH_GAUSSIAN', 'MODE',
+            'FHOUT_OCN', 'FHOUT_ICE', 'FHOUT_OCN_GFS', 'FHOUT_ICE_GFS', 'DO_BUFRSND', 'DOHYBVAR',
+            'ARCH_GAUSSIAN_FHMAX', 'ARCH_GAUSSIAN_FHINC',
+            'DOIAU', 'OCNRES', 'ICERES', 'NUM_SND_COLLECTIVES', 'FHOUT_WAV',
+            'FHOUT_HF_WAV', 'FHMAX_WAV', 'FHMAX_HF_WAV', 'FHMAX_WAV_GFS',
+            'restart_interval_gdas', 'restart_interval_gfs',
+            'AERO_ANL_RUN', 'AERO_FCST_RUN', 'DOIBP_WAV', 'DO_JEDIOCNVAR',
+            'NMEM_ENS', 'DO_JEDIATMVAR', 'DO_VRFY_OCEANDA', 'FHMAX_FITS',
+            'IAUFHRS', 'DO_FIT2OBS']
+
+    archive_dict = AttrDict()
+    for key in keys:
+        archive_dict[key] = archive.task_config[key]
+
+    # Also import all COMIN* and COMOUT* directory and template variables
+    for key in archive.task_config.keys():
+        if key.startswith("COMIN_") or key.startswith("COMOUT_"):
+            archive_dict[key] = archive.task_config[key]
+
+    cwd = os.getcwd()
+
+    os.chdir(config.ROTDIR)
+
+    # Determine which archives to create
+    arcdir_set, atardir_sets = archive.configure(archive_dict)
+
+    # Populate the product archive (ARCDIR)
+    archive.execute_store_products(arcdir_set)
+
+    # Create the backup tarballs and store in ATARDIR
+    for atardir_set in atardir_sets:
+        archive.execute_backup_dataset(atardir_set)
+
+    os.chdir(cwd)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
deleted file mode 120000
index 5c0d685ab0..0000000000
--- a/scripts/exglobal_archive.sh
+++ /dev/null
@@ -1 +0,0 @@
-exglobal_archive_gsl.sh
\ No newline at end of file
diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py
new file mode 100755
index 0000000000..66f6796343
--- /dev/null
+++ b/scripts/exglobal_atm_analysis_fv3_increment.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# exglobal_atm_analysis_fv3_increment.py
+# This script creates an AtmAnalysis object
+# and runs the init_fv3_increment and fv3_increment methods
+# which convert the JEDI increment into an FV3 increment
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.atm_analysis import AtmAnalysis
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atm analysis task
+    AtmAnl = AtmAnalysis(config)
+    AtmAnl.init_fv3_increment()
+    AtmAnl.fv3_increment()
diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_variational.py
similarity index 89%
rename from scripts/exglobal_atm_analysis_run.py
rename to scripts/exglobal_atm_analysis_variational.py
index 8adbe4a267..07bc208331 100755
--- a/scripts/exglobal_atm_analysis_run.py
+++ b/scripts/exglobal_atm_analysis_variational.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# exglobal_atm_analysis_run.py
+# exglobal_atm_analysis_variational.py
 # This script creates an AtmAnalysis object
 # and runs the execute method
 # which executes the global atm variational analysis
@@ -19,4 +19,4 @@
 
     # Instantiate the atm analysis task
     AtmAnl = AtmAnalysis(config)
-    AtmAnl.execute()
+    AtmAnl.variational()
diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py
new file mode 100755
index 0000000000..c50b00548f
--- /dev/null
+++ b/scripts/exglobal_atmens_analysis_fv3_increment.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# exglobal_atmens_analysis_fv3_increment.py
+# This script creates an AtmEnsAnalysis object
+# and runs the init_fv3_increment and fv3_increment methods
+# which convert the JEDI increment into an FV3 increment
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.atmens_analysis import AtmEnsAnalysis
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atmens analysis task
+    AtmEnsAnl = AtmEnsAnalysis(config)
+    AtmEnsAnl.init_fv3_increment()
+    AtmEnsAnl.fv3_increment()
diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_letkf.py
similarity index 86%
rename from scripts/exglobal_atmens_analysis_run.py
rename to scripts/exglobal_atmens_analysis_letkf.py
index b2eb9fb2e4..30394537cd 100755
--- a/scripts/exglobal_atmens_analysis_run.py
+++ b/scripts/exglobal_atmens_analysis_letkf.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
-# exglobal_atmens_analysis_run.py
+# exglobal_atmens_analysis_letkf.py
 # This script creates an AtmEnsAnalysis object
-# and runs the execute method
+# and runs the letkf method
 # which executes the global atm local ensemble analysis
 import os
 
@@ -19,4 +19,4 @@
 
     # Instantiate the atmens analysis task
     AtmEnsAnl = AtmEnsAnalysis(config)
-    AtmEnsAnl.execute()
+    AtmEnsAnl.letkf()
diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh
index cb3c6467a1..932adf8e10 100755
--- a/scripts/exglobal_atmos_analysis.sh
+++ b/scripts/exglobal_atmos_analysis.sh
@@ -19,14 +19,14 @@
 
 #  Set environment.
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #  Directories.
 pwd=$(pwd)
 
 # Base variables
 CDATE=${CDATE:-"2001010100"}
-CDUMP=${CDUMP:-"gdas"}
+rCDUMP=${rCDUMP:-"gdas"}
 GDUMP=${GDUMP:-"gdas"}
 
 # Derived base variables
@@ -38,11 +38,8 @@ bPDY=$(echo ${BDATE} | cut -c1-8)
 bcyc=$(echo ${BDATE} | cut -c9-10)
 
 # Utilities
-export NCP=${NCP:-"/bin/cp"}
-export NMV=${NMV:-"/bin/mv"}
-export NLN=${NLN:-"/bin/ln -sf"}
 export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"}
-export NCLEN=${NCLEN:-${HOMEgfs}/ush/getncdimlen}
+export NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 COMPRESS=${COMPRESS:-gzip}
 UNCOMPRESS=${UNCOMPRESS:-gunzip}
 APRUNCFP=${APRUNCFP:-""}
@@ -68,19 +65,19 @@ DOIAU=${DOIAU:-"NO"}
 export IAUFHRS=${IAUFHRS:-"6"}
 
 # Dependent Scripts and Executables
-GSIEXEC=${GSIEXEC:-${HOMEgfs}/exec/gsi.x}
+GSIEXEC=${GSIEXEC:-${EXECgfs}/gsi.x}
 export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1}
 export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}}
 export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}}
 export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}}
-export CALCINCEXEC=${CALCINCEXEC:-${HOMEgfs}/exec/calc_increment_ens.x}
-export CALCINCNCEXEC=${CALCINCNCEXEC:-${HOMEgfs}/exec/calc_increment_ens_ncio.x}
-export CALCANLEXEC=${CALCANLEXEC:-${HOMEgfs}/exec/calc_analysis.x}
-export CHGRESNCEXEC=${CHGRESNCEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter_nc.x}
-export CHGRESINCEXEC=${CHGRESINCEXEC:-${HOMEgfs}/exec/interp_inc.x}
-CHGRESEXEC=${CHGRESEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter.x}
+export CALCINCEXEC=${CALCINCEXEC:-${EXECgfs}/calc_increment_ens.x}
+export CALCINCNCEXEC=${CALCINCNCEXEC:-${EXECgfs}/calc_increment_ens_ncio.x}
+export CALCANLEXEC=${CALCANLEXEC:-${EXECgfs}/calc_analysis.x}
+export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECgfs}/enkf_chgres_recenter_nc.x}
+export CHGRESINCEXEC=${CHGRESINCEXEC:-${EXECgfs}/interp_inc.x}
+CHGRESEXEC=${CHGRESEXEC:-${EXECgfs}/enkf_chgres_recenter.x}
 export NTHREADS_CHGRES=${NTHREADS_CHGRES:-24}
-CALCINCPY=${CALCINCPY:-${HOMEgfs}/ush/calcinc_gfs.py}
+CALCINCPY=${CALCINCPY:-${USHgfs}/calcinc_gfs.py}
 
 # OPS flags
 RUN=${RUN:-""}
@@ -89,6 +86,8 @@ SENDDBN=${SENDDBN:-"NO"}
 RUN_GETGES=${RUN_GETGES:-"NO"}
 GETGESSH=${GETGESSH:-"getges.sh"}
 export gesenvir=${gesenvir:-${envir}}
+ 
+export hofx_2m_sfcfile=${hofx_2m_sfcfile:-".false."}
 
 # Observations
 OPREFIX=${OPREFIX:-""}
@@ -101,10 +100,10 @@ OSCATBF=${OSCATBF:-${COM_OBS}/${OPREFIX}oscatw.tm00.bufr_d${OSUFFIX}}
 RAPIDSCATBF=${RAPIDSCATBF:-${COM_OBS}/${OPREFIX}rapidscatw.tm00.bufr_d${OSUFFIX}}
 GSNDBF=${GSNDBF:-${COM_OBS}/${OPREFIX}goesnd.tm00.bufr_d${OSUFFIX}}
 GSNDBF1=${GSNDBF1:-${COM_OBS}/${OPREFIX}goesfv.tm00.bufr_d${OSUFFIX}}
-B1HRS2=${B1HRS2:-${COM_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}}
+#B1HRS2=${B1HRS2:-${COM_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}} # HIRS temporarily disabled due to CRTM versioning issues
 B1MSU=${B1MSU:-${COM_OBS}/${OPREFIX}1bmsu.tm00.bufr_d${OSUFFIX}}
-B1HRS3=${B1HRS3:-${COM_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}}
-B1HRS4=${B1HRS4:-${COM_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}}
+#B1HRS3=${B1HRS3:-${COM_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}} # HIRS temporarily disabled due to CRTM versioning issues
+#B1HRS4=${B1HRS4:-${COM_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}} # HIRS temporarily disabled due to CRTM versioning issues
 B1AMUA=${B1AMUA:-${COM_OBS}/${OPREFIX}1bamua.tm00.bufr_d${OSUFFIX}}
 B1AMUB=${B1AMUB:-${COM_OBS}/${OPREFIX}1bamub.tm00.bufr_d${OSUFFIX}}
 B1MHS=${B1MHS:-${COM_OBS}/${OPREFIX}1bmhs.tm00.bufr_d${OSUFFIX}}
@@ -289,21 +288,21 @@ else
 fi
 
 # GSI Fix files
-BERROR=${BERROR:-${FIXgsi}/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77}
-SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt}
-SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt}
-RADCLOUDINFO=${RADCLOUDINFO:-${FIXgsi}/cloudy_radiance_info.txt}
-ATMSFILTER=${ATMSFILTER:-${FIXgsi}/atms_beamwidth.txt}
-ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt}
-CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt}
-vqcdat=${vqcdat:-${FIXgsi}/vqctp001.dat}
-INSITUINFO=${INSITUINFO:-${FIXgsi}/global_insituinfo.txt}
-OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt}
-PCPINFO=${PCPINFO:-${FIXgsi}/global_pcpinfo.txt}
-AEROINFO=${AEROINFO:-${FIXgsi}/global_aeroinfo.txt}
-SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt}
-HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS}.txt}
-OBERROR=${OBERROR:-${FIXgsi}/prepobs_errtable.global}
+BERROR=${BERROR:-${FIXgfs}/gsi/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77}
+SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt}
+SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt}
+RADCLOUDINFO=${RADCLOUDINFO:-${FIXgfs}/gsi/cloudy_radiance_info.txt}
+ATMSFILTER=${ATMSFILTER:-${FIXgfs}/gsi/atms_beamwidth.txt}
+ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS}.txt}
+CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt}
+vqcdat=${vqcdat:-${FIXgfs}/gsi/vqctp001.dat}
+INSITUINFO=${INSITUINFO:-${FIXgfs}/gsi/global_insituinfo.txt}
+OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt}
+PCPINFO=${PCPINFO:-${FIXgfs}/gsi/global_pcpinfo.txt}
+AEROINFO=${AEROINFO:-${FIXgfs}/gsi/global_aeroinfo.txt}
+SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt}
+HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS}.txt}
+OBERROR=${OBERROR:-${FIXgfs}/gsi/prepobs_errtable.global}
 
 # GSI namelist
 SETUP=${SETUP:-""}
@@ -378,11 +377,20 @@ ${NLN} ${SCANINFO}     scaninfo
 ${NLN} ${HYBENSINFO}   hybens_info
 ${NLN} ${OBERROR}      errtable
 
+${NLN} ${FIXgfs}/gsi/AIRS_CLDDET.NL   AIRS_CLDDET.NL
+${NLN} ${FIXgfs}/gsi/CRIS_CLDDET.NL   CRIS_CLDDET.NL
+${NLN} ${FIXgfs}/gsi/IASI_CLDDET.NL   IASI_CLDDET.NL
+
 #If using correlated error, link to the covariance files
 if [ ${USE_CORRELATED_OBERRS} == "YES" ];  then
   if grep -q "Rcov" ${ANAVINFO} ;  then
-     if ls ${FIXgsi}/Rcov* 1> /dev/null 2>&1; then
-       ${NLN} ${FIXgsi}/Rcov* ${DATA}
+     # shellcheck disable=SC2312
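+     # Gather any matching Rcov* covariance files into an array and link each one into DATA individually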
+     mapfile -t covfile_array < <(find "${FIXgfs}/gsi/" -name "Rcov*")
+     if (( ${#covfile_array[@]} > 0 )); then
+       for covfile in "${covfile_array[@]}"; do
+         covfile_base=$(basename "${covfile}")
+         ${NLN} "${covfile}" "${DATA}/${covfile_base}"
+       done
        echo "using correlated obs error"
      else
        echo "FATAL ERROR: Satellite error covariance files (Rcov) are missing."
@@ -422,7 +430,18 @@ ${NLN} ${CRTM_FIX}/NPOESS.VISsnow.EmisCoeff.bin  ./crtm_coeffs/NPOESS.VISsnow.Em
 ${NLN} ${CRTM_FIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin
 ${NLN} ${CRTM_FIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin
 ${NLN} ${CRTM_FIX}/AerosolCoeff.bin              ./crtm_coeffs/AerosolCoeff.bin
-${NLN} ${CRTM_FIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin
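+# Select the CRTM cloud optical table matching the microphysics scheme (imp_physics=8: Thompson, 11: GFDL)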
+if (( imp_physics == 8 )); then
+   echo "using CRTM Thompson cloud optical table"
+   ${NLN} "${CRTM_FIX}/CloudCoeff.Thompson08.-109z-1.bin" ./crtm_coeffs/CloudCoeff.bin
+elif (( imp_physics == 11 )); then
+   echo "using CRTM GFDL cloud optical table"
+   ${NLN} "${CRTM_FIX}/CloudCoeff.GFDLFV3.-109z-1.bin" ./crtm_coeffs/CloudCoeff.bin
+else
+   echo "INVALID imp_physics = ${imp_physics}"
+   echo "FATAL ERROR: No valid CRTM cloud optical table found for imp_physics =  ${imp_physics}"
+   exit 1
+fi
+
 
 ##############################################################
 # Observational data
@@ -526,7 +545,7 @@ if [ ${DOHYBVAR} = "YES" ]; then
 
    for imem in $(seq 1 ${NMEM_ENS}); do
       memchar="mem$(printf %03i "${imem}")"
-      MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com COM_ATMOS_HISTORY
+      MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ATMOS_HISTORY
 
       for fhr in ${fhrs}; do
          ${NLN} ${COM_ATMOS_HISTORY}/${GPREFIX_ENS}atmf0${fhr}${ENKF_SUFFIX}.nc ./ensemble_data/sigf${fhr}_ens_${memchar}
@@ -562,8 +581,8 @@ if [ ${GENDIAG} = "YES" ] ; then
       if [ -d ${DIAG_DIR} ]; then
          rm -rf ${DIAG_DIR}
       fi
-      npe_m1="$((${npe_gsi}-1))"
-      for pe in $(seq 0 ${npe_m1}); do
+      ntasks_m1="$((ntasks-1))"
+      for pe in $(seq 0 ${ntasks_m1}); do
         pedir="dir."$(printf %04i ${pe})
         mkdir -p ${DIAG_DIR}/${pedir}
         ${NLN} ${DIAG_DIR}/${pedir} ${pedir}
@@ -656,7 +675,7 @@ EOFunzip
       chmod 755 ${DATA}/mp_unzip.sh
       ncmd=$(cat ${DATA}/mp_unzip.sh | wc -l)
       if [ ${ncmd} -gt 0 ]; then
-         ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max))
+         ncmd_max=$((ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node))
          APRUNCFP_UNZIP=$(eval echo ${APRUNCFP})
          ${APRUNCFP_UNZIP} ${DATA}/mp_unzip.sh
          export err=$?; err_chk
@@ -743,11 +762,13 @@ cat > gsiparm.anl << EOF
   dfact=0.75,dfact1=3.0,noiqc=.true.,oberrflg=.false.,c_varqc=0.02,
   use_poq7=.true.,qc_noirjaco3_pole=.true.,vqc=.false.,nvqc=.true.,
   aircraft_t_bc=.true.,biaspredt=1.0e5,upd_aircraft=.true.,cleanup_tail=.true.,
-  tcp_width=70.0,tcp_ermax=7.35,
+  tcp_width=70.0,tcp_ermax=7.35,airs_cads=${AIRS_CADS},cris_cads=${CRIS_CADS},
+  iasi_cads=${IASI_CADS},
   ${OBSQC}
 /
 &OBS_INPUT
   dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=50.0,time_window_max=3.0,
+  hofx_2m_sfcfile=${hofx_2m_sfcfile},
   ${OBSINPUT}
 /
 OBS_INPUT::
@@ -821,9 +842,11 @@ OBS_INPUT::
    gomebufr       gome        metop-b     gome_metop-b        0.0     2     0
    atmsbufr       atms        npp         atms_npp            0.0     1     1
    atmsbufr       atms        n20         atms_n20            0.0     1     1
+   atmsbufr       atms        n21         atms_n21            0.0     1     1
    crisbufr       cris        npp         cris_npp            0.0     1     0
    crisfsbufr     cris-fsr    npp         cris-fsr_npp        0.0     1     0
    crisfsbufr     cris-fsr    n20         cris-fsr_n20        0.0     1     0
+   crisfsbufr     cris-fsr    n21         cris-fsr_n21        0.0     1     0
    gsnd1bufr      sndrd1      g14         sndrD1_g14          0.0     1     0
    gsnd1bufr      sndrd2      g14         sndrD2_g14          0.0     1     0
    gsnd1bufr      sndrd3      g14         sndrD3_g14          0.0     1     0
@@ -845,6 +868,7 @@ OBS_INPUT::
    ahibufr        ahi         himawari8   ahi_himawari8       0.0     1     0
    abibufr        abi         g16         abi_g16             0.0     1     0
    abibufr        abi         g17         abi_g17             0.0     1     0
+   abibufr        abi         g18         abi_g18             0.0     1     0
    rapidscatbufr  uv          null        uv                  0.0     0     0
    ompsnpbufr     ompsnp      npp         ompsnp_npp          0.0     0     0
    ompslpbufr     ompslp      npp         ompslp_npp          0.0     0     0
@@ -872,7 +896,9 @@ OBS_INPUT::
   l_hyb_ens=${l_hyb_ens},
   generate_ens=.false.,
   beta_s0=0.125,readin_beta=.false.,
-  s_ens_h=800.,s_ens_v=-0.8,readin_localization=.true.,
+  s_ens_h=1000.0,300.0,150.0,685.0,219.2,s_ens_v=-0.5,-0.5,-0.5,0.0,0.0,
+  readin_localization=.false.,global_spectral_filter_sd=.false.,
+  r_ensloccov4scl=1,nsclgrp=3,naensloc=5,
   aniso_a_en=.false.,oz_univ_static=.false.,uv_hyb_ens=.true.,
   ensemble_path='./ensemble_data/',
   ens_fast_read=.true.,
@@ -964,7 +990,7 @@ cd ${pwd}
 if [ ${SENDECF} = "YES" -a "${RUN}" != "enkf" ]; then
    ecflow_client --event release_fcst
 fi
-echo "${CDUMP} ${CDATE} atminc done at $(date)" > ${COM_ATMOS_ANALYSIS}/${APREFIX}loginc.txt
+echo "${rCDUMP} ${CDATE} atminc done at $(date)" > ${COM_ATMOS_ANALYSIS}/${APREFIX}loginc.txt
 
 ################################################################################
 
diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh
index a2086aa927..423e800879 100755
--- a/scripts/exglobal_atmos_analysis_calc.sh
+++ b/scripts/exglobal_atmos_analysis_calc.sh
@@ -19,22 +19,18 @@
 
 #  Set environment.
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #  Directories.
 pwd=$(pwd)
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
 
 # Base variables
-CDUMP=${CDUMP:-"gdas"}
+rCDUMP=${rCDUMP:-"gdas"}
 GDUMP=${GDUMP:-"gdas"}
 
 # Utilities
-export NCP=${NCP:-"/bin/cp"}
-export NMV=${NMV:-"/bin/mv"}
-export NLN=${NLN:-"/bin/ln -sf"}
 export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"}
-export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+export NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 COMPRESS=${COMPRESS:-gzip}
 UNCOMPRESS=${UNCOMPRESS:-gunzip}
 APRUNCFP=${APRUNCFP:-""}
@@ -53,16 +49,16 @@ export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}}
 export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}}
 export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}}
 
-export CALCANLEXEC=${CALCANLEXEC:-$HOMEgfs/exec/calc_analysis.x}
-export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x}
-export CHGRESINCEXEC=${CHGRESINCEXEC:-$HOMEgfs/exec/interp_inc.x}
+export CALCANLEXEC=${CALCANLEXEC:-${EXECgfs}/calc_analysis.x}
+export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECgfs}/enkf_chgres_recenter_nc.x}
+export CHGRESINCEXEC=${CHGRESINCEXEC:-${EXECgfs}/interp_inc.x}
 export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1}
-CALCINCPY=${CALCINCPY:-$HOMEgfs/ush/calcinc_gfs.py}
-CALCANLPY=${CALCANLPY:-$HOMEgfs/ush/calcanl_gfs.py}
+CALCINCPY=${CALCINCPY:-${USHgfs}/calcinc_gfs.py}
+CALCANLPY=${CALCANLPY:-${USHgfs}/calcanl_gfs.py}
 
 DOGAUSFCANL=${DOGAUSFCANL-"NO"}
-GAUSFCANLSH=${GAUSFCANLSH:-$HOMEgfs/ush/gaussian_sfcanl.sh}
-export GAUSFCANLEXE=${GAUSFCANLEXE:-$HOMEgfs/exec/gaussian_sfcanl.x}
+GAUSFCANLSH=${GAUSFCANLSH:-${USHgfs}/gaussian_sfcanl.sh}
+export GAUSFCANLEXE=${GAUSFCANLEXE:-${EXECgfs}/gaussian_sfcanl.x}
 NTHREADS_GAUSFCANL=${NTHREADS_GAUSFCANL:-1}
 APRUN_GAUSFCANL=${APRUN_GAUSFCANL:-${APRUN:-""}}
 
@@ -185,7 +181,7 @@ if [ $DOGAUSFCANL = "YES" ]; then
     export err=$?; err_chk
 fi
 
-echo "${CDUMP} ${PDY}${cyc} atmanl and sfcanl done at $(date)" > "${COM_ATMOS_ANALYSIS}/${APREFIX}loganl.txt"
+echo "${rCDUMP} ${PDY}${cyc} atmanl and sfcanl done at $(date)" > "${COM_ATMOS_ANALYSIS}/${APREFIX}loganl.txt"
 
 ################################################################################
 # Postprocessing
diff --git a/scripts/exglobal_atmos_ensstat.sh b/scripts/exglobal_atmos_ensstat.sh
new file mode 100755
index 0000000000..7ad0ecfa30
--- /dev/null
+++ b/scripts/exglobal_atmos_ensstat.sh
@@ -0,0 +1,19 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+fhr3=$(printf "%03d" "${FORECAST_HOUR}")
+
+if [[ -e mpmd_script ]]; then rm -rf mpmd_script; fi
+
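+# Build an MPMD command file with one atmos_ensstat.sh call per output grid, then run it via run_mpmd.sh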
+{
+    for grid in '0p25' '0p50' '1p00'; do
+        echo "${USHgfs}/atmos_ensstat.sh ${grid} ${fhr3}"
+        # echo "${USHgfs}/atmos_ensstat.sh ${grid} ${fhr3} b"
+    done
+} > mpmd_script
+
+"${USHgfs}/run_mpmd.sh" mpmd_script
+err=$?
+
+exit "${err}"
diff --git a/scripts/exglobal_atmos_pmgr.sh b/scripts/exglobal_atmos_pmgr.sh
index 86afed962e..7f348474b6 100755
--- a/scripts/exglobal_atmos_pmgr.sh
+++ b/scripts/exglobal_atmos_pmgr.sh
@@ -6,7 +6,7 @@
 #  This script monitors the progress of the gfs_fcst job
 #
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 hour=00
 
diff --git a/scripts/exglobal_atmos_products.sh b/scripts/exglobal_atmos_products.sh
index 5f0b1db6cf..51e1a108bb 100755
--- a/scripts/exglobal_atmos_products.sh
+++ b/scripts/exglobal_atmos_products.sh
@@ -1,17 +1,21 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Programs used
 export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2}
 
 # Scripts used
-INTERP_ATMOS_MASTERSH=${INTERP_ATMOS_MASTERSH:-"${HOMEgfs}/ush/interp_atmos_master.sh"}
-INTERP_ATMOS_SFLUXSH=${INTERP_ATMOS_SFLUXSH:-"${HOMEgfs}/ush/interp_atmos_sflux.sh"}
+INTERP_ATMOS_MASTERSH=${INTERP_ATMOS_MASTERSH:-"${USHgfs}/interp_atmos_master.sh"}
+INTERP_ATMOS_SFLUXSH=${INTERP_ATMOS_SFLUXSH:-"${USHgfs}/interp_atmos_sflux.sh"}
 
 # Variables used in this job
 downset=${downset:-1}  # No. of groups of pressure grib2 products to create
-npe_atmos_products=${npe_atmos_products:-8}  # no. of processors available to process each group
+ntasks_atmos_products=${ntasks_atmos_products:-8}  # no. of processors available to process each group
+
+# WGNE related options
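+# (WGNE: Working Group on Numerical Experimentation; these are APCP-only 0.25 degree grib2 subsets)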
+WGNE=${WGNE:-NO}  # Create WGNE products
+FHMAX_WGNE=${FHMAX_WGNE:-0}  # WGNE products are created for the first FHMAX_WGNE forecast hours (excluding hour 0)
 
 cd "${DATA}" || exit 1
 
@@ -68,7 +72,7 @@ for (( nset=1 ; nset <= downset ; nset++ )); do
   echo "Begin processing nset = ${nset}"
 
   # Number of processors available to process $nset
-  nproc=${npe_atmos_products}
+  nproc=${ntasks}
 
   # Each set represents a group of files
   if (( nset == 1 )); then
@@ -129,7 +133,7 @@ for (( nset=1 ; nset <= downset ; nset++ )); do
 
   # Run with MPMD or serial
   if [[ "${USE_CFP:-}" = "YES" ]]; then
-    OMP_NUM_THREADS=1 "${HOMEgfs}/ush/run_mpmd.sh" "${DATA}/poescript"
+    OMP_NUM_THREADS=1 "${USHgfs}/run_mpmd.sh" "${DATA}/poescript"
     export err=$?
   else
     chmod 755 "${DATA}/poescript"
@@ -167,14 +171,18 @@ done  # for (( nset=1 ; nset <= downset ; nset++ ))
 
 #---------------------------------------------------------------
 
+# Create the index file for the sflux master, if it exists.
+FLUX_FILE="${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2"
+if [[ -s "${FLUX_FILE}" ]]; then
+  ${WGRIB2} -s "${FLUX_FILE}" > "${FLUX_FILE}.idx"
+fi
+
 # Section creating sflux grib2 interpolated products
 # Create 1-degree sflux grib2 output
 # move to COM and index it
 if [[ "${FLXGF:-}" == "YES" ]]; then
 
   # Files needed by ${INTERP_ATMOS_SFLUXSH}
-  FLUX_FILE="${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2"
-
   input_file="${FLUX_FILE}"
   output_file_prefix="sflux_${fhr3}"
   grid_string="1p00"
@@ -190,6 +198,15 @@ if [[ "${FLXGF:-}" == "YES" ]]; then
   done
 fi
 
+# Section creating 0.25 degree WGNE products for nset=1, and fhr <= FHMAX_WGNE
+if [[ "${WGNE:-}" == "YES" ]]; then
+  grp=""  # TODO: this should be "a" when we eventually rename the pressure grib2 files per EE2 convention
+  if (( FORECAST_HOUR > 0 && FORECAST_HOUR <= FHMAX_WGNE )); then
+    # TODO: 597 is the message number for APCP in GFSv16.  GFSv17 may change this as more messages are added. This can be controlled via config.atmos_products
+    ${WGRIB2} "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2${grp}.0p25.${fhr3}" -d "${APCP_MSG:-597}" -grib "${COM_ATMOS_GRIB_0p25}/${PREFIX}wgne.${fhr3}"
+  fi
+fi
+
 #---------------------------------------------------------------
 
 # Start sending DBN alerts
@@ -200,18 +217,21 @@ if [[ "${SENDDBN:-}" == "YES" ]]; then
   if [[ "${RUN}" == "gfs" ]]; then
     "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P25"      "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.${fhr3}"
     "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P25_WIDX" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.${fhr3}.idx"
-    if [[ -s "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr3}" ]]; then
+    if [[ -s "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}" ]]; then
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P5"       "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P5_WIDX"  "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}.idx"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P5"      "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.${fhr3}"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P5_WIDX" "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.${fhr3}.idx"
     fi
-    if [[ -s "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr3}" ]]; then
+    if [[ -s "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}" ]]; then
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_1P0"       "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_1P0_WIDX"  "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}.idx"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_1P0"      "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.${fhr3}"
       "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_1P0_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.${fhr3}.idx"
     fi
+    if [[ "${WGNE:-}" == "YES" ]] && [[ -s "${COM_ATMOS_GRIB_0p25}/${PREFIX}wgne.${fhr3}" ]] ; then
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_WGNE" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}wgne.${fhr3}"
+    fi
   fi
 
   if [[ "${fhr3}" == "anl" ]]; then
diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh
index 2997ac0d25..cbc43b0979 100755
--- a/scripts/exglobal_atmos_sfcanl.sh
+++ b/scripts/exglobal_atmos_sfcanl.sh
@@ -19,206 +19,130 @@
 
 #  Set environment.
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #  Directories.
-pwd=$(pwd)
+cd "${DATA}" || exit 99
 
 # Derived base variables
-# Ignore possible spelling error (nothing is misspelled)
-# shellcheck disable=SC2153
-GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}")
-BDATE=$(${NDATE} -3 "${PDY}${cyc}")
-bPDY=${BDATE:0:8}
-bcyc=${BDATE:8:2}
-
-# Utilities
-export NCP=${NCP:-"/bin/cp"}
-export NMV=${NMV:-"/bin/mv"}
-export NLN=${NLN:-"/bin/ln -sf"}
-export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"}
-export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
-COMPRESS=${COMPRESS:-gzip}
-UNCOMPRESS=${UNCOMPRESS:-gunzip}
-APRUNCFP=${APRUNCFP:-""}
-
-# IAU
-DOIAU=${DOIAU:-"NO"}
-export IAUFHRS=${IAUFHRS:-"6"}
 
-# Surface cycle related parameters
-CYCLESH=${CYCLESH:-${HOMEgfs}/ush/global_cycle.sh}
-export CYCLEXEC=${CYCLEXEC:-${HOMEgfs}/exec/global_cycle}
+# Dependent Scripts and Executables
+CYCLESH=${CYCLESH:-${USHgfs}/global_cycle.sh}
+export CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle}
 NTHREADS_CYCLE=${NTHREADS_CYCLE:-24}
 APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}}
+
+# Surface cycle related parameters
 export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-'-2.'}
 export CYCLVARS=${CYCLVARS:-""}
 export FHOUR=${FHOUR:-0}
 export DELTSFC=${DELTSFC:-6}
-export FIXam=${FIXam:-${HOMEgfs}/fix/am}
-export FIXorog=${FIXorog:-${HOMEgfs}/fix/orog}
 
-# FV3 specific info (required for global_cycle)
-export CASE=${CASE:-"C384"}
-ntiles=${ntiles:-6}
-
-# IAU
-DOIAU=${DOIAU:-"NO"}
-export IAUFHRS=${IAUFHRS:-"6"}
-
-# Dependent Scripts and Executables
-export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1}
-export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}}
-export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}}
-export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}}
-
-export CALCANLEXEC=${CALCANLEXEC:-${HOMEgfs}/exec/calc_analysis.x}
-export CHGRESNCEXEC=${CHGRESNCEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter_nc.x}
-export CHGRESINCEXEC=${CHGRESINCEXEC:-${HOMEgfs}/exec/interp_inc.x}
-export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1}
-CALCINCPY=${CALCINCPY:-${HOMEgfs}/ush/calcinc_gfs.py}
-CALCANLPY=${CALCANLPY:-${HOMEgfs}/ush/calcanl_gfs.py}
-
-export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}}
-CHGRESEXEC=${CHGRESEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter.x}
-
-# OPS flags
-RUN=${RUN:-""}
-SENDECF=${SENDECF:-"NO"}
-SENDDBN=${SENDDBN:-"NO"}
+# Other info used in this script
 RUN_GETGES=${RUN_GETGES:-"NO"}
 GETGESSH=${GETGESSH:-"getges.sh"}
 export gesenvir=${gesenvir:-${envir}}
+# Ignore possible spelling error (nothing is misspelled)
+# shellcheck disable=SC2153
+GPREFIX="gdas.t${GDATE:8:2}z."
+OPREFIX="${RUN/enkf}.t${cyc}z."
+APREFIX="${RUN/enkf}.t${cyc}z."
 
-# Observations
-OPREFIX=${OPREFIX:-""}
-OSUFFIX=${OSUFFIX:-""}
-
-# Guess files
-GPREFIX=${GPREFIX:-""}
+ntiles=6
 
-# Analysis files
-export APREFIX=${APREFIX:-""}
-DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc}
 
+##############################################################
 # Get dimension information based on CASE
-res=$(echo ${CASE} | cut -c2-)
+res="${CASE:1}"
 JCAP_CASE=$((res*2-2))
 LATB_CASE=$((res*2))
 LONB_CASE=$((res*4))
 
-################################################################################
-#  Preprocessing
-mkdata=NO
-if [[ ! -d ${DATA} ]]; then
-   mkdata=YES
-   mkdir -p ${DATA}
-fi
-
-cd ${DATA} || exit 99
-
-if [[ ${DONST} = "YES" ]]; then
-    export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr"
-    ${NLN} ${NSSTBF} nsstbufr
-fi
-
-
-##############################################################
-# Required model guess files
-
-
-##############################################################
-# Output files
-if [[ ${DONST} = "YES" ]]; then
-   ${NLN} ${DTFANL} dtfanl
-fi
-
-
-##############################################################
-# Update surface fields in the FV3 restart's using global_cycle
-mkdir -p "${COM_ATMOS_RESTART}"
-
 # Global cycle requires these files
-export FNTSFA=${FNTSFA:-${COM_OBS}/${OPREFIX}rtgssthr.grb}
-export FNACNA=${FNACNA:-${COM_OBS}/${OPREFIX}seaice.5min.blend.grb}
-export FNSNOA=${FNSNOA:-${COM_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}}
-[[ ! -f ${FNSNOA} ]] && export FNSNOA="${COM_OBS}/${OPREFIX}snogrb_t1534.3072.1536"
-FNSNOG=${FNSNOG:-${COM_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}}
-[[ ! -f ${FNSNOG} ]] && FNSNOG="${COM_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536"
+export FNTSFA=${FNTSFA:-${COMIN_OBS}/${OPREFIX}rtgssthr.grb}
+export FNACNA=${FNACNA:-${COMIN_OBS}/${OPREFIX}seaice.5min.blend.grb}
+export FNSNOA=${FNSNOA:-${COMIN_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}}
+[[ ! -f ${FNSNOA} ]] && export FNSNOA="${COMIN_OBS}/${OPREFIX}snogrb_t1534.3072.1536"
+FNSNOG=${FNSNOG:-${COMIN_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}}
+[[ ! -f ${FNSNOG} ]] && FNSNOG="${COMIN_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536"
 
 # Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle
 if [[ ${RUN_GETGES} = "YES" ]]; then
-    snoprv=$(${GETGESSH} -q -t snogrb_${JCAP_CASE} -e ${gesenvir} -n ${GDUMP} -v ${GDATE})
+  snoprv=$(${GETGESSH} -q -t "snogrb_${JCAP_CASE}" -e "${gesenvir}" -n "${GDUMP}" -v "${GDATE}")
 else
-    snoprv=${snoprv:-${FNSNOG}}
+  snoprv=${snoprv:-${FNSNOG}}
 fi
 
-if [[ $(${WGRIB} -4yr ${FNSNOA} 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \
-    $(${WGRIB} -4yr ${snoprv} 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ]] ; then
-    export FNSNOA=" "
-    export CYCLVARS="FSNOL=99999.,FSNOS=99999.,"
+if [[ $(${WGRIB} -4yr "${FNSNOA}" 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \
+  $(${WGRIB} -4yr "${snoprv}" 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ]] ; then
+  export FNSNOA=" "
+  export CYCLVARS="FSNOL=99999.,FSNOS=99999.,"
 else
-    export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-0.}
-    export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},${CYCLVARS}"
+  export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},${CYCLVARS}"
 fi
 
-if [[ ${DONST} = "YES" ]]; then
-    export NST_FILE=${GSI_FILE:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc}
+# determine where the input snow restart files come from
+if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then
+    sfcdata_dir="${COMIN_SNOW_ANALYSIS}"
 else
-    export NST_FILE="NULL"
+    sfcdata_dir="${COMIN_ATMOS_RESTART_PREV}"
 fi
 
-if [[ ${DOIAU} = "YES" ]]; then
-    # update surface restarts at the beginning of the window, if IAU
-    # For now assume/hold dtfanl.nc valid at beginning of window
-    for n in $(seq 1 ${ntiles}); do
-        ${NCP} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \
-                "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc"
-        ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
-        ${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc"   "${DATA}/fnbgso.00${n}"
-        ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc"                         "${DATA}/fngrid.00${n}"
-        ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc"                     "${DATA}/fnorog.00${n}"
-    done
-
-    export APRUNCY=${APRUN_CYCLE}
-    export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE}
-    export MAX_TASKS_CY=${ntiles}
-
-    CDATE="${PDY}${cyc}" ${CYCLESH}
-    export err=$?; err_chk
-fi
-
-# Update surface restarts at middle of window
-for n in $(seq 1 ${ntiles}); do
-    if [[ ${DO_JEDILANDDA:-"NO"} = "YES" ]]; then
-        ${NCP} "${COM_LAND_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \
-               "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc"
-    else
-        ${NCP} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \
-               "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc"
-    fi
-    ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
-    ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc"   "${DATA}/fnbgso.00${n}"
-    ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc"                       "${DATA}/fngrid.00${n}"
-    ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc"                   "${DATA}/fnorog.00${n}"
-done
-
+# global_cycle executable specific variables
 export APRUNCY=${APRUN_CYCLE}
 export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE}
 export MAX_TASKS_CY=${ntiles}
 
-CDATE="${PDY}${cyc}" ${CYCLESH}
-export err=$?; err_chk
+# Copy fix files required by global_cycle to DATA just once
+for (( nn=1; nn <= ntiles; nn++ )); do
+  ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${nn}.nc"                 "${DATA}/fngrid.00${nn}"
+  ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${nn}.nc" "${DATA}/fnorog.00${nn}"
+done
 
+# Copy the NSST analysis file for global_cycle
+# There is only a single NSST analysis at the middle of the window
+# For now use/assume it is the same at the beginning of the window if doing IAU
+if [[ "${DONST}" == "YES" ]]; then
+  ${NCP} "${COMIN_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc" "${DATA}/dtfanl"
+  export NST_FILE="dtfanl"
+else
+  export NST_FILE="NULL"
+fi
 
-################################################################################
-# Postprocessing
-cd ${pwd}
-[[ ${mkdata} = "YES" ]] && rm -rf ${DATA}
+# Collect the dates in the window to update surface restarts
+gcycle_dates=("${PDY}${cyc}")  # Always update surface restarts at middle of window
+if [[ "${DOIAU:-}" == "YES" ]]; then  # Update surface restarts at beginning of window
+  half_window=$(( assim_freq / 2 ))
+  BDATE=$(date --utc -d "${PDY} ${cyc} - ${half_window} hours" +%Y%m%d%H)
+  gcycle_dates+=("${BDATE}")
+fi
+
+# Loop over the dates in the window to update the surface restarts
+for gcycle_date in "${gcycle_dates[@]}"; do
+
+  echo "Updating surface restarts for ${gcycle_date} ..."
+
+  datestr="${gcycle_date:0:8}.${gcycle_date:8:2}0000"
+
+  # Copy inputs from COMIN to DATA
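+  # global_cycle reads fnbgsi.* and updates fnbgso.* in place, so seed the output files with a copy of the background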
+  for (( nn=1; nn <= ntiles; nn++ )); do
+    ${NCP} "${sfcdata_dir}/${datestr}.sfc_data.tile${nn}.nc" "${DATA}/fnbgsi.00${nn}"
+    ${NCP} "${DATA}/fnbgsi.00${nn}"                       "${DATA}/fnbgso.00${nn}"
+  done
+
+  CDATE="${PDY}${cyc}" ${CYCLESH}
+  export err=$?; err_chk
+
+  # Copy outputs from DATA to COMOUT
+  for (( nn=1; nn <= ntiles; nn++ )); do
+    ${NCP} "${DATA}/fnbgso.00${nn}" "${COMOUT_ATMOS_RESTART}/${datestr}.sfcanl_data.tile${nn}.nc"
+  done
+
+done
 
 
 ################################################################################
 
-exit ${err}
+exit "${err}"
 
 ################################################################################
diff --git a/scripts/exglobal_atmos_tropcy_qc_reloc.sh b/scripts/exglobal_atmos_tropcy_qc_reloc.sh
index 380441a6c9..f1272b1844 100755
--- a/scripts/exglobal_atmos_tropcy_qc_reloc.sh
+++ b/scripts/exglobal_atmos_tropcy_qc_reloc.sh
@@ -10,7 +10,7 @@
 # echo "            Oct 2013 - Use main USH vars as part of minor pkg cleanup"
 ############################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Make sure we are in the $DATA directory
 cd $DATA
@@ -50,7 +50,7 @@ if [ "$PROCESS_TROPCY" = 'YES' ]; then
 
 #echo $PDY
 
-   ${USHSYND:-$HOMEgfs/ush}/syndat_qctropcy.sh $cdate10
+   ${USHgfs}/syndat_qctropcy.sh $cdate10
    errsc=$?
    if [ "$errsc" -ne '0' ]; then
     echo "syndat_qctropcy.sh failed. exit"
@@ -95,7 +95,7 @@ if [ "$DO_RELOCATE" = 'YES' ]; then
 ###################################################
 
    export MP_LABELIO=${MP_LABELIO:-yes}
-   $USHRELO/tropcy_relocate.sh $cdate10
+   ${USHgfs}/tropcy_relocate.sh $cdate10
    errsc=$?
 
    [ "$errsc" -ne '0' ]  &&  exit $errsc
diff --git a/scripts/exglobal_atmos_vminmon.sh b/scripts/exglobal_atmos_vminmon.sh
index a4453dcf1a..b4307c8af9 100755
--- a/scripts/exglobal_atmos_vminmon.sh
+++ b/scripts/exglobal_atmos_vminmon.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -44,15 +44,15 @@ if [[ -s ${gsistat} ]]; then
    #------------------------------------------------------------------
    #   Run the child scripts.
    #------------------------------------------------------------------
-   "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
    rc_costs=$?
    echo "rc_costs = ${rc_costs}"
 
-   "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
    rc_gnorms=$?
    echo "rc_gnorms = ${rc_gnorms}"
 
-   "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}"
    rc_reduct=$?
    echo "rc_reduct = ${rc_reduct}"
 
diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh
index 5d7c0a9788..75b1f927bc 100755
--- a/scripts/exglobal_cleanup.sh
+++ b/scripts/exglobal_cleanup.sh
@@ -1,29 +1,37 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ###############################################################
-# Clean up previous cycles; various depths
-# PRIOR CYCLE: Leave the prior cycle alone
-# shellcheck disable=SC2153
-GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${assim_freq} hours")
-# PREVIOUS to the PRIOR CYCLE
-GDATE=$(date --utc +%Y%m%d%H -d "${GDATE:0:8} ${GDATE:8:2} -${assim_freq} hours")
+echo "Begin Cleanup ${DATAROOT}!"
+
+# Remove the DATA directories from the forecast model runs
+# TODO: Handle this better
+DATAfcst="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}"
+if [[ -d "${DATAfcst}" ]]; then rm -rf "${DATAfcst}"; fi
+#DATAefcs="${DATAROOT}/${RUN}efcs???${PDY:-}${cyc}"
+rm -rf "${DATAROOT}/${RUN}efcs"*"${PDY:-}${cyc}"
 
-# Remove the TMPDIR directory
-# TODO Only prepbufr is currently using this directory, and all jobs should be
-#   cleaning up after themselves anyway
-COMIN="${DATAROOT}/${GDATE}"
-[[ -d ${COMIN} ]] && rm -rf "${COMIN}"
+# In XML, DATAROOT is defined as:
+#DATAROOT="${STMP}/RUNDIRS/${PSLOT}/${RUN}.${PDY}${cyc}"
+# cleanup is only executed after the entire cycle is successfully completed.
+# removing DATAROOT should be possible if that is the case.
+rm -rf "${DATAROOT}"
+
+echo "Cleanup ${DATAROOT} completed!"
+###############################################################
 
 if [[ "${CLEANUP_COM:-YES}" == NO ]] ; then
     exit 0
 fi
 
+###############################################################
+# Clean up previous cycles; various depths
+
 # Step back every assim_freq hours and remove old rotating directories
 # for successful cycles (defaults from 24h to 120h).
 # Retain files needed by Fit2Obs
-last_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDEND:-24} hours" )
+last_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDEND:-24} hours")
 first_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours")
 last_rtofs=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDRTOFS:-48} hours")
 function remove_files() {
@@ -59,10 +67,11 @@ for (( current_date=first_date; current_date <= last_date; \
         # TODO: This needs to be revamped to not look at the rocoto log.
         # shellcheck disable=SC2312
         if [[ $(tail -n 1 "${rocotolog}") =~ "This cycle is complete: Success" ]]; then
-            YMD="${current_PDY}" HH="${current_cyc}" generate_com COM_TOP
-            if [[ -d "${COM_TOP}" ]]; then
+            YMD="${current_PDY}" HH="${current_cyc}" declare_from_tmpl \
+                COMOUT_TOP:COM_TOP_TMPL
+            if [[ -d "${COMOUT_TOP}" ]]; then
                 IFS=", " read -r -a exclude_list <<< "${exclude_string:-}"
-                remove_files "${COM_TOP}" "${exclude_list[@]:-}"
+                remove_files "${COMOUT_TOP}" "${exclude_list[@]:-}"
             fi
             if [[ -d "${rtofs_dir}" ]] && (( current_date < last_rtofs )); then rm -rf "${rtofs_dir}" ; fi
         fi
diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh
index 3836643afc..ed9bef05df 100755
--- a/scripts/exglobal_diag.sh
+++ b/scripts/exglobal_diag.sh
@@ -19,22 +19,18 @@
 
 #  Set environment.
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #  Directories.
 pwd=$(pwd)
 
 # Base variables
 CDATE="${PDY}${cyc}"
-CDUMP=${CDUMP:-"gdas"}
 GDUMP=${GDUMP:-"gdas"}
 
 # Utilities
-export NCP=${NCP:-"/bin/cp"}
-export NMV=${NMV:-"/bin/mv"}
-export NLN=${NLN:-"/bin/ln -sf"}
 export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"}
-export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen}
+export NCLEN=${NCLEN:-${USHgfs}/getncdimlen}
 export CATEXEC=${CATEXEC:-${ncdiag_ROOT:-${gsi_ncdiag_ROOT}}/bin/ncdiag_cat_serial.x}
 COMPRESS=${COMPRESS:-gzip}
 UNCOMPRESS=${UNCOMPRESS:-gunzip}
@@ -225,7 +221,7 @@ EOFdiag
       chmod 755 $DATA/mp_diag.sh
       ncmd=$(cat $DATA/mp_diag.sh | wc -l)
       if [ $ncmd -gt 0 ]; then
-         ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max))
+         ncmd_max=$((ncmd < max_tasks_per_node ? ncmd : max_tasks_per_node))
          APRUNCFP_DIAG=$(eval echo $APRUNCFP)
          $APRUNCFP_DIAG $DATA/mp_diag.sh
          export err=$?; err_chk
@@ -235,9 +231,11 @@ EOFdiag
    # Restrict diagnostic files containing rstprod data
    rlist="conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_uv saphir"
    for rtype in $rlist; do
-      set +e
-      ${CHGRP_CMD} *${rtype}*
-      ${STRICT_ON:-set -e}
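+      # chgrp only files that exist and are non-empty; some restricted obs types may be missing in a given cycle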
+      for rfile in *"${rtype}"*; do 
+         if [[ -s "${rfile}" ]]; then
+            ${CHGRP_CMD} "${rfile}"
+         fi
+      done
    done
 
    # If requested, create diagnostic file tarballs
diff --git a/scripts/exglobal_extractvars.sh b/scripts/exglobal_extractvars.sh
new file mode 100755
index 0000000000..a124667679
--- /dev/null
+++ b/scripts/exglobal_extractvars.sh
@@ -0,0 +1,53 @@
+#! /usr/bin/env bash
+
+################################################################################
+## UNIX Script Documentation Block
+## Script name:         exglobal_extractvars.sh
+## Script description:  Extracts variables from atmosphere, ocean, ice and wave
+##                      products and saves these variables in arcdir
+#######################
+# Main body starts here
+#######################
+
+source "${USHgfs}/preamble.sh"
+source "${USHgfs}/extractvars_tools.sh"
+
+# Scripts used
+EXTRCTVARA="${USHgfs}/atmos_extractvars.sh"
+EXTRCTVARO="${USHgfs}/ocnice_extractvars.sh"
+EXTRCTVARW="${USHgfs}/wave_extractvars.sh"
+
+# Set FHMAX_HF_GFS equal to FHMAX_GFS if FHMAX_HF_GFS is greater than FHMAX_GFS
+if (( FHMAX_GFS < FHMAX_HF_GFS )); then
+  export FHMAX_HF_GFS=${FHMAX_GFS}
+fi
+
+# Set FHOUT_WAV_EXTRACT equal to FHOUT_WAV if FHOUT_WAV is not a factor of FHOUT_WAV_EXTRACT
+if (( FHOUT_WAV_EXTRACT % FHOUT_WAV != 0 )); then
+  FHOUT_WAV_EXTRACT=${FHOUT_WAV}
+fi
+
+# Extract variables for atmosphere
+if [[ "${DO_ATM}" == "YES" ]]; then
+  ${EXTRCTVARA} "${DATA}/atmos"
+fi
+
+# Extract variables for ocean
+if [[ "${DO_OCN}" == "YES" ]]; then
+  export component_name="ocn"
+  ${EXTRCTVARO} "${DATA}/ocn" "${varlist_ocn_netcdf}" "${ocnres}" "${compress_ocn}" "${FHOUT_OCN_GFS}" "${ARC_RFCST_PROD_OCN}"
+fi
+
+# Extract variables for ice
+if [[ "${DO_ICE}" == "YES" ]]; then
+  export component_name="ice"
+  ${EXTRCTVARO} "${DATA}/ice" "${varlist_ice_netcdf}" "${iceres}" "${compress_ice}" "${FHOUT_ICE_GFS}" "${ARC_RFCST_PROD_ICE}"
+fi
+
+# Extract variables for wave
+if [[ "${DO_WAVE}" == "YES" ]]; then
+  export component_name="wav"
+  ${EXTRCTVARW} "${DATA}/wav"
+fi
+
+exit 0
diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh
index c50cde74f1..4ce9d7894d 100755
--- a/scripts/exglobal_forecast.sh
+++ b/scripts/exglobal_forecast.sh
@@ -31,32 +31,32 @@
 ## 		1. restart file except sfc_data, $gmemdir/RESTART/$PDY.$cyc.*.nc
 ##		2. sfcanl_data, $memdir/RESTART/$PDY.$cyc.*.nc
 ##		3. coupler_res, $gmemdir/RESTART/$PDY.$cyc.coupler.res
-##		4. increment file, $memdir/${CDUMP}.t${cyc}z.atminc.nc
+##		4. increment file, $memdir/${RUN}.t${cyc}z.atminc.nc
 ##			OR $DATA/INPUT/fv3_increment.nc
 ##	Cold start files:
 ##		1. initial condition, $memdir/INPUT/*.nc
 ##	Restart files:
 ##
 ##	Fix files:
-##		1. computing grid, $FIXorog/$CASE/${CASE}_grid.tile${n}.nc
-##		2. orography data, $FIXorog/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc
-##		3. mosaic data, $FIXorog/$CASE/${CASE}_mosaic.nc
-##		4. Global O3 data, $FIXam/${O3FORC}
-##		5. Global H2O data, $FIXam/${H2OFORC}
-##		6. Global solar constant data, $FIXam/global_solarconstant_noaa_an.txt
-##		7. Global surface emissivity, $FIXam/global_sfc_emissivity_idx.txt
-##		8. Global CO2 historical data, $FIXam/global_co2historicaldata_glob.txt
-##		8. Global CO2 monthly data, $FIXam/co2monthlycyc.txt
-##		10. Additional global CO2 data, $FIXam/fix_co2_proj/global_co2historicaldata
+##		1. computing grid, ${FIXorog}/$CASE/${CASE}_grid.tile${n}.nc
+##		2. orography data, ${FIXorog}/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc
+##		3. mosaic data, ${FIXorog}/$CASE/${CASE}_mosaic.nc
+##		4. Global O3 data, ${FIXgfs}/am/${O3FORC}
+##		5. Global H2O data, ${FIXgfs}/am/${H2OFORC}
+##		6. Global solar constant data, ${FIXgfs}/am/global_solarconstant_noaa_an.txt
+##		7. Global surface emissivity, ${FIXgfs}/am/global_sfc_emissivity_idx.txt
+##		8. Global CO2 historical data, ${FIXgfs}/am/global_co2historicaldata_glob.txt
+##		8. Global CO2 monthly data, ${FIXgfs}/am/co2monthlycyc.txt
+##		10. Additional global CO2 data, ${FIXgfs}/am/fix_co2_proj/global_co2historicaldata
 ##		11. Climatological aerosol global distribution
-##			$FIXam/global_climaeropac_global.txt
-## 		12. Monthly volcanic forcing $FIXam/global_volcanic_aerosols_YYYY-YYYY.txt
+##			${FIXgfs}/am/global_climaeropac_global.txt
+## 		12. Monthly volcanic forcing ${FIXgfs}/am/global_volcanic_aerosols_YYYY-YYYY.txt
 ##
 ## Data output (location, name)
 ##	If quilting=true and output grid is gaussian grid:
-##	   1. atmf data, $memdir/${CDUMP}.t${cyc}z.atmf${FH3}.$OUTPUT_FILE
-##	   2. sfcf data, $memdir/${CDUMP}.t${cyc}z.sfcf${FH3}.$OUTPUT_FILE
-##	   3. logf data, $memdir/${CDUMP}.t${cyc}z.logf${FH3}.$OUTPUT_FILE
+##	   1. atmf data, $memdir/${RUN}.t${cyc}z.atmf${FH3}.$OUTPUT_FILE
+##	   2. sfcf data, $memdir/${RUN}.t${cyc}z.sfcf${FH3}.$OUTPUT_FILE
+##	   3. logf data, $memdir/${RUN}.t${cyc}z.logf${FH3}.$OUTPUT_FILE
 ##	If quilting=false and output grid is not gaussian grid:
 ##           1. NGGPS2D, $memdir/nggps2d.tile${n}.nc
 ##	   2. NGGPS3D, $memdir/nggps3d.tile${n}.nc
@@ -77,14 +77,15 @@
 # Main body starts here
 #######################
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # include all subroutines. Executions later.
-source "${HOMEgfs}/ush/forecast_predet.sh"	# include functions for variable definition
-source "${HOMEgfs}/ush/forecast_det.sh"  # include functions for run type determination
-source "${HOMEgfs}/ush/forecast_postdet.sh"	# include functions for variables after run type determination
-source "${HOMEgfs}/ush/ufs_configure.sh"	# include functions for ufs.configure processing
-source "${HOMEgfs}/ush/parsing_model_configure_FV3.sh"
+source "${USHgfs}/forecast_predet.sh" 	# include functions for variable definition
+source "${USHgfs}/forecast_det.sh"  # include functions for run type determination
+source "${USHgfs}/forecast_postdet.sh"	# include functions for variables after run type determination
+source "${USHgfs}/parsing_ufs_configure.sh"	 # include functions for ufs_configure processing
+
+source "${USHgfs}/atparse.bash"  # include function atparse for parsing @[XYZ] templated files
 
 # Coupling control switches, for coupling purpose, off by default
 cpl=${cpl:-.false.}
@@ -105,20 +106,20 @@ common_predet
 
 echo "MAIN: Loading variables before determination of run type"
 FV3_predet
+[[ ${cplflx} = .true. ]] && CMEPS_predet
 [[ ${cplflx} = .true. ]] && MOM6_predet
 [[ ${cplwav} = .true. ]] && WW3_predet
 [[ ${cplice} = .true. ]] && CICE_predet
+[[ ${cplchm} = .true. ]] && GOCART_predet
 echo "MAIN: Variables before determination of run type loaded"
 
 echo "MAIN: Determining run type"
-FV3_det
-[[ ${cplflx} = .true. ]] && MOM6_det
-[[ ${cplwav} = .true. ]] && WW3_det
-[[ ${cplice} = .true. ]] && CICE_det
-echo "MAIN: RUN Type Determined"
+UFS_det
+echo "MAIN: run type determined"
 
 echo "MAIN: Post-determination set up of run type"
 FV3_postdet
+[[ ${cplflx} = .true. ]] && CMEPS_postdet
 [[ ${cplflx} = .true. ]] && MOM6_postdet
 [[ ${cplwav} = .true. ]] && WW3_postdet
 [[ ${cplice} = .true. ]] && CICE_postdet
@@ -131,13 +132,9 @@ FV3_nml
 [[ ${cplwav} = .true. ]] && WW3_nml
 [[ ${cplice} = .true. ]] && CICE_nml
 [[ ${cplchm} = .true. ]] && GOCART_rc
-FV3_model_configure
+UFS_configure
 echo "MAIN: Name lists and model configuration written"
 
-echo "MAIN: Writing UFS Configure file"
-writing_ufs_configure
-echo "MAIN: UFS configured"
-
 #------------------------------------------------------------------
 # run the executable
 
@@ -146,7 +143,13 @@ if [[ "${esmf_profile:-}" = ".true." ]]; then
   export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY
 fi
 
-${NCP} "${FCSTEXECDIR}/${FCSTEXEC}" "${DATA}/"
+if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+  unset OMP_NUM_THREADS
+else
+  export OMP_NUM_THREADS=${UFS_THREADS:-1}
+fi
+
+${NCP} "${EXECgfs}/${FCSTEXEC}" "${DATA}/"
 ${APRUN_UFS} "${DATA}/${FCSTEXEC}" 1>&1 2>&2
 export ERR=$?
 export err=${ERR}
@@ -154,6 +157,7 @@ ${ERRSCRIPT} || exit "${err}"
 
 FV3_out
 [[ ${cplflx} = .true. ]] && MOM6_out
+[[ ${cplflx} = .true. ]] && CMEPS_out
 [[ ${cplwav} = .true. ]] && WW3_out
 [[ ${cplice} = .true. ]] && CICE_out
 [[ ${cplchm} = .true. ]] && GOCART_out
diff --git a/scripts/exglobal_marinebmat.py b/scripts/exglobal_marinebmat.py
new file mode 100755
index 0000000000..e285e646ac
--- /dev/null
+++ b/scripts/exglobal_marinebmat.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# exglobal_marinebmat.py
+# This script creates a MarineBMat object
+# and runs its initialize, execute, and finalize methods,
+# which perform all the steps necessary to create the global marine B-matrix
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.marine_bmat import MarineBMat
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Create an instance of the MarineBMat task
+    marineBMat = MarineBMat(config)
+    marineBMat.initialize()
+    marineBMat.execute()
+    marineBMat.finalize()
diff --git a/scripts/exglobal_oceanice_products.py b/scripts/exglobal_oceanice_products.py
new file mode 100755
index 0000000000..9bb2b09596
--- /dev/null
+++ b/scripts/exglobal_oceanice_products.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+import os
+
+from wxflow import AttrDict, Logger, logit, cast_strdict_as_dtypedict
+from pygfs.task.oceanice_products import OceanIceProducts
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the OceanIce object
+    oceanice = OceanIceProducts(config)
+
+    # Pull out all the configuration keys needed to run the rest of steps
+    keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET',
+            f'COM_{oceanice.task_config.component.upper()}_HISTORY',
+            f'COM_{oceanice.task_config.component.upper()}_GRIB',
+            'APRUN_OCNICEPOST',
+            'component', 'forecast_hour', 'valid_datetime', 'avg_period',
+            'model_grid', 'product_grids', 'oceanice_yaml']
+    oceanice_dict = AttrDict()
+    for key in keys:
+        oceanice_dict[key] = oceanice.task_config[key]
+
+    # Initialize the DATA/ directory; copy static data
+    oceanice.initialize(oceanice_dict)
+
+    for grid in oceanice_dict.product_grids:
+
+        logger.info(f"Processing {grid} grid")
+
+        # Configure DATA/ directory for execution; prepare namelist etc.
+        oceanice.configure(oceanice_dict, grid)
+
+        # Run the oceanice post executable to interpolate and create grib2 files
+        oceanice.execute(oceanice_dict, grid)
+
+    # Subset raw model data to create netCDF products
+    oceanice.subset(oceanice_dict)
+
+    # Copy processed output from execute and subset
+    oceanice.finalize(oceanice_dict)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/exglobal_prep_emissions.py b/scripts/exglobal_prep_emissions.py
new file mode 100755
index 0000000000..ef0e709142
--- /dev/null
+++ b/scripts/exglobal_prep_emissions.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+# exglobal_prep_emissions.py
+# This script creates an emissions object
+# which performs the pre-processing for aerosol emissions
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs import AerosolEmissions
+
+
+# Initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the emissions pre-processing task
+    emissions = AerosolEmissions(config)
+    emissions.initialize()
+    emissions.configure()
+    emissions.execute(emissions.task_config.DATA, emissions.task_config.APRUN)
+    emissions.finalize()
diff --git a/scripts/exglobal_prep_obs_aero.py b/scripts/exglobal_prep_obs_aero.py
new file mode 100755
index 0000000000..08548e6874
--- /dev/null
+++ b/scripts/exglobal_prep_obs_aero.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# exglobal_prep_obs_aero.py
+# This script collects the available VIIRS obs files,
+# then combines and preprocesses them.
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.aero_prepobs import AerosolObsPrep
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    AeroObs = AerosolObsPrep(config)
+    AeroObs.initialize()
+    AeroObs.runConverter()
+    AeroObs.finalize()
diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_snow_obs.py
similarity index 59%
rename from scripts/exglobal_prep_land_obs.py
rename to scripts/exglobal_prep_snow_obs.py
index 3594771c8a..a6a9070151 100755
--- a/scripts/exglobal_prep_land_obs.py
+++ b/scripts/exglobal_prep_snow_obs.py
@@ -1,12 +1,12 @@
 #!/usr/bin/env python3
-# exglobal_land_analysis_prepare.py
-# This script creates a LandAnalysis object
+# exglobal_prep_snow_obs.py
+# This script creates a SnowAnalysis object
 # and runs the prepare_GTS and prepare_IMS method
 # which perform the pre-processing for GTS and IMS data
 import os
 
 from wxflow import Logger, cast_strdict_as_dtypedict
-from pygfs.task.land_analysis import LandAnalysis
+from pygfs.task.snow_analysis import SnowAnalysis
 
 
 # Initialize root logger
@@ -18,8 +18,8 @@
     # Take configuration from environment and cast it as python dictionary
     config = cast_strdict_as_dtypedict(os.environ)
 
-    # Instantiate the land prepare task
-    LandAnl = LandAnalysis(config)
-    LandAnl.prepare_GTS()
-    if f"{ LandAnl.runtime_config.cyc }" == '18':
-        LandAnl.prepare_IMS()
+    # Instantiate the snow prepare task
+    SnowAnl = SnowAnalysis(config)
+    SnowAnl.prepare_GTS()
+    if SnowAnl.task_config.cyc == 0:
+        SnowAnl.prepare_IMS()
diff --git a/scripts/exglobal_land_analysis.py b/scripts/exglobal_snow_analysis.py
similarity index 66%
rename from scripts/exglobal_land_analysis.py
rename to scripts/exglobal_snow_analysis.py
index 70141475b0..fe050f5af5 100755
--- a/scripts/exglobal_land_analysis.py
+++ b/scripts/exglobal_snow_analysis.py
@@ -1,12 +1,12 @@
 #!/usr/bin/env python3
-# exglobal_land_analysis.py
-# This script creates an LandAnalysis class
+# exglobal_snow_analysis.py
+# This script creates a SnowAnalysis object
 # and runs the initialize, execute and finalize methods
-# for a global Land Snow Depth analysis
+# for a global Snow Depth analysis
 import os
 
 from wxflow import Logger, cast_strdict_as_dtypedict
-from pygfs.task.land_analysis import LandAnalysis
+from pygfs.task.snow_analysis import SnowAnalysis
 
 # Initialize root logger
 logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
@@ -17,8 +17,8 @@
     # Take configuration from environment and cast it as python dictionary
     config = cast_strdict_as_dtypedict(os.environ)
 
-    # Instantiate the land analysis task
-    anl = LandAnalysis(config)
+    # Instantiate the snow analysis task
+    anl = SnowAnalysis(config)
     anl.initialize()
     anl.execute()
     anl.finalize()
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
index 58b37f3114..32356cd724 100755
--- a/scripts/exglobal_stage_ic.sh
+++ b/scripts/exglobal_stage_ic.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Locally scoped variables and functions
 # shellcheck disable=SC2153
@@ -8,6 +8,9 @@ GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H)
 gPDY="${GDATE:0:8}"
 gcyc="${GDATE:8:2}"
 
+RDATE=$(date --utc -d "${PDY} ${cyc} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H)
+DTG_PREFIX="${RDATE:0:8}.${RDATE:8:2}0000"
+
 MEMDIR_ARRAY=()
 if [[ "${RUN:-}" = "gefs" ]]; then
   # Populate the member_dirs array based on the value of NMEM_ENS
@@ -31,20 +34,25 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
   # Stage atmosphere initial conditions to ROTDIR
   if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then
     # Stage the FV3 restarts to ROTDIR (warm start)
-    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL
     [[ ! -d "${COM_ATMOS_RESTART_PREV}" ]] && mkdir -p "${COM_ATMOS_RESTART_PREV}"
-    for ftype in coupler.res fv_core.res.nc; do
-      src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}"
-      tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}"
+    prev_atmos_copy_list=(fv_core.res.nc coupler.res)
+    for ftype in "${prev_atmos_copy_list[@]}"; do
+      src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}"
+      tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}"
       ${NCP} "${src}" "${tgt}"
       rc=$?
       ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
       err=$((err + rc))
     done
     for ftype in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do
-      for ((tt = 1; tt <= 6; tt++)); do
-        src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc"
-        tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc"
+      for ((tt = 1; tt <= ntiles; tt++)); do
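+        # Tiles beyond 6 belong to the global nest (tile 7 maps to nest02, and so on)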
+        src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}.tile${tt}.nc"
+        if (( tt > 6 )) ; then
+            tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.nest0$((tt-5)).tile${tt}.nc"
+        else
+            tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.tile${tt}.nc"
+        fi
         ${NCP} "${src}" "${tgt}"
         rc=$?
         ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
@@ -53,7 +61,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
     done
   else
     # Stage the FV3 cold-start initial conditions to ROTDIR
-    YMD=${PDY} HH=${cyc} generate_com COM_ATMOS_INPUT
+    YMD=${PDY} HH=${cyc} declare_from_tmpl COM_ATMOS_INPUT
     [[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}"
     src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/gfs_ctrl.nc"
     tgt="${COM_ATMOS_INPUT}/gfs_ctrl.nc"
@@ -62,7 +70,7 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
     ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
     err=$((err + rc))
     for ftype in gfs_data sfc_data; do
-      for ((tt = 1; tt <= 6; tt++)); do
+      for ((tt = 1; tt <= ntiles; tt++)); do
         src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${ftype}.tile${tt}.nc"
         tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc"
         ${NCP} "${src}" "${tgt}"
@@ -70,15 +78,31 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
         ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
         err=$((err + rc))
       done
+      if (( ntiles > 6 )); then
+        ${NLN} "${COM_ATMOS_INPUT}/${ftype}.tile7.nc" "${COM_ATMOS_INPUT}/${ftype}.nest02.tile7.nc"
+      fi
     done
   fi
+  
+  # Atmosphere Perturbation Files (usually used with replay ICS)
+  # The extra zero on MEMDIR ensures we have a number even if the string is empty
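+  # MEMDIR has the form "mem###" for ensemble members, so ${MEMDIR:3} drops the "mem" prefix to leave the member number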
+  if (( $((10#0${MEMDIR:3})) > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+      YMD=${PDY} HH=${cyc} declare_from_tmpl COM_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL
+      [[ ! -d "${COM_ATMOS_ANALYSIS}" ]] && mkdir -p "${COM_ATMOS_ANALYSIS}"
+      src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.fv3_perturbation.nc"
+      tgt="${COM_ATMOS_ANALYSIS}/${RUN}.t00z.atminc.nc"
+      ${NCP} "${src}" "${tgt}"
+      rc=${?}
+      ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+      err=$((err + rc))
+  fi
 
   # Stage ocean initial conditions to ROTDIR (warm start)
   if [[ "${DO_OCN:-}" = "YES" ]]; then
-    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL
     [[ ! -d "${COM_OCEAN_RESTART_PREV}" ]] && mkdir -p "${COM_OCEAN_RESTART_PREV}"
-    src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc"
-    tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc"
+    src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res.nc"
+    tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res.nc"
     ${NCP} "${src}" "${tgt}"
     rc=$?
     ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
@@ -89,8 +113,8 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
         ;;
       "025" )
         for nn in $(seq 1 3); do
-          src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res_${nn}.nc"
-          tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc"
+          src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res_${nn}.nc"
+          tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res_${nn}.nc"
           ${NCP} "${src}" "${tgt}"
           rc=$?
           ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
@@ -104,14 +128,27 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
         ;;
     esac
 
+    # Ocean Perturbation Files
+    # The extra zero on MEMDIR ensures we have a number even if the string is empty
+    if (( $((10#0${MEMDIR:3})) > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+        YMD=${PDY} HH=${cyc} declare_from_tmpl COM_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL
+        [[ ! -d "${COM_OCEAN_ANALYSIS}" ]] && mkdir -p "${COM_OCEAN_ANALYSIS}"
+        src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.mom6_perturbation.nc"
+        tgt="${COM_OCEAN_ANALYSIS}/mom6_increment.nc"
+        ${NCP} "${src}" "${tgt}"
+        rc=${?}
+        ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+        err=$((err + rc))
+    fi
+
     # TODO: Do mediator restarts exists in a ATMW configuration?
     # TODO: No mediator is presumably involved in an ATMA configuration
     if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then
       # Stage the mediator restarts to ROTDIR (warm start/restart the coupled model)
-      RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL
+      RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL
       [[ ! -d "${COM_MED_RESTART_PREV}" ]] && mkdir -p "${COM_MED_RESTART_PREV}"
-      src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
-      tgt="${COM_MED_RESTART_PREV}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
+      src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${DTG_PREFIX}.ufs.cpld.cpl.r.nc"
+      tgt="${COM_MED_RESTART_PREV}/${DTG_PREFIX}.ufs.cpld.cpl.r.nc"
       if [[ -f "${src}" ]]; then
         ${NCP} "${src}" "${tgt}"
         rc=$?
@@ -126,10 +163,10 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
 
   # Stage ice initial conditions to ROTDIR (warm start)
   if [[ "${DO_ICE:-}" = "YES" ]]; then
-    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
     [[ ! -d "${COM_ICE_RESTART_PREV}" ]] && mkdir -p "${COM_ICE_RESTART_PREV}"
-    src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc"
-    tgt="${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc"
+    src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${DTG_PREFIX}.cice_model.res.nc"
+    tgt="${COM_ICE_RESTART_PREV}/${DTG_PREFIX}.cice_model.res.nc"
     ${NCP} "${src}" "${tgt}"
     rc=$?
     ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
@@ -138,11 +175,11 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
 
   # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc)
   if [[ "${DO_WAVE:-}" = "YES" ]]; then
-    YMD=${PDY} HH=${cyc} generate_com COM_WAVE_RESTART
-    [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}"
+    YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL
+    [[ ! -d "${COM_WAVE_RESTART_PREV}" ]] && mkdir -p "${COM_WAVE_RESTART_PREV}"
     for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust
-      src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${PDY}.${cyc}0000.restart.${grdID}"
-      tgt="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}"
+      src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${DTG_PREFIX}.restart.${grdID}"
+      tgt="${COM_WAVE_RESTART_PREV}/${DTG_PREFIX}.restart.${grdID}"
       ${NCP} "${src}" "${tgt}"
       rc=$?
       ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh
deleted file mode 100755
index ab2c80043e..0000000000
--- a/scripts/run_reg2grb2.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-#requires grib_util module 
-
-MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
-export mask_file="${MOM6REGRID}/fix/reg2grb2/mask.0p25x0p25.grb2"
-
-# offline testing:
-#export DATA=
-#export icefile=$DATA/DATA0p5/icer2012010106.01.2012010100_0p5x0p5.nc
-#export ocnfile=$DATA/DATA0p5/ocnr2012010106.01.2012010100_0p5x0p5.nc
-#export outfile=$DATA/DATA0p5/out/ocnh2012010106.01.2012010100.grb2
-#
-# workflow testing:
-export icefile="icer${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc"
-export ocnfile="ocnr${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc"
-export outfile="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2"
-export outfile0p5="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2"
-
-export mfcstcpl=${mfcstcpl:-1}
-export IGEN_OCNP=${IGEN_OCNP:-197}
-
-# PT This is the forecast date
-export year=${VDATE:0:4}
-export month=${VDATE:4:2}
-export day=${VDATE:6:2}
-export hour=${VDATE:8:2}
-
-# PT This is the initialization date
-export syear=${IDATE:0:4}
-export smonth=${IDATE:4:2}
-export sday=${IDATE:6:2}
-export shour=${IDATE:8:2}
-
-# PT Need to get this from above - could be 6 or 1 hour
-export hh_inc_ocn=6
-#
-# set for 1p0 lat-lon
-#export im=360
-#export jm=181
-# export km=40
-#export imo=360
-#export jmo=181
-#
-# set for 0p5 lat-lon
-#export im=720
-#export jm=361
-#export km=40
-#export imo=720
-#export jmo=361
-#
-# set for 0p25 lat-lon
-export im=1440
-export jm=721
-export imo=1440
-export jmo=721
-export km=40
-
-export flats=-90.
-export flatn=90.
-export flonw=0.0
-export flone=359.75
-
-ln -sf "${mask_file}" ./iceocnpost.g2
-${executable} > "reg2grb2.${VDATE}.${IDATE}.out"
-
-# interpolated from 0p25 to 0p5 grid
-grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000  500000  0"
-${COPYGB2} -g "${grid2p05}" -i0 -x "${outfile}" "${outfile0p5}"
-
diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh
deleted file mode 100755
index 103e9a759e..0000000000
--- a/scripts/run_regrid.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-MOM6REGRID="${MOM6REGRID:-${HOMEgfs}}"
-export EXEC_DIR="${MOM6REGRID}/exec"
-export USH_DIR="${MOM6REGRID}/ush"
-export COMOUTocean="${COM_OCEAN_HISTORY}"
-export COMOUTice="${COM_ICE_HISTORY}"
-export IDATE="${IDATE}"
-export VDATE="${VDATE}"
-export ENSMEM="${ENSMEM}"
-export FHR="${fhr}"
-export DATA="${DATA}"
-export FIXreg2grb2="${FIXreg2grb2}"
-
-###### DO NOT MODIFY BELOW UNLESS YOU KNOW WHAT YOU ARE DOING #######
-#Need NCL module to be loaded:
-echo "${NCARG_ROOT}"
-export NCL="${NCARG_ROOT}/bin/ncl"
-
-ls -alrt
-
-${NCL} "${USH_DIR}/icepost.ncl"
-${NCL} "${USH_DIR}/ocnpost.ncl"
-#####################################################################
-
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 23cf420f1d..28f52fd306 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -16,41 +16,57 @@ function _usage() {
 Builds all of the global-workflow components by calling the individual build
   scripts in sequence.
 
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v]
+Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-d][-f][-h][-j n][-v][-w]
   -a UFS_app:
     Build a specific UFS app instead of the default
+  -d:
+    Build in debug mode
+  -f:
+    Build the UFS model using the -DFASTER=ON option
   -g:
     Build GSI
   -h:
     Print this help message and exit
   -j:
     Specify maximum number of build jobs (n)
+  -k:
+    Kill all builds if any build fails
   -u:
     Build UFS-DA
   -v:
     Execute all build scripts with -v option to turn on verbose where supported
+  -w:
+    Use structured wave grid
 EOF
   exit 1
 }
 
-script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
-cd "${script_dir}" || exit 1
+# shellcheck disable=SC2155
+readonly HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P)
+cd "${HOMEgfs}/sorc" || exit 1
 
 _build_ufs_opt=""
 _build_ufsda="NO"
 _build_gsi="NO"
+_build_debug=""
 _verbose_opt=""
+_wave_opt=""
 _build_job_max=20
+_quick_kill="NO"
 # Reset option counter in case this script is sourced
 OPTIND=1
-while getopts ":a:ghj:uv" option; do
+while getopts ":a:dfghj:kuvw" option; do
   case "${option}" in
     a) _build_ufs_opt+="-a ${OPTARG} ";;
+    f) _build_ufs_opt+="-f ";;
+    d) _build_debug="-d" ;;
     g) _build_gsi="YES" ;;
     h) _usage;;
     j) _build_job_max="${OPTARG} ";;
+    k) _quick_kill="YES" ;;
     u) _build_ufsda="YES" ;;
     v) _verbose_opt="-v";;
+    w) _wave_opt="-w";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       _usage
@@ -64,24 +80,24 @@ done
 
 shift $((OPTIND-1))
 
-logs_dir="${script_dir}/logs"
+logs_dir="${HOMEgfs}/sorc/logs"
 if [[ ! -d "${logs_dir}" ]]; then
   echo "Creating logs folder"
-  mkdir "${logs_dir}" || exit 1
+  mkdir -p "${logs_dir}" || exit 1
 fi
 
 # Check final exec folder exists
-if [[ ! -d "../exec" ]]; then
-  echo "Creating ../exec folder"
-  mkdir ../exec
+if [[ ! -d "${HOMEgfs}/exec" ]]; then
+  echo "Creating ${HOMEgfs}/exec folder"
+  mkdir -p "${HOMEgfs}/exec"
 fi
 
 #------------------------------------
 # GET MACHINE
 #------------------------------------
 export COMPILER="intel"
-source gfs_utils.fd/ush/detect_machine.sh
-source gfs_utils.fd/ush/module-setup.sh
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
 if [[ -z "${MACHINE_ID}" ]]; then
   echo "FATAL: Unable to determine target machine"
   exit 1
@@ -113,43 +129,39 @@ declare -A build_opts
 big_jobs=0
 build_jobs["ufs"]=8
 big_jobs=$((big_jobs+1))
-build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}"
+build_opts["ufs"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}"
 
-build_jobs["upp"]=6     # The UPP is hardcoded to use 6 cores
-build_opts["upp"]=""
+build_jobs["upp"]=2
+build_opts["upp"]="${_build_debug}"
 
-build_jobs["ufs_utils"]=3
-build_opts["ufs_utils"]="${_verbose_opt}"
+build_jobs["ufs_utils"]=2
+build_opts["ufs_utils"]="${_verbose_opt} ${_build_debug}"
 
 build_jobs["gfs_utils"]=1
-build_opts["gfs_utils"]="${_verbose_opt}"
+build_opts["gfs_utils"]="${_verbose_opt} ${_build_debug}"
 
-build_jobs["ww3prepost"]=3
-build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}"
+build_jobs["ww3prepost"]=2
+build_opts["ww3prepost"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}"
 
 # Optional DA builds
 if [[ "${_build_ufsda}" == "YES" ]]; then
-   if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" ]]; then
+   if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" && "${MACHINE_ID}" != "wcoss2" ]]; then
       echo "NOTE: The GDAS App is not supported on ${MACHINE_ID}.  Disabling build."
    else
       build_jobs["gdas"]=8
       big_jobs=$((big_jobs+1))
-      build_opts["gdas"]="${_verbose_opt}"
+      build_opts["gdas"]="${_verbose_opt} ${_build_debug}"
    fi
 fi
 if [[ "${_build_gsi}" == "YES" ]]; then
    build_jobs["gsi_enkf"]=8
-   build_opts["gsi_enkf"]="${_verbose_opt}"
+   build_opts["gsi_enkf"]="${_verbose_opt} ${_build_debug}"
 fi
 if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then
-   build_jobs["gsi_utils"]=2
-   build_opts["gsi_utils"]="${_verbose_opt}"
-   if [[ "${MACHINE_ID}" == "hercules" ]]; then
-      echo "NOTE: The GSI Monitor is not supported on Hercules.  Disabling build."
-   else
-      build_jobs["gsi_monitor"]=1
-      build_opts["gsi_monitor"]="${_verbose_opt}"
-   fi
+   build_jobs["gsi_utils"]=1
+   build_opts["gsi_utils"]="${_verbose_opt} ${_build_debug}"
+   build_jobs["gsi_monitor"]=1
+   build_opts["gsi_monitor"]="${_verbose_opt} ${_build_debug}"
 fi
 
 # Go through all builds and adjust CPU counts down if necessary
@@ -171,7 +183,7 @@ echo "Building ${build_list}"
 
 # Go through all builds and adjust CPU counts up if possible
 if [[ ${requested_cpus} -lt ${_build_job_max} && ${big_jobs} -gt 0 ]]; then
-   # Add cores to the gdas, ufs, and gsi build jobs
+   # Add cores to the gdas and ufs build jobs
    extra_cores=$(( _build_job_max - requested_cpus ))
    extra_cores=$(( extra_cores / big_jobs ))
    for build in "${!build_jobs[@]}"; do
@@ -184,6 +196,31 @@ fi
 procs_in_use=0
 declare -A build_ids
 
+check_builds()
+{
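+   # Check each background build PID; if one has finished with a non-zero status,
+   # kill the remaining builds and return that status (used by the -k quick-kill option)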
+   for chk_build in "${!build_jobs[@]}"; do
+      # Check if the build is complete and if so what the status was
+      if [[ -n "${build_ids[${chk_build}]+0}" ]]; then
+         if ! ps -p "${build_ids[${chk_build}]}" > /dev/null; then
+            wait "${build_ids[${chk_build}]}"
+            build_stat=$?
+            if [[ ${build_stat} != 0 ]]; then
+               echo "build_${chk_build}.sh failed!  Exiting!"
+               echo "Check logs/build_${chk_build}.log for details."
+               echo "logs/build_${chk_build}.log" > "${HOMEgfs}/sorc/logs/error.logs"
+               for kill_build in "${!build_jobs[@]}"; do
+                  if [[ -n "${build_ids[${kill_build}]+0}" ]]; then
+                     pkill -P "${build_ids[${kill_build}]}"
+                  fi
+               done
+               return "${build_stat}"
+            fi
+         fi
+      fi
+   done
+   return 0
+}
+
 builds_started=0
 # Now start looping through all of the jobs until everything is done
 while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
@@ -192,13 +229,10 @@ while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
       if [[ -n "${build_jobs[${build}]+0}" && -z "${build_ids[${build}]+0}" ]]; then
          # Do we have enough processors to run it?
          if [[ ${_build_job_max} -ge $(( build_jobs[build] + procs_in_use )) ]]; then
-            if [[ "${build}" != "upp" ]]; then
-               "./build_${build}.sh" -j "${build_jobs[${build}]}" "${build_opts[${build}]:-}" > \
-                  "${logs_dir}/build_${build}.log" 2>&1 &
-            else
-               "./build_${build}.sh" "${build_opts[${build}]}" > \
-                  "${logs_dir}/build_${build}.log" 2>&1 &
-            fi
+            # double-quoting build_opts here will not work since it is a string of options
+            #shellcheck disable=SC2086
+            "./build_${build}.sh" ${build_opts[${build}]:-} -j "${build_jobs[${build}]}" > \
+               "${logs_dir}/build_${build}.log" 2>&1 &
             build_ids["${build}"]=$!
             echo "Starting build_${build}.sh"
             procs_in_use=$(( procs_in_use + build_jobs[${build}] ))
@@ -222,11 +256,31 @@ while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
       fi
    done
 
+   # If requested, check if any build has failed and exit if so
+   if [[ "${_quick_kill}" == "YES" ]]; then
+      check_builds
+      build_stat=$?
+      if (( build_stat != 0 )); then
+         exit "${build_stat}"
+      fi
+   fi
+
    sleep 5s
 done
 
+
 # Wait for all jobs to complete and check return statuses
-while [[ ${#build_jobs[@]} -gt 0 ]]; do
+while [[ "${#build_jobs[@]}" -gt 0 ]]; do
+
+   # If requested, check if any build has failed and exit if so
+   if [[ "${_quick_kill}" == "YES" ]]; then
+      check_builds
+      build_stat=$?
+      if [[ ${build_stat} != 0 ]]; then
+         exit "${build_stat}"
+      fi
+   fi
+
    for build in "${!build_jobs[@]}"; do
       # Test if each job is complete and if so, notify and remove from the array
       if [[ -n "${build_ids[${build}]+0}" ]]; then
diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh
index b1a17c33dd..43c503ab4d 100755
--- a/sorc/build_gdas.sh
+++ b/sorc/build_gdas.sh
@@ -2,11 +2,12 @@
 set -eux
 
 OPTIND=1
+_opts="-f "  # forces a clean build
 while getopts ":j:dv" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    j) export BUILD_JOBS=${OPTARG};;
-    v) export BUILD_VERBOSE="YES";;
+    d) _opts+="-c -DCMAKE_BUILD_TYPE=Debug " ;;
+    j) BUILD_JOBS=${OPTARG};;
+    v) _opts+="-v ";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -19,12 +20,10 @@ while getopts ":j:dv" option; do
 done
 shift $((OPTIND-1))
 
-# TODO: GDASApp does not presently handle BUILD_TYPE
-
-BUILD_TYPE=${BUILD_TYPE:-"Release"} \
-BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
+# double quoting opts will not work since it is a string of options
+# shellcheck disable=SC2086
 BUILD_JOBS="${BUILD_JOBS:-8}" \
 WORKFLOW_BUILD="ON" \
-./gdas.cd/build.sh
+./gdas.cd/build.sh ${_opts}
 
 exit
diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh
index 09bd4a9656..e53f71ddcd 100755
--- a/sorc/build_gfs_utils.sh
+++ b/sorc/build_gfs_utils.sh
@@ -18,14 +18,12 @@ EOF
   exit 1
 }
 
-cwd=$(pwd)
-
 OPTIND=1
 while getopts ":j:dvh" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    v) export BUILD_VERBOSE="YES";;
-    j) export BUILD_JOBS="${OPTARG}";;
+    d) BUILD_TYPE="Debug";;
+    v) BUILD_VERBOSE="YES";;
+    j) BUILD_JOBS="${OPTARG}";;
     h)
       usage
       ;;
@@ -44,6 +42,6 @@ shift $((OPTIND-1))
 BUILD_TYPE=${BUILD_TYPE:-"Release"} \
 BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
 BUILD_JOBS=${BUILD_JOBS:-8} \
-"${cwd}/gfs_utils.fd/ush/build.sh"
+"./gfs_utils.fd/ush/build.sh"
 
 exit
diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh
index 9ba278e3ec..ba24cefa81 100755
--- a/sorc/build_gsi_enkf.sh
+++ b/sorc/build_gsi_enkf.sh
@@ -4,9 +4,9 @@ set -eux
 OPTIND=1
 while getopts ":j:dv" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    j) export BUILD_JOBS="${OPTARG}";;
-    v) export BUILD_VERBOSE="YES";;
+    d) BUILD_TYPE="Debug";;
+    j) BUILD_JOBS="${OPTARG}";;
+    v) BUILD_VERBOSE="YES";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh
index 3de1262aac..31add1882a 100755
--- a/sorc/build_gsi_monitor.sh
+++ b/sorc/build_gsi_monitor.sh
@@ -1,14 +1,12 @@
 #! /usr/bin/env bash
 set -eux
 
-cwd=$(pwd)
-
 OPTIND=1
 while getopts ":j:dv" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    j) export BUILD_JOBS="${OPTARG}";;
-    v) export BUILD_VERBOSE="YES";;
+    d) BUILD_TYPE="Debug";;
+    j) BUILD_JOBS="${OPTARG}";;
+    v) BUILD_VERBOSE="YES";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -24,6 +22,6 @@ shift $((OPTIND-1))
 BUILD_TYPE=${BUILD_TYPE:-"Release"} \
 BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
 BUILD_JOBS=${BUILD_JOBS:-8} \
-"${cwd}/gsi_monitor.fd/ush/build.sh"
+"./gsi_monitor.fd/ush/build.sh"
 
 exit
diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh
index 81eab0f628..58c64e6e4a 100755
--- a/sorc/build_gsi_utils.sh
+++ b/sorc/build_gsi_utils.sh
@@ -1,14 +1,12 @@
 #! /usr/bin/env bash
 set -eux
 
-cwd=$(pwd)
-
 OPTIND=1
 while getopts ":j:dv" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    j) export BUILD_JOBS="${OPTARG}";;
-    v) export BUILD_VERBOSE="YES";;
+    d) BUILD_TYPE="Debug";;
+    j) BUILD_JOBS="${OPTARG}";;
+    v) BUILD_VERBOSE="YES";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -25,6 +23,6 @@ BUILD_TYPE=${BUILD_TYPE:-"Release"} \
 BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
 BUILD_JOBS=${BUILD_JOBS:-8} \
 UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \
-"${cwd}/gsi_utils.fd/ush/build.sh"
+"./gsi_utils.fd/ush/build.sh"
 
 exit
diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh
index 59914d6b09..8707cf7bf1 100755
--- a/sorc/build_ufs.sh
+++ b/sorc/build_ufs.sh
@@ -5,14 +5,17 @@ cwd=$(pwd)
 
 # Default settings
 APP="S2SWA"
-CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_GFS_v17_p8_ugwpv1_c3,FV3_GFS_v17_p8_ugwpv1_c3_mynn,FV3_GFS_v17_p8_ugwpv1_mynn"  # TODO: does the g-w need to build with all these CCPP_SUITES?
+CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1_c3_mynn,FV3_GFS_v17_p8_ugwpv1_mynn"  # TODO: does the g-w need to build with all these CCPP_SUITES?
+PDLIB="ON"
 
-while getopts ":da:j:v" option; do
+while getopts ":da:fj:vw" option; do
   case "${option}" in
-    d) BUILD_TYPE="DEBUG";;
+    d) BUILD_TYPE="Debug";;
     a) APP="${OPTARG}";;
+    f) FASTER="ON";;
     j) BUILD_JOBS="${OPTARG}";;
     v) export BUILD_VERBOSE="YES";;
+    w) PDLIB="OFF";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       ;;
@@ -28,13 +31,18 @@ source "./tests/detect_machine.sh"
 source "./tests/module-setup.sh"
 
 MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}"
-[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON"
+[[ ${PDLIB:-"OFF"} = "ON" ]] && MAKE_OPT+=" -DPDLIB=ON"
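+# Debug and FASTER are mutually exclusive here; -DDEBUG takes precedence when both are requested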
+if [[ ${BUILD_TYPE:-"Release"} = "Debug" ]] ; then
+    MAKE_OPT+=" -DDEBUG=ON"
+elif [[ "${FASTER:-OFF}" == ON ]] ; then
+    MAKE_OPT+=" -DFASTER=ON"
+fi
 COMPILE_NR=0
 CLEAN_BEFORE=YES
 CLEAN_AFTER=NO
 
 if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
-  ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}"
+  BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}"
   mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x
   mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua
   cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua
diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh
index e78ca3c180..63ec56cb41 100755
--- a/sorc/build_ufs_utils.sh
+++ b/sorc/build_ufs_utils.sh
@@ -4,8 +4,9 @@ set -eux
 OPTIND=1
 while getopts ":j:dv" option; do
   case "${option}" in
-    j) export BUILD_JOBS="${OPTARG}";;
-    v) export BUILD_VERBOSE="YES";;
+    d) BUILD_TYPE="Debug" ;;
+    j) BUILD_JOBS="${OPTARG}";;
+    v) BUILD_VERBOSE="YES";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -18,13 +19,11 @@ while getopts ":j:dv" option; do
 done
 shift $((OPTIND-1))
 
-script_dir=$(dirname "${BASH_SOURCE[0]}")
-cd "${script_dir}/ufs_utils.fd" || exit 1
-
 CMAKE_OPTS="-DGFS=ON" \
+BUILD_TYPE=${BUILD_TYPE:-"Release"} \
 BUILD_JOBS=${BUILD_JOBS:-8} \
 BUILD_VERBOSE=${BUILD_VERBOSE:-} \
-./build_all.sh
+./ufs_utils.fd/build_all.sh
 
 exit
 
diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh
index a55e96ebc8..e217e171db 100755
--- a/sorc/build_upp.sh
+++ b/sorc/build_upp.sh
@@ -6,25 +6,26 @@ cd "${script_dir}" || exit 1
 
 OPTIND=1
 _opts=""
-while getopts ":dv" option; do
-	case "${option}" in
-		d) _opts+="-d ";;
-		v) _opts+="-v ";;
-		:)
-			echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
-			;;
-		*)
-			echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
-			;;
-	esac
+while getopts ":dj:v" option; do
+  case "${option}" in
+    d) _opts+="-d " ;;
+    j) BUILD_JOBS="${OPTARG}" ;;
+    v) _opts+="-v ";;
+    :)
+      echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
+      ;;
+    *)
+      echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
+      ;;
+  esac
 done
 shift $((OPTIND-1))
 
 # Check final exec folder exists
 if [[ ! -d "../exec" ]]; then
-  mkdir ../exec
+  mkdir -p ../exec
 fi
 
 cd ufs_model.fd/FV3/upp/tests
 # shellcheck disable=SC2086
-./compile_upp.sh ${_opts}
+BUILD_JOBS=${BUILD_JOBS:-8} ./compile_upp.sh ${_opts}
diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh
index 919afaacb3..67ee5e1dc2 100755
--- a/sorc/build_ww3prepost.sh
+++ b/sorc/build_ww3prepost.sh
@@ -6,12 +6,15 @@ cd "${script_dir}" || exit 1
 
 # Default settings
 APP="S2SWA"
+PDLIB="ON"
 
-while getopts ":j:a:v" option; do
+while getopts ":j:a:dvw" option; do
   case "${option}" in
     a) APP="${OPTARG}";;
+    d) BUILD_TYPE="Debug";;
     j) BUILD_JOBS="${OPTARG}";;
     v) export BUILD_VERBOSE="YES";;
+    w) PDLIB="OFF";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -23,15 +26,17 @@ while getopts ":j:a:v" option; do
   esac
 done
 
-
 # Determine which switch to use
 if [[ "${APP}" == "ATMW" ]]; then
   ww3switch="model/esmf/switch"
 else
-  ww3switch="model/bin/switch_meshcap"
+  if [[ "${PDLIB}" == "ON" ]]; then
+    ww3switch="model/bin/switch_meshcap_pdlib"
+  else
+    ww3switch="model/bin/switch_meshcap"
+  fi
 fi
 
-
 # Check final exec folder exists
 if [[ ! -d "../exec" ]]; then
   mkdir ../exec
@@ -64,6 +69,8 @@ mkdir -p "${path_build}" || exit 1
 cd "${path_build}" || exit 1
 echo "Forcing a SHRD build"
 
+buildswitch="${path_build}/switch"
+
 cat "${SWITCHFILE}" > "${path_build}/tempswitch"
 
 sed -e "s/DIST/SHRD/g"\
@@ -73,15 +80,22 @@ sed -e "s/DIST/SHRD/g"\
     -e "s/MPI / /g"\
     -e "s/B4B / /g"\
     -e "s/PDLIB / /g"\
+    -e "s/SCOTCH / /g"\
+    -e "s/METIS / /g"\
     -e "s/NOGRB/NCEP2/g"\
        "${path_build}/tempswitch" > "${path_build}/switch"
 rm "${path_build}/tempswitch"
 
-echo "Switch file is ${path_build}/switch with switches:"
-cat "${path_build}/switch"
+echo "Switch file is ${buildswitch} with switches:"
+cat "${buildswitch}"
+
+# Define CMake build options
+MAKE_OPT="-DCMAKE_INSTALL_PREFIX=install"
+[[ ${BUILD_TYPE:-"Release"} = "Debug" ]] && MAKE_OPT+=" -DCMAKE_BUILD_TYPE=Debug"
 
 #Build executables:
-cmake "${WW3_DIR}" -DSWITCH="${path_build}/switch" -DCMAKE_INSTALL_PREFIX=install
+# shellcheck disable=SC2086
+cmake "${WW3_DIR}" -DSWITCH="${buildswitch}" ${MAKE_OPT}
 rc=$?
 if (( rc != 0 )); then
   echo "Fatal error in cmake."
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index f44a6d500d..52f41a298b 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit f44a6d500dda2aba491e4fa12c0bee428ddb7b80
+Subproject commit 52f41a298b4c6b7bbf6f203b6579516819fbbf36
diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd
index c94bc72ff4..529bb796be 160000
--- a/sorc/gsi_enkf.fd
+++ b/sorc/gsi_enkf.fd
@@ -1 +1 @@
-Subproject commit c94bc72ff410b48c325abbfe92c9fcb601d89aed
+Subproject commit 529bb796bea0e490f186729cd168a91c034bb12d
diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd
index ae256c0d69..e1f9f21af1 160000
--- a/sorc/gsi_monitor.fd
+++ b/sorc/gsi_monitor.fd
@@ -1 +1 @@
-Subproject commit ae256c0d69df3232ee9dd3e81b176bf2c3cda312
+Subproject commit e1f9f21af16ce912fdc2cd75c5b27094a550a0c5
diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd
index 90481d9618..9382fd01c2 160000
--- a/sorc/gsi_utils.fd
+++ b/sorc/gsi_utils.fd
@@ -1 +1 @@
-Subproject commit 90481d961854e4412ecac49991721e6e63d4b82e
+Subproject commit 9382fd01c2a626c8934c3f553d420a45de2b4dec
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index ed33b17e72..799a5724b4 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -10,7 +10,7 @@ function usage() {
 Builds all of the global-workflow components by calling the individual build
   scripts in sequence.
 
-Usage: ${BASH_SOURCE[0]} [-h][-o]
+Usage: ${BASH_SOURCE[0]} [-h][-o][--nest]
   -h:
     Print this help message and exit
   -o:
@@ -23,12 +23,17 @@ RUN_ENVIR="emc"
 
 # Reset option counter in case this script is sourced
 OPTIND=1
-while getopts ":ho" option; do
+while getopts ":ho-:" option; do
   case "${option}" in
     h) usage ;;
     o)
       echo "-o option received, configuring for NCO"
       RUN_ENVIR="nco";;
+    -)
+      if [[ "${OPTARG}" == "nest" ]]; then
+        LINK_NEST=ON
+      fi
+      ;;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
       usage
@@ -70,6 +75,7 @@ case "${machine}" in
   "hercules") FIX_DIR="/work/noaa/global/glopara/fix" ;;
   "jet")      FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;;
   "s4")       FIX_DIR="/data/prod/glopara/fix" ;;
+  "gaea")     FIX_DIR="/gpfs/f5/ufs-ard/world-shared/global/glopara/data/fix" ;;
   *)
     echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR"
     exit 1
@@ -79,11 +85,25 @@ esac
 # Source fix version file
 source "${HOMEgfs}/versions/fix.ver"
 
-# Link wxflow in ush/python, workflow and ci/scripts
+# Link python packages in ush/python
+# TODO: This will be unnecessary when these are part of the virtualenv
+packages=("wxflow")
+for package in "${packages[@]}"; do
+    cd "${HOMEgfs}/ush/python" || exit 1
+    [[ -s "${package}" ]] && rm -f "${package}"
+    ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" .
+done
+
+# Link GDASapp python packages in ush/python
+packages=("jcb")
+for package in "${packages[@]}"; do
+    cd "${HOMEgfs}/ush/python" || exit 1
+    [[ -s "${package}" ]] && rm -f "${package}"
+    ${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" .
+done
+
+# Link wxflow in workflow and ci/scripts
 # TODO: This will be unnecessary when wxflow is part of the virtualenv
-cd "${HOMEgfs}/ush/python" || exit 1
-[[ -s "wxflow" ]] && rm -f wxflow
-${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
 cd "${HOMEgfs}/workflow" || exit 1
 [[ -s "wxflow" ]] && rm -f wxflow
 ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
@@ -91,7 +111,6 @@ cd "${HOMEgfs}/ci/scripts" || exit 1
 [[ -s "wxflow" ]] && rm -f wxflow
 ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
 
-
 # Link fix directories
 if [[ -n "${FIX_DIR}" ]]; then
   if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi
@@ -107,7 +126,6 @@ for dir in aer \
             lut \
             mom6 \
             orog \
-            reg2grb2 \
             sfc_climo \
             ugwd \
             verif \
@@ -120,7 +138,20 @@ do
   fix_ver="${dir}_ver"
   ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}"
 done
-
+# global-nest uses different versions of orog and ugwd
+if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then
+  for dir in orog \
+             ugwd
+  do
+    nestdir=${dir}_nest
+    if [[ -d "${nestdir}" ]]; then
+      [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${nestdir}"
+      rm -rf "${nestdir}"
+    fi
+    fix_ver="${dir}_nest_ver"
+    ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${nestdir}"
+  done
+fi
 
 if [[ -d "${HOMEgfs}/sorc/ufs_utils.fd" ]]; then
   cd "${HOMEgfs}/sorc/ufs_utils.fd/fix" || exit 1
@@ -136,45 +167,70 @@ cd "${HOMEgfs}/parm/ufs" || exit 1
 ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/noahmptable.tbl" .
 
 cd "${HOMEgfs}/parm/post" || exit 1
-for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \
-    postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \
-    postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt \
-    postxconfig-NT-GFS.txt postxconfig-NT-gefs-aerosol.txt postxconfig-NT-gefs-chem.txt params_grib2_tbl_new \
-    post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat \
-    AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat
+for file in postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GEFS-WAFS.txt \
+    postxconfig-NT-GEFS-F00-aerosol.txt postxconfig-NT-GEFS-aerosol.txt \
+    postxconfig-NT-GFS-ANL.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt \
+    postxconfig-NT-GFS.txt postxconfig-NT-GFS-FLUX.txt postxconfig-NT-GFS-GOES.txt \
+    postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-TWO.txt \
+    params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat
 do
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${file}" .
 done
+for file in optics_luts_DUST.dat optics_luts_DUST_nasa.dat optics_luts_NITR_nasa.dat \
+    optics_luts_SALT.dat optics_luts_SALT_nasa.dat optics_luts_SOOT.dat optics_luts_SOOT_nasa.dat \
+    optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat
+do
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/chem/${file}" .
+done
+for file in ice.csv ocean.csv ocnicepost.nml.jinja2
+do
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/parm/ocnicepost/${file}" .
+done
 
 cd "${HOMEgfs}/scripts" || exit 8
 ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" .
+if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
+  declare -a gdas_scripts=(exglobal_prep_ocean_obs.py \
+                           exgdas_global_marine_analysis_ecen.py \
+                           )
+  for gdas_script in "${gdas_scripts[@]}" ; do
+    ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/${gdas_script}" .
+  done
+fi
 cd "${HOMEgfs}/ush" || exit 8
 for file in emcsfc_ice_blend.sh global_cycle_driver.sh emcsfc_snow.sh global_cycle.sh; do
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/ush/${file}" .
 done
-for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
+for file in make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/ush/${file}" .
 done
 
-# TODO: Link these ufs.configure templates from ufs-weather-model
-#cd "${HOMEgfs}/parm/ufs" || exit 1
-#declare -a ufs_configure_files=("ufs.configure.atm.IN" \
-#                                 "ufs.configure.atm_aero.IN" \
-#                                 "ufs.configure.atmw.IN" \
-#                                 "ufs.configure.blocked_atm_wav_2way.IN" \
-#                                 "ufs.configure.blocked_atm_wav.IN" \
-#                                 "ufs.configure.cpld_agrid.IN" \
-#                                 "ufs.configure.cpld_esmfthreads.IN" \
-#                                 "ufs.configure.cpld.IN" \
-#                                 "ufs.configure.cpld_noaero.IN" \
-#                                 "ufs.configure.cpld_noaero_nowave.IN" \
-#                                 "ufs.configure.cpld_noaero_outwav.IN" \
-#                                 "ufs.configure.leapfrog_atm_wav.IN")
-#for file in "${ufs_configure_files[@]}"; do
-#  [[ -s "${file}" ]] && rm -f "${file}"
-#  ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" .
-#done
+# Link these templates from ufs-weather-model
+cd "${HOMEgfs}/parm/ufs" || exit 1
+declare -a ufs_templates=("model_configure.IN" "model_configure_nest.IN"\
+                          "MOM_input_025.IN" "MOM_input_050.IN" "MOM_input_100.IN" "MOM_input_500.IN" \
+                          "MOM6_data_table.IN" \
+                          "ice_in.IN" \
+                          "ufs.configure.atm.IN" \
+                          "ufs.configure.atm_esmf.IN" \
+                          "ufs.configure.atmaero.IN" \
+                          "ufs.configure.atmaero_esmf.IN" \
+                          "ufs.configure.s2s.IN" \
+                          "ufs.configure.s2s_esmf.IN" \
+                          "ufs.configure.s2sa.IN" \
+                          "ufs.configure.s2sa_esmf.IN" \
+                          "ufs.configure.s2sw.IN" \
+                          "ufs.configure.s2sw_esmf.IN" \
+                          "ufs.configure.s2swa.IN" \
+                          "ufs.configure.s2swa_esmf.IN" \
+                          "ufs.configure.leapfrog_atm_wav.IN" \
+                          "ufs.configure.leapfrog_atm_wav_esmf.IN" )
+for file in "${ufs_templates[@]}"; do
+  [[ -s "${file}" ]] && rm -f "${file}"
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" .
+done
 
+# Link the script from ufs-weather-model that parses the templates
 cd "${HOMEgfs}/ush" || exit 1
 [[ -s "atparse.bash" ]] && rm -f "atparse.bash"
 ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/atparse.bash" .
@@ -187,7 +243,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
   cd "${HOMEgfs}/fix" || exit 1
   [[ ! -d gdas ]] && mkdir -p gdas
   cd gdas || exit 1
-  for gdas_sub in fv3jedi gsibec; do
+  for gdas_sub in fv3jedi gsibec obs soca; do
     if [[ -d "${gdas_sub}" ]]; then
        rm -rf "${gdas_sub}"
     fi
@@ -196,16 +252,38 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
   done
 fi
 
+#------------------------------
+#--add GDASApp parm directory
+#------------------------------
+if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
+  cd "${HOMEgfs}/parm/gdas" || exit 1
+  declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca" "jcb-gdas" "jcb-algorithms")
+  for comp in "${gdasapp_comps[@]}"; do
+    [[ -d "${comp}" ]] && rm -rf "${comp}"
+    ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" .
+  done
+fi
+
 #------------------------------
 #--add GDASApp files
 #------------------------------
 if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then
   cd "${HOMEgfs}/ush" || exit 1
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/soca"                              .
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ufsda"                              .
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py"                     .
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_json.py"    .
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_yaml.py"    .
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py"    .
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/imsfv3_scf2ioda.py"           .
-  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/exglobal_prep_ocean_obs.py"           .
+  declare -a gdasapp_ocn_insitu_profile_platforms=("argo" "bathy" "glider" "marinemammal" "tesac" "xbtctd")
+  for platform in "${gdasapp_ocn_insitu_profile_platforms[@]}"; do
+    ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_${platform}.py" .
+  done
+  declare -a gdasapp_ocn_insitu_sfc_platforms=("altkob" "trkob")
+  for platform in "${gdasapp_ocn_insitu_sfc_platforms[@]}"; do
+    ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_surface_${platform}.py" .
+  done
 fi
 
 
@@ -242,8 +320,9 @@ if [[ ! -d "${HOMEgfs}/exec" ]]; then mkdir "${HOMEgfs}/exec" || exit 1 ; fi
 cd "${HOMEgfs}/exec" || exit 1
 
 for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \
-  syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x \
-  mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x
+  syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x rdbfmsua.x \
+  mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x ocnicepost.x webtitle.x \
+  ensadd.x ensppf.x ensstat.x wave_stat.x
 do
   [[ -s "${utilexe}" ]] && rm -f "${utilexe}"
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/install/bin/${utilexe}" .
@@ -291,28 +370,17 @@ fi
 
 # GDASApp
 if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then
-  declare -a JEDI_EXE=("fv3jedi_addincrement.x" \
-                       "fv3jedi_diffstates.x" \
-                       "fv3jedi_ensvariance.x" \
-                       "fv3jedi_hofx.x" \
-                       "fv3jedi_var.x" \
-                       "fv3jedi_convertincrement.x" \
-                       "fv3jedi_dirac.x" \
-                       "fv3jedi_error_covariance_training.x" \
-                       "fv3jedi_letkf.x" \
-                       "fv3jedi_convertstate.x" \
-                       "fv3jedi_eda.x" \
-                       "fv3jedi_forecast.x" \
+  declare -a JEDI_EXE=("gdas.x" \
+                       "gdas_soca_gridgen.x" \
+                       "gdas_soca_error_covariance_toolbox.x" \
+                       "gdas_soca_setcorscales.x" \
+                       "gdas_soca_diagb.x" \
                        "fv3jedi_plot_field.x" \
-                       "fv3jedi_data_checker.py" \
-                       "fv3jedi_enshofx.x" \
-                       "fv3jedi_hofx_nomodel.x" \
-                       "fv3jedi_testdata_downloader.py" \
-                       "soca_convertincrement.x" \
-                       "soca_error_covariance_training.x" \
-                       "soca_setcorscales.x" \
-                       "soca_gridgen.x" \
-                       "soca_var.x" \
+                       "fv3jedi_fv3inc.x" \
+                       "gdas_ens_handler.x" \
+                       "gdas_incr_handler.x" \
+                       "gdas_obsprovider2ioda.x" \
+                       "gdas_socahybridweights.x" \
                        "bufr2ioda.x" \
                        "calcfIMS.exe" \
                        "apply_incr.exe" )
@@ -397,7 +465,6 @@ for prog in enkf_chgres_recenter_nc.fd \
   mkgfsawps.fd \
   overgridid.fd \
   rdbfmsua.fd \
-  reg2grb2.fd \
   supvit.fd \
   syndat_getjtbul.fd \
   syndat_maksynrc.fd \
@@ -405,7 +472,8 @@ for prog in enkf_chgres_recenter_nc.fd \
   tave.fd \
   tocsbufr.fd \
   vint.fd \
-  webtitle.fd
+  webtitle.fd \
+  ocnicepost.fd
 do
   if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi
   ${LINK_OR_COPY} "gfs_utils.fd/src/${prog}" .
diff --git a/sorc/ncl.setup b/sorc/ncl.setup
deleted file mode 100644
index b4981689db..0000000000
--- a/sorc/ncl.setup
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-set +x
-case ${target} in
-  'jet'|'hera')
-    module load ncl/6.5.0
-    export NCARG_LIB=${NCARG_ROOT}/lib
-  ;;
-  *)
-    echo "[${BASH_SOURCE[0]}]: unknown ${target}"
-  ;;
-esac
diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd
index c267780a12..92904d2c43 160000
--- a/sorc/verif-global.fd
+++ b/sorc/verif-global.fd
@@ -1 +1 @@
-Subproject commit c267780a1255fa7db052c745cf9c78b7dc6a2695
+Subproject commit 92904d2c431969345968f74e676717057ec0042a
diff --git a/sorc/wxflow b/sorc/wxflow
index 528f5abb49..d314e06510 160000
--- a/sorc/wxflow
+++ b/sorc/wxflow
@@ -1 +1 @@
-Subproject commit 528f5abb49e80751f83ebd6eb0a87bc70012bb24
+Subproject commit d314e065101041a4d45e5a11ec19cd2dc5f38c67
diff --git a/test/f90nmlcmp.sh b/test/f90nmlcmp.sh
new file mode 100755
index 0000000000..0acb2b711c
--- /dev/null
+++ b/test/f90nmlcmp.sh
@@ -0,0 +1,19 @@
+#! /usr/bin/env bash
+
+# Compare two F90 namelists (forward and backward)
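+# Usage: f90nmlcmp.sh <nml1> <nml2>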
+
+set -eu
+
+# shellcheck disable=SC2155,SC2312
+HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P)
+declare -rx HOMEgfs
+
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" 1>/dev/null 2>&1
+
+file1=${1:?}
+file2=${2:?}
+
+"${HOMEgfs}/ush/compare_f90nml.py" "${file1}" "${file2}"
+echo " "
+"${HOMEgfs}/ush/compare_f90nml.py" "${file2}" "${file1}"
+echo " "
diff --git a/test/g2cmp.sh b/test/g2cmp.sh
new file mode 100755
index 0000000000..c31d10dd62
--- /dev/null
+++ b/test/g2cmp.sh
@@ -0,0 +1,20 @@
+#! /usr/bin/env bash
+
+# Compare two grib2 files with wgrib2
+# The files must have the same fields in the same order
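+# Usage: g2cmp.sh <file1.grib2> <file2.grib2>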
+
+set -eu
+
+# shellcheck disable=SC2155,SC2312
+HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P)
+declare -rx HOMEgfs
+
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" 1>/dev/null 2>&1
+
+file1=${1:?}
+file2=${2:?}
+
+# Use wgrib2 to compute correlations and print any record whose correlation is not 1 (i.e. a mismatch)
+#shellcheck disable=SC2312
+wgrib2 "${file2}" -var -lev -rpn "sto_1" -import_grib "${file1}" -rpn "rcl_1:print_corr:print_rms" | grep -v "rpn_corr=1"
+
diff --git a/test/nccmp.sh b/test/nccmp.sh
new file mode 100755
index 0000000000..b412cdc388
--- /dev/null
+++ b/test/nccmp.sh
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
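+
+# Compare two netCDF files with nccmp
+# Usage: nccmp.sh <file1.nc> <file2.nc>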
+
+set -eu
+
+# shellcheck disable=SC2155,SC2312
+HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P)
+declare -rx HOMEgfs
+
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" 1>/dev/null 2>&1
+module load "nccmp/${nccmp_ver:-"1.9.0.1"}"
+
+file1=${1:?}
+file2=${2:?}
+
+nccmp -d -S -f -B --warn=format "${file1}" "${file2}"
diff --git a/ush/atmos_ensstat.sh b/ush/atmos_ensstat.sh
new file mode 100755
index 0000000000..17981a8c3e
--- /dev/null
+++ b/ush/atmos_ensstat.sh
@@ -0,0 +1,99 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
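+# Usage: atmos_ensstat.sh <grid> <fhr3> [grid_type]
+#   grid:      output grid resolution name (e.g. 0p25)
+#   fhr3:      three-digit forecast hour
+#   grid_type: optional pgrb2 type suffix (e.g. "b" for pgrb2b input)
+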
+grid=${1}
+fhr3=${2}
+grid_type=${3:-''}
+
+mkdir "${grid}${grid_type}"
+cd "${grid}${grid_type}" || exit 2
+
+# Collect input grib files
+input_files=()
+for ((mem_num = 0; mem_num <= "${NMEM_ENS:-0}"; mem_num++)); do
+    mem=$(printf "%03d" "${mem_num}")
+    MEMDIR="mem${mem}" GRID="${grid}" YMD="${PDY}" HH="${cyc}" declare_from_tmpl COMIN_ATMOS_GRIB:COM_ATMOS_GRIB_GRID_TMPL
+    memfile_in="${COMIN_ATMOS_GRIB}/${RUN}.t${cyc}z.pgrb2${grid_type}.${grid}.f${fhr3}"
+
+    if [[ -r "${memfile_in}.idx" ]]; then
+        ${NLN} "${memfile_in}" "mem${mem}"
+        input_files+=("mem${mem}")
+    else
+        echo "FATAL ERROR: ${memfile_in} does not exist"
+        exit 10
+    fi
+done
+
+num_found=${#input_files[@]}
+if (( num_found != NMEM_ENS + 1 )); then
+    echo "FATAL ERROR: Only ${num_found} grib files found out of $(( NMEM_ENS + 1 )) expected members."
+    exit 10
+fi
+
+# Create namelist for ensstat
+mean_out="${RUN}.t${cyc}z.mean.pres_${grid_type}.${grid}.f${fhr3}.grib2"
+spr_out="${RUN}.t${cyc}z.spread.pres_${grid_type}.${grid}.f${fhr3}.grib2"
+
+cat << EOF > input.nml
+&namdim
+    lfdim=${lfm:-''}
+/
+
+&namens
+    nfiles=${num_found}
+    nenspost=0
+    navg_min=${NMEM_ENS}
+
+    cfopg1="${mean_out}"
+    cfopg2="${spr_out}"
+
+$(
+    for (( filenum = 1; filenum <= num_found; filenum++ )); do
+        echo "    cfipg(${filenum})=\"${input_files[$((filenum-1))]}\","
+        echo "    iskip(${filenum})=0,"
+    done
+)
+/
+EOF
+
+cat input.nml
+
+# Run ensstat
+"${EXECgfs}/ensstat.x" < input.nml
+
+export err=$?
+if (( err != 0 )) ; then
+    echo "FATAL ERROR: ensstat returned error code ${err}"
+    exit "${err}"
+fi
+
+# Send data to com and send DBN alerts
+comout_var_name="COMOUT_ATMOS_GRIB_${grid}"
+comout_path="${!comout_var_name}"
+
+for outfile in ${mean_out} ${spr_out}; do
+    if [[ ! -s ${outfile} ]]; then
+        echo "FATAL ERROR: Failed to create ${outfile}"
+        exit 20
+    fi
+
+    ${WGRIB2} -s "${outfile}" > "${outfile}.idx"
+    err=$?
+    if (( err != 0 )); then
+        echo "FATAL ERROR: Failed to create inventory file, wgrib2 returned ${err}"
+        exit "${err}"
+    fi
+
+    cpfs "${outfile}" "${comout_path}/${outfile}"
+    cpfs "${outfile}.idx" "${comout_path}/${outfile}.idx"
+
+    if [[ ${SENDDBN} == "YES" ]]; then
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2${grid_type}_${grid}" "${job}" \
+            "${comout_path}/${outfile}"
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2${grid_type}_${grid}" "${job}" \
+            "${comout_path}/${outfile}.idx"
+    fi
+
+done
+
diff --git a/ush/atmos_extractvars.sh b/ush/atmos_extractvars.sh
new file mode 100755
index 0000000000..70e86b2f4e
--- /dev/null
+++ b/ush/atmos_extractvars.sh
@@ -0,0 +1,98 @@
+#! /usr/bin/env bash
+
+################################################################################
+## UNIX Script Documentation Block
+## Script name:         atmos_extractvars.sh
+## Script description:  Extracts and calculates 24-hr averages of variables
+##                      from atmosphere products and saves these variables in arcdir
+#######################
+# Main body starts here
+#######################
+
+source "${USHgfs}/preamble.sh"
+
+fcnt=1 # 1 is 1st quarter, 2 is 2nd quarter, 3 is 3rd quarter, and 4 is 4th quarter of the day
+dcnt=1 # lead day
+subdata=${1}
+
+[[ -d "${subdata}" ]] || mkdir -p "${subdata}"
+
+for outtype in "f2d" "f3d"; do
+
+  if [[ "${outtype}" == "f2d" ]]; then
+    varlist=${varlist_2d}
+    ARC_RFCST_PROD_ATMOS="${ARC_RFCST_PROD_ATMOS_F2D}"
+  elif [[ "${outtype}" == "f3d" ]]; then
+    varlist=${varlist_3d}
+    varlist_d=${varlist_3d_d}
+    ARC_RFCST_PROD_ATMOS="${ARC_RFCST_PROD_ATMOS_F3D}"
+  fi
+
+  outdirpre="${subdata}/${outtype}"
+  [[ -d "${outdirpre}" ]] || mkdir -p "${outdirpre}"
+
+  nh=${FHMIN}
+  while (( nh <= FHMAX_GFS )); do
+    fnh=$(printf "%3.3d" "${nh}")
+
+    if [[ "${outtype}" == "f2d" ]]; then
+      if (( nh < FHMAX_HF_GFS )); then
+        outres="0p25"
+      else
+        outres="0p50"
+      fi
+    elif [[ "${outtype}" == "f3d" ]]; then
+      outres="1p00"
+    fi
+
+    if (( nh <= FHMAX_HF_GFS )); then
+      outfreq=${FHOUT_HF_GFS}
+    else
+      outfreq=${FHOUT_GFS}
+    fi
+
+    com_var="COMIN_ATMOS_GRIB_${outres}"
+    infile1="${!com_var}/${RUN}.t${cyc}z.pgrb2.${outres}.f${fnh}"
+    infile2="${!com_var}/${RUN}.t${cyc}z.pgrb2b.${outres}.f${fnh}"
+    outfile="${outdirpre}/${RUN}.t${cyc}z.pgrb2.${outres}.f${fnh}"
+    rm -f "${outfile}" #remove outfile if it already exists before extraction
+
+    for infile in "${infile1}" "${infile2}"; do
+      if [[ -f "${infile}" ]]; then # check if input file exists before extraction
+        # shellcheck disable=SC2312
+        ${WGRIB2} "${infile}" | grep -F -f "${varlist}" | ${WGRIB2} -i "${infile}" -append -grib "${outfile}"
+      else
+        echo "WARNING: ${infile} does not exist."
+      fi
+    done
+
+    check_atmos "${infile1}" "${infile2}" "${varlist}" "${fnh}"
+    copy_to_comout "${outfile}" "${ARC_RFCST_PROD_ATMOS}"
+
+    # Compute daily average for a subset of variables
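+    # fcnt counts the 6-h chunks within a lead day; after the 4th chunk the daily
+    # average is computed and dcnt advances to the next lead day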
+    if (( nh % 6 == 0 )) && (( nh != 0 )) && [[ "${outtype}" == "f3d" ]]; then
+      outfile=${subdata}/vartmp_raw_vari_ldy${dcnt}.grib2
+      for infile in "${infile1}" "${infile2}"; do
+        if [[ -f "${infile}" ]]; then # check if input file exists before extraction
+          # shellcheck disable=SC2312
+          ${WGRIB2} "${infile}" | grep -F -f "${varlist_d}" | ${WGRIB2} -i "${infile}" -append -grib "${outfile}"
+        else
+          echo "WARNING: ${infile} does not exist."
+        fi
+      done
+      if [[ ${fcnt} -eq 4 ]]; then
+        daily_avg_atmos "${outfile}" "${dcnt}" "${outres}"
+        copy_to_comout "${davg_file}" "${ARC_RFCST_PROD_ATMOS}"
+        fcnt=1
+        dcnt=$(( dcnt + 1 ))
+      else
+        fcnt=$(( fcnt + 1 ))
+      fi # If at final lead hour of a given day
+    fi # if lead hour is divisible by 6 and outtype is f3d
+
+    nh=$(( nh + outfreq ))
+  done # nh
+
+done # f2d,f3d
+
+exit 0
diff --git a/ush/bash_utils.sh b/ush/bash_utils.sh
new file mode 100755
index 0000000000..b8ce729cb8
--- /dev/null
+++ b/ush/bash_utils.sh
@@ -0,0 +1,126 @@
+#! /usr/bin/env bash
+
+function declare_from_tmpl() {
+    #
+    # Define variables from corresponding templates by substituting in env variables.
+    #
+    # Each template must already be defined. Any variables in the template are replaced
+    #   with their values. Undefined variables are just removed WITHOUT raising an error.
+    #
+    # Accepts as options `-r` and `-x`, which do the same thing as the same options in
+    #   `declare`. Variables are automatically marked as `-g` so the variable is visible
+    #   in the calling script.
+    #
+    # Syntax:
+    #   declare_from_tmpl [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]]
+    #
+    #   options:
+    #       -r: Make variable read-only (same as `declare -r`)
+    #       -x: Mark variable for export (same as `declare -x`)
+    #   var1, var2, etc: Variable names whose values will be generated from a template
+    #                    and declared
+    #   tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL")
+    #
+    #   Examples:
+    #       # Current cycle and RUN, implicitly using template COM_ATMOS_ANALYSIS_TMPL
+    #       YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
+    #
+    #       # Previous cycle and gdas using an explicit template
+    #       RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+    #           COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
+    #
+    #       # Current cycle and COM for first member
+    #       MEMDIR='mem001' YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY
+    #
+    if [[ ${DEBUG_WORKFLOW:-"NO"} == "NO" ]]; then set +x; fi
+    local opts="-g"
+    local OPTIND=1
+    while getopts "rx" option; do
+        opts="${opts}${option}"
+    done
+    shift $((OPTIND-1))
+
+    for input in "$@"; do
+        IFS=':' read -ra args <<< "${input}"
+        local com_var="${args[0]}"
+        local template
+        local value
+        if (( ${#args[@]} > 1 )); then
+            template="${args[1]}"
+        else
+            template="${com_var}_TMPL"
+        fi
+        if [[ ! -v "${template}" ]]; then
+            echo "FATAL ERROR in declare_from_tmpl: Requested template ${template} not defined!"
+            exit 2
+        fi
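+        # envsubst expands exported variables referenced in the template; unset ones become empty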
+        value=$(echo "${!template}" | envsubst)
+        # shellcheck disable=SC2086
+        declare ${opts} "${com_var}"="${value}"
+        # shellcheck disable=
+        echo "declare_from_tmpl :: ${com_var}=${value}"
+    done
+    set_trace
+}
+
+function wait_for_file() {
+    #
+    # Wait for a file to exist and return the status.
+    #
+    # Checks if a file exists periodically up to a maximum number of attempts. When the file
+    #   exists or the limit is reached, the status is returned (0 if the file exists, 1 if it
+    #   does not). This allows it to be used as a conditional to handle missing files.
+    #
+    # Syntax:
+    #   wait_for_file file_name [sleep_interval [max_tries]]
+    #
+    #     file_name:      File to check the existence of (must be readable)
+    #     sleep_interval: Time to wait between each check (in seconds) [default: 60]
+    #     max_tries:      The maximum number of checks to make [default: 100]
+    #
+    # Example:
+    #     ```
+    #     file_name=/path/to/foo
+    #     sleep_interval=60
+    #     max_tries=30
+    #     if ! wait_for_file "${file_name}" "${sleep_interval}" "${max_tries}"; then
+    #       echo "FATAL ERROR: ${file_name} still does not exist after waiting one-half hour."
+    #       exit 1
+    #     fi
+    #     # Code that depends on file existing
+    #     ```
+    #
+    set +x
+    local file_name=${1:?"wait_for_file() requires a file name"}
+    local sleep_interval=${2:-60}
+    local max_tries=${3:-100}
+
+    for (( iter=0; iter<max_tries; iter++ )); do
+        if [[ -r ${file_name} ]]; then
+            set_trace
+            return 0
+        fi
+        sleep "${sleep_interval}"
+    done
+    set_trace
+    return 1
+}
+
+function detect_py_ver() {
+    # 
+    # Returns the major.minor version of the currently active python executable
+    #
+    regex="[0-9]+\.[0-9]+"
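+    # e.g. "Python 3.11.8" matches "3.11"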
+    # shellcheck disable=SC2312
+    if [[ $(python --version) =~ ${regex} ]]; then
+        echo "${BASH_REMATCH[0]}"
+    else
+        echo "FATAL ERROR: Could not detect the python version"
+        exit 1
+    fi
+}
+# shellcheck disable=
+
+declare -xf declare_from_tmpl
+declare -xf wait_for_file
+declare -xf detect_py_ver
diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py
index cf2dc8dc89..5d97d25dfd 100755
--- a/ush/calcanl_gfs.py
+++ b/ush/calcanl_gfs.py
@@ -12,12 +12,14 @@
 from collections import OrderedDict
 import datetime
 
+python2fortran_bool = {True: '.true.', False: '.false.'}
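+# Used to render Python booleans as Fortran logicals ('.true.'/'.false.') in calc_analysis.nml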
+
 
 # function to calculate analysis from a given increment file and background
 def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                 ComIn_Ges, GPrefix,
                 FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs,
-                ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, Cdump):
+                ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, Run, JEDI):
     print('calcanl_gfs beginning at: ', datetime.datetime.utcnow())
 
     IAUHH = IAUHrs
@@ -36,7 +38,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                 gsi_utils.link_file(RunDir + '/siganl', CalcAnlDir + '/anl.06')
                 gsi_utils.copy_file(ExecChgresInc, CalcAnlDir + '/chgres_inc.x')
                 # for ensemble res analysis
-                if Cdump in ["gdas", "gfs"]:
+                if Run in ["gdas", "gfs"]:
                     CalcAnlDir = RunDir + '/calcanl_ensres_' + format(fh, '02')
                     if not os.path.exists(CalcAnlDir):
                         gsi_utils.make_dir(CalcAnlDir)
@@ -133,7 +135,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
     ExecCMDMPI1 = ExecCMDMPI.replace("$ncmd", str(1))
     ExecCMDMPI = ExecCMDMPI.replace("$ncmd", str(nFH))
     ExecCMDLevs = ExecCMDMPI.replace("$ncmd", str(levs))
-    ExecCMDMPI10 = ExecCMDMPI.replace("$ncmd", str(10))
+    ExecCMDMPI13 = ExecCMDMPI.replace("$ncmd", str(13))
 
     # are we using mpirun with lsf, srun, or aprun with Cray?
     launcher = ExecCMDMPI.split(' ')[0]
@@ -154,7 +156,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
             ExecCMDMPILevs_host = 'mpirun -np ' + str(levs) + ' --hostfile hosts'
             ExecCMDMPILevs_nohost = 'mpirun -np ' + str(levs)
         ExecCMDMPI1_host = 'mpirun -np 1 --hostfile hosts'
-        ExecCMDMPI10_host = 'mpirun -np 10 --hostfile hosts'
+        ExecCMDMPI13_host = 'mpirun -np 13 --hostfile hosts'
     elif launcher == 'mpiexec':
         hostfile = os.getenv('PBS_NODEFILE', '')
         with open(hostfile) as f:
@@ -164,7 +166,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         [hosts.append(x) for x in hosts_tmp if x not in hosts]
         nhosts = len(hosts)
         ExecCMDMPI_host = 'mpiexec -l -n ' + str(nFH)
-        tasks = int(os.getenv('ntasks', 1))
+        tasks = int(os.getenv('ntasks_calcanl', 1))
         print('nhosts,tasks=', nhosts, tasks)
         if levs > tasks:
             ExecCMDMPILevs_host = 'mpiexec -l -n ' + str(tasks)
@@ -173,7 +175,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
             ExecCMDMPILevs_host = 'mpiexec -l -n ' + str(levs)
             ExecCMDMPILevs_nohost = 'mpiexec -l -n ' + str(levs)
         ExecCMDMPI1_host = 'mpiexec -l -n 1 --cpu-bind depth --depth ' + str(NThreads)
-        ExecCMDMPI10_host = 'mpiexec -l -n 10 --cpu-bind depth --depth ' + str(NThreads)
+        ExecCMDMPI13_host = 'mpiexec -l -n 13 --cpu-bind depth --depth ' + str(NThreads)
     elif launcher == 'srun':
         nodes = os.getenv('SLURM_JOB_NODELIST', '')
         hosts_tmp = subprocess.check_output('scontrol show hostnames ' + nodes, shell=True)
@@ -198,7 +200,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
             ExecCMDMPILevs_host = 'srun -n ' + str(levs) + ' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
             ExecCMDMPILevs_nohost = 'srun -n ' + str(levs) + ' --verbose --export=ALL'
         ExecCMDMPI1_host = 'srun -n 1 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
-        ExecCMDMPI10_host = 'srun -n 10 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
+        ExecCMDMPI13_host = 'srun -n 13 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
     elif launcher == 'aprun':
         hostfile = os.getenv('LSB_DJOB_HOSTFILE', '')
         with open(hostfile) as f:
@@ -211,7 +213,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         ExecCMDMPILevs_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n ' + str(levs)
         ExecCMDMPILevs_nohost = 'aprun -d ' + str(NThreads) + ' -n ' + str(levs)
         ExecCMDMPI1_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 1'
-        ExecCMDMPI10_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 10'
+        ExecCMDMPI13_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 13'
     else:
         print('unknown MPI launcher. Failure.')
         sys.exit(1)
@@ -246,13 +248,13 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                         ihost += 1
                         for a in range(0, 5):
                             hostfile.write(hosts[ihost] + '\n')
-                    for a in range(0, 9):  # need 9 more of the same host for the 10 tasks for chgres_inc
+                    for a in range(0, 12):  # need 12 more of the same host for the 13 tasks for chgres_inc
                         hostfile.write(hosts[ihost] + '\n')
             if launcher == 'srun':
                 os.environ['SLURM_HOSTFILE'] = CalcAnlDir + '/hosts'
             print('interp_inc', fh, namelist)
-            job = subprocess.Popen(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x', shell=True, cwd=CalcAnlDir)
-            print(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x submitted on ' + hosts[ihost])
+            job = subprocess.Popen(ExecCMDMPI13_host + ' ' + CalcAnlDir + '/chgres_inc.x', shell=True, cwd=CalcAnlDir)
+            print(ExecCMDMPI13_host + ' ' + CalcAnlDir + '/chgres_inc.x submitted on ' + hosts[ihost])
             sys.stdout.flush()
             ec = job.wait()
             if ec != 0:
@@ -273,6 +275,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                          "firstguess_filename": "'ges'",
                          "increment_filename": "'inc.fullres'",
                          "fhr": 6,
+                         "jedi": python2fortran_bool[JEDI],
                          }
 
     gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml')
@@ -295,7 +298,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         sys.exit(exit_fullres)
 
     # compute determinstic analysis on ensemble resolution
-    if Cdump in ["gdas", "gfs"]:
+    if Run in ["gdas", "gfs"]:
         chgres_jobs = []
         for fh in IAUHH:
             # first check to see if guess file exists
@@ -311,6 +314,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                                      "firstguess_filename": "'ges.ensres'",
                                      "increment_filename": "'siginc.nc'",
                                      "fhr": fh,
+                                     "jedi": python2fortran_bool[JEDI],
                                      }
 
                 gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml')
@@ -346,7 +350,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
     ComOut = os.getenv('COM_ATMOS_ANALYSIS', './')
     APrefix = os.getenv('APREFIX', '')
     NThreads = os.getenv('NTHREADS_CHGRES', 1)
-    FixDir = os.getenv('FIXam', './')
+    FixDir = os.path.join(os.getenv('FIXgfs', './'), 'am')
     atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean')
     RunDir = os.getenv('DATA', './')
     ExecCMD = os.getenv('APRUN_CALCANL', '')
@@ -355,11 +359,12 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
     ExecChgresInc = os.getenv('CHGRESINCEXEC', './interp_inc.x')
     NEMSGet = os.getenv('NEMSIOGET', 'nemsio_get')
     IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(',')))
-    Cdump = os.getenv('CDUMP', 'gdas')
+    Run = os.getenv('RUN', 'gdas')
+    JEDI = gsi_utils.isTrue(os.getenv('DO_JEDIATMVAR', 'YES'))
 
     print(locals())
     calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                 ComIn_Ges, GPrefix,
                 FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs,
                 ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc,
-                Cdump)
+                Run, JEDI)
diff --git a/ush/check_ice_netcdf.sh b/ush/check_ice_netcdf.sh
new file mode 100755
index 0000000000..02ca4dae80
--- /dev/null
+++ b/ush/check_ice_netcdf.sh
@@ -0,0 +1,43 @@
+#! /usr/bin/env bash
+
+yyyy=${1?}
+mm=${2?}
+dd=${3?}
+cyc=${4?}
+fhr=${5?}
+ROTDIR=${6?}
+member=${7?}
+FHOUT_ICE_GFS=${8?}
+
+fhri=$((10#${fhr}))
+
+# Will need to consider fhmin in the future to calculate the offset if we are to stick with this approach.
+((offset = ( cyc ) % FHOUT_ICE_GFS))
+
+if (( offset != 0  )); then
+  (( fhri = fhri - cyc ))
+  fhr3=$(printf %03i "${fhri}")
+  if (( fhri <= FHOUT_ICE_GFS  )); then
+    (( interval = FHOUT_ICE_GFS - cyc ))
+    ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc
+  else
+    ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr3}.nc
+  fi
+else
+  ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model_data/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr}.nc
+fi
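+
+# Worked example (illustrative): cyc=03, FHOUT_ICE_GFS=6, fhr=009
+#   offset = 3 % 6 = 3 (non-zero), so fhri = 9 - 3 = 6 and fhr3=006
+#   fhri <= FHOUT_ICE_GFS, so interval = 6 - 3 = 3 and the expected file is
+#   ${ROTDIR}/gefs.YYYYMMDD/03/mem${member}/model_data/ice/history/gefs.ice.t03z.3hr_avg.f006.nc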
+
+# Check if the netcdf file exists.
+if [[ ! -f "${ncfile}" ]];then
+  rc=1
+else
+  # Check that the netcdf file is at least 2 minutes old (find -mmin -2 returns the file only if it was modified within the last 2 minutes).
+  ncage="$(find "${ncfile}" -mmin -2)"
+  if [[ -n "${ncage}" ]]; then
+    rc=1
+  else
+    rc=0
+  fi
+fi
+
+exit "${rc}"
diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh
index 01ae66a02d..b049a6040e 100755
--- a/ush/detect_machine.sh
+++ b/ush/detect_machine.sh
@@ -1,21 +1,30 @@
 #!/bin/bash
 
+# The authoritative copy of this script lives in the ufs-weather-model at:
+# https://github.com/ufs-community/ufs-weather-model/blob/develop/tests/detect_machine.sh
+# If any local modifications are made or new platform support added,
+# please consider opening an issue and a PR to the ufs-weather-model
+# so that this copy remains in sync with its authoritative source
+#
+# Thank you for your contribution
+
+# If the MACHINE_ID variable is set, skip this script.
+[[ -n ${MACHINE_ID:-} ]] && return
+
 # First detect w/ hostname
 case $(hostname -f) in
 
-  adecflow0[12].acorn.wcoss2.ncep.noaa.gov)  MACHINE_ID=wcoss2 ;; ### acorn
-  alogin0[12].acorn.wcoss2.ncep.noaa.gov)    MACHINE_ID=wcoss2 ;; ### acorn
+  adecflow0[12].acorn.wcoss2.ncep.noaa.gov)  MACHINE_ID=acorn ;; ### acorn
+  alogin0[12].acorn.wcoss2.ncep.noaa.gov)    MACHINE_ID=acorn ;; ### acorn
   clogin0[1-9].cactus.wcoss2.ncep.noaa.gov)  MACHINE_ID=wcoss2 ;; ### cactus01-9
   clogin10.cactus.wcoss2.ncep.noaa.gov)      MACHINE_ID=wcoss2 ;; ### cactus10
   dlogin0[1-9].dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood01-9
   dlogin10.dogwood.wcoss2.ncep.noaa.gov)     MACHINE_ID=wcoss2 ;; ### dogwood10
 
-  gaea9)               MACHINE_ID=gaea ;; ### gaea9
-  gaea1[0-6])          MACHINE_ID=gaea ;; ### gaea10-16
-  gaea9.ncrc.gov)      MACHINE_ID=gaea ;; ### gaea9
-  gaea1[0-6].ncrc.gov) MACHINE_ID=gaea ;; ### gaea10-16
+  gaea5[1-8])          MACHINE_ID=gaea ;; ### gaea51-58
+  gaea5[1-8].ncrc.gov) MACHINE_ID=gaea ;; ### gaea51-58
 
-  hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-9
+  hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-09
   hfe1[0-2]) MACHINE_ID=hera ;; ### hera10-12
   hecflow01) MACHINE_ID=hera ;; ### heraecflow01
 
@@ -28,10 +37,6 @@ case $(hostname -f) in
 
   [Hh]ercules-login-[1-4].[Hh][Pp][Cc].[Mm]s[Ss]tate.[Ee]du) MACHINE_ID=hercules ;; ### hercules1-4
 
-  cheyenne[1-6].cheyenne.ucar.edu)     MACHINE_ID=cheyenne ;; ### cheyenne1-6
-  cheyenne[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6
-  chadmin[1-6].ib0.cheyenne.ucar.edu)  MACHINE_ID=cheyenne ;; ### cheyenne1-6
-
   login[1-4].stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede1-4
 
   login0[1-2].expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse1-2
@@ -40,7 +45,7 @@ case $(hostname -f) in
   *) MACHINE_ID=UNKNOWN ;;  # Unknown platform
 esac
 
-if [[ ${MACHINE_ID} == "UNKNOWN" ]]; then 
+if [[ ${MACHINE_ID} == "UNKNOWN" ]]; then
    case ${PW_CSP:-} in
       "aws" | "google" | "azure") MACHINE_ID=noaacloud ;;
       *) PW_CSP="UNKNOWN"
@@ -56,30 +61,30 @@ if [[ "${MACHINE_ID}" != "UNKNOWN" ]]; then
 fi
 
 # Try searching based on paths since hostname may not match on compute nodes
-if [[ -d /lfs/f1 ]] ; then
+if [[ -d /lfs/h3 ]]; then
   # We are on NOAA Cactus or Dogwood
   MACHINE_ID=wcoss2
-elif [[ -d /mnt/lfs1 ]] ; then
+elif [[ -d /lfs/h1 && ! -d /lfs/h3 ]]; then
+  # We are on NOAA TDS Acorn
+  MACHINE_ID=acorn
+elif [[ -d /mnt/lfs1 ]]; then
   # We are on NOAA Jet
   MACHINE_ID=jet
-elif [[ -d /scratch1 ]] ; then
+elif [[ -d /scratch1 ]]; then
   # We are on NOAA Hera
   MACHINE_ID=hera
-elif [[ -d /work ]] ; then
+elif [[ -d /work ]]; then
   # We are on MSU Orion or Hercules
-  if [[ -d /apps/other ]] ; then
-    # We are on Hercules
+  mount=$(findmnt -n -o SOURCE /home)
+  if [[ ${mount} =~ "hercules" ]]; then
     MACHINE_ID=hercules
   else
     MACHINE_ID=orion
   fi
-elif [[ -d /glade ]] ; then
-  # We are on NCAR Yellowstone
-  MACHINE_ID=cheyenne
-elif [[ -d /lustre && -d /ncrc ]] ; then
+elif [[ -d /gpfs && -d /ncrc ]]; then
   # We are on GAEA.
   MACHINE_ID=gaea
-elif [[ -d /data/prod ]] ; then
+elif [[ -d /data/prod ]]; then
   # We are on SSEC's S4
   MACHINE_ID=s4
 else
diff --git a/ush/extractvars_tools.sh b/ush/extractvars_tools.sh
new file mode 100644
index 0000000000..daf61a3d2e
--- /dev/null
+++ b/ush/extractvars_tools.sh
@@ -0,0 +1,60 @@
+#! /usr/bin/env bash
+
+check_atmos() {
+  # Function to check if there are any missing parm variables in any of the input product grib2 files
+  # A warning will be displayed if there is a parm variable that cannot be found in any of the given input product grib2 files
+  infile1p=$1
+  infile2p=$2
+  varlistl=$3
+  fnhl=$4
+  requestedvar_in_allgrb2file="${subdata}/parmvarsingribfil.txt"
+  rm -f "${requestedvar_in_allgrb2file}"
+  touch "${requestedvar_in_allgrb2file}"
+  for infilep in "${infile1p}" "${infile2p}"; do
+    # grep may legitimately return an empty result if none of the parmlist vars are in infilep; therefore do not treat a non-zero exit status as an error
+    # shellcheck disable=SC2312
+    ${WGRIB2} "${infilep}" | grep -F -f "${varlist}" >> "${requestedvar_in_allgrb2file}" || true
+  done
+  mapfile -t requestedvar_in_allgrb2file_arr < "${requestedvar_in_allgrb2file}"
+  while read -r vari; do
+    if [[ ! ${requestedvar_in_allgrb2file_arr[*]} =~ ${vari} ]]; then
+      echo "WARNING: PARM VARIABLE (${vari}) is not available in pgrb and pgrb2b for f${fnhl}."
+    fi
+  done <"${varlistl}"
+}
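+
+# Illustrative usage (file names and COMIN are hypothetical; WGRIB2, subdata and varlist
+# are expected to be set by the calling script):
+#   check_atmos "${COMIN}/gefs.t00z.pgrb2.0p25.f006" "${COMIN}/gefs.t00z.pgrb2b.0p25.f006" "${varlist}" "006"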
+
+daily_avg_atmos() {
+  # Function to calculate the 24-hr average of a grib2 file with atmospheric fields
+  # The input grib2 file must contain all of the time records to be averaged (e.g. the 6hr, 12hr, 18hr, and 24hr records in one grib2 file)
+  outfile_p=$1
+  dcnt_p=$2
+  outres_p=$3
+  fnd=$(printf "%2.2d" "${dcnt_p}")
+  davg_file=${outdirpre}/${RUN}.t${cyc}z.pgrb2.${outres_p}.24hr_avg.ldy${fnd}
+  vcnt=1 # count of variables in varlist_d
+  while read -r vari; do
+    davgtmp=${subdata}/atmos_tmp.ldy${fnd}.${vcnt}
+    # shellcheck disable=SC2312
+    ${WGRIB2} "${outfile_p}" | grep "${vari}" | ${WGRIB2} -i "${outfile_p}" -fcst_ave 6hr "${davgtmp}"
+    # shellcheck disable=SC2312
+    ${WGRIB2} "${davgtmp}" | ${WGRIB2} -i "${davgtmp}" -append -grib "${davg_file}"
+    rm -f "${davgtmp}"
+    vcnt=$(( vcnt + 1 ))
+  done <"${varlist_d}" # variable
+}
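+
+# Illustrative usage (arguments are hypothetical; outdirpre, subdata, varlist_d, RUN, cyc
+# and WGRIB2 are expected to be set by the calling script):
+#   daily_avg_atmos "${subdata}/gefs_pgrb2_0p25_alltimes.ldy02" 2 "0p25"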
+
+copy_to_comout() {
+  # Function to copy the output file with the extracted product variables to a user-defined destination directory
+  rundir_outfile=$1 # output data file generated in RUNDIR
+  comout_dir=$2 # destination directory to which to copy the data file
+  if [[ -f "${rundir_outfile}" ]]; then
+    cpfs "${rundir_outfile}" "${comout_dir}"
+  else
+    echo "FATAL ERROR: Output file (${rundir_outfile}) does not exist."
+    export err=1; err_chk
+  fi
+}
+
+declare -xf check_atmos
+declare -xf daily_avg_atmos
+declare -xf copy_to_comout
diff --git a/ush/file_utils.sh b/ush/file_utils.sh
old mode 100644
new mode 100755
diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index d0dc325460..603447f612 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -1,121 +1,118 @@
 #! /usr/bin/env bash
 
-#####
-## "forecast_det.sh"
-## This script sets value of all variables
-##
-## This is the child script of ex-global forecast,
-## This script is a definition of functions.
-#####
-
-# For all non-evironment variables
-# Cycling and forecast hour specific parameters
-
-FV3_det(){
-  echo "SUB ${FUNCNAME[0]}: Run type determination for FV3"
-  #-------------------------------------------------------
-  # warm start?
-  warm_start=${EXP_WARM_START:-".false."}
-  read_increment=${read_increment:-".false."}
-  res_latlon_dynamics="''"
-
-  # Determine if this is a warm start or cold start
-  if [[ -f "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000.coupler.res" ]]; then
+# Disable variable not used warnings
+# shellcheck disable=SC2034
+UFS_det(){
+  echo "SUB ${FUNCNAME[0]}: Run type determination for UFS"
+
+  # Determine if the current cycle is a warm start (based on the availability of restarts)
+  if [[ -f "${COMIN_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then
     warm_start=".true."
-  fi
+  fi
 
-  # turn IAU off for cold start
-  DOIAU_coldstart=${DOIAU_coldstart:-"NO"}
-  if [ "${DOIAU}" = "YES" -a "${warm_start}" = ".false." ] || [ "${DOIAU_coldstart}" = "YES" -a "${warm_start}" = ".true." ]; then
-    echo "turning off IAU since this is a cold-start"
-    DOIAU="NO"
-    DOIAU_coldstart="YES"
-    # Ignore "not used" warning
-    # shellcheck disable=SC2034
-    IAU_OFFSET=0
-    sCDATE=${current_cycle}
-    sPDY=${current_cycle:0:8}
-    scyc=${current_cycle:8:2}
-    tPDY=${sPDY}
-    tcyc=${scyc}
-  fi
+  # If restarts were not available, this is likely a cold start
+  if [[ "${warm_start}" == ".false." ]]; then
 
-  #-------------------------------------------------------
-  # determine if restart IC exists to continue from a previous forecast run attempt
-
-  RERUN=${RERUN:-"NO"}
-  # Get a list of all YYYYMMDD.HH0000.coupler.res files from the atmos restart directory
-  mapfile -t file_array < <(find "${COM_ATMOS_RESTART:-/dev/null}" -name "????????.??0000.coupler.res")
-  if [[ ( "${RUN}" = "gfs" || "${RUN}" = "gefs" ) \
-    && "${#file_array[@]}" -gt 0 ]]; then
-
-    # Look in reverse order of file_array to determine available restart times
-    for ((ii=${#file_array[@]}-1; ii>=0; ii--)); do
-
-      local filepath="${file_array[ii]}"
-      local filename
-      filename=$(basename "${filepath}")  # Strip path from YYYYMMDD.HH0000.coupler.res
-      PDYS=${filename:0:8}  # match YYYYMMDD of YYYYMMDD.HH0000.coupler.res
-      cycs=${filename:9:2}  # match HH of YYYYMMDD.HH0000.coupler.res
-
-      # Assume all is well; all restarts are available
-      local fv3_rst_ok="YES"
-      local mom6_rst_ok="YES"
-      local cice6_rst_ok="YES"
-      local cmeps_rst_ok="YES"
-      local ww3_rst_ok="YES"
-
-      # Check for availability of FV3 restarts
-      if [[ -f "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res" ]]; then
-        mv "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res" "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res.old"   ## JKH
-      else
-        local fv3_rst_ok="NO"
-      fi
+    # Since restarts are not available from the previous cycle, this is likely a cold start
+    # Ensure cold start ICs are present when warm start is not set
+    # TODO: add checks for other cold start ICs as well
+    if [[ ! -f "${COMIN_ATMOS_INPUT}/gfs_ctrl.nc" ]]; then
+      echo "FATAL ERROR: Cold start ICs are missing from '${COMIN_ATMOS_INPUT}'"
+      exit 1
+    fi
 
-      # Check for availability of MOM6 restarts  # TODO
-      # Check for availability of CICE6 restarts  # TODO
-      # Check for availability of CMEPS restarts  # TODO
-
-      # Check for availability of WW3 restarts
-      if [[ "${cplwav}" = ".true." ]]; then
-        for ww3_grid in ${waveGRD} ; do
-          if [[ ! -f "${COM_WAVE_RESTART}/${PDYS}.${cycs}0000.restart.${ww3_grid}" ]]; then
-            local ww3_rst_ok="NO"
-          fi
-        done
-      fi
-
-      # Collective check
-      if [[ "${fv3_rst_ok}" = "YES" ]] \
-        && [[ "${mom6_rst_ok}" = "YES" ]] \
-        && [[ "${cice6_rst_ok}" = "YES" ]] \
-        && [[ "${cmeps_rst_ok}" = "YES" ]] \
-        && [[ "${ww3_rst_ok}" = "YES" ]]; then
+    # Since warm start is false, we cannot do IAU
+    DOIAU="NO"
+    IAU_OFFSET=0
+    model_start_date_current_cycle=${current_cycle}
 
-        if [[ -f "${COM_ATMOS_RESTART}/coupler.res" ]]; then
-          mv "${COM_ATMOS_RESTART}/coupler.res" "${COM_ATMOS_RESTART}/coupler.res.old"
-        fi
+    # It is still possible that a restart is available from a previous forecast attempt
+    # So we have to continue checking for restarts
+  fi
 
-        SDATE="${PDYS}${cycs}"
-        CDATE_RST="${SDATE}"
-        RERUN="YES"
-        echo "Restarts have been found for CDATE_RST=${CDATE_RST}, returning with 'RERUN=YES'"
-        break
-      fi
+  # Let's assume this cycle was not run before and hence this is not a RERUN
+  RERUN="NO"
 
-    done
+  # RERUN is only available for RUN=gfs|gefs. It is not available for RUN=gdas|enkfgdas|enkfgfs
+  if [[ "${RUN}" =~ "gdas" ]] || [[ "${RUN}" == "enkfgfs" ]]; then
+    echo "RERUN is not available for RUN='${RUN}'"
+    return 0
   fi
-  #-------------------------------------------------------
-}
 
-WW3_det(){
-  echo "SUB ${FUNCNAME[0]}: Run type determination for WW3"
-}
+  # However, if this cycle was run before, a DATArestart/FV3_RESTART directory must exist with data in it.
+  local file_array nrestarts
+  # shellcheck disable=SC2312
+  mapfile -t file_array < <(find "${DATArestart}/FV3_RESTART" -name "????????.??0000.coupler.res" | sort)
+  nrestarts=${#file_array[@]}
+  if (( nrestarts == 0 )); then
+    echo "No restarts found in '${DATArestart}/FV3_RESTART', RERUN='${RERUN}'"
+    return 0
+  fi
 
-CICE_det(){
-  echo "SUB ${FUNCNAME[0]}: Run type determination for CICE"
-}
+  # Look in reverse order of file_array to determine available restart times
+  local ii filepath filename
+  local rdate seconds
+  local fv3_rst_ok cmeps_rst_ok mom6_rst_ok cice6_rst_ok ww3_rst_ok
+  for (( ii=nrestarts-1; ii>=0; ii-- )); do
+
+    filepath="${file_array[ii]}"
+    filename=$(basename "${filepath}")  # Strip path from YYYYMMDD.HH0000.coupler.res
+    rdate="${filename:0:8}${filename:9:2}"  # match YYYYMMDD and HH of YYYYMMDD.HH0000.coupler.res
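+    # e.g. filename "20210323.060000.coupler.res" yields rdate="2021032306"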
+
+    # Assume all is well; all restarts are available
+    fv3_rst_ok="YES"
+    cmeps_rst_ok="YES"
+    mom6_rst_ok="YES"
+    cice6_rst_ok="YES"
+    ww3_rst_ok="YES"
+
+    # Check for FV3 restart availability
+    if [[ ! -f "${DATArestart}/FV3_RESTART/${rdate:0:8}.${rdate:8:2}0000.coupler.res" ]]; then
+      # TODO: add checks for other FV3 restarts as well
+      fv3_rst_ok="NO"
+    fi
+
+    # Check for CMEPS and MOM6 restart availability
+    if [[ "${cplflx}" == ".true." ]]; then
+      seconds=$(to_seconds "${rdate:8:2}0000")
+      if [[ ! -f "${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}.nc" ]]; then
+        cmeps_rst_ok="NO"
+      fi
+      if [[ ! -f "${DATArestart}/MOM6_RESTART/${rdate:0:8}.${rdate:8:2}0000.MOM.res.nc" ]]; then
+        # TODO: add checks for other MOM6 restarts as well
+        mom6_rst_ok="NO"
+      fi
+    fi
 
-MOM6_det(){
-  echo "SUB ${FUNCNAME[0]}: Run type determination for MOM6"
+    # Check for CICE6 restart availability
+    if [[ "${cplice}" == ".true." ]]; then
+      if [[ ! -f "${DATArestart}/CICE_RESTART/cice_model.res.${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}.nc" ]]; then
+        cice6_rst_ok="NO"
+      fi
+    fi
+
+    # Check for WW3 restart availability
+    if [[ "${cplwav}" == ".true." ]]; then
+      local ww3_grid
+      for ww3_grid in ${waveGRD} ; do
+        if [[ ! -f "${DATArestart}/WW3_RESTART/${rdate:0:8}.${rdate:8:2}0000.restart.${ww3_grid}" ]]; then
+          ww3_rst_ok="NO"
+        fi
+      done
+    fi
+
+    # Collective check
+    if [[ "${fv3_rst_ok}" == "YES" ]] \
+      && [[ "${cmeps_rst_ok}" == "YES" ]] \
+      && [[ "${mom6_rst_ok}" == "YES" ]] \
+      && [[ "${cice6_rst_ok}" == "YES" ]] \
+      && [[ "${ww3_rst_ok}" == "YES" ]]; then
+      RERUN="YES"
+      RERUN_DATE="${rdate}"
+      warm_start=".true."
+      echo "All restarts found for '${RERUN_DATE}', RERUN='${RERUN}', warm_start='${warm_start}'"
+      break
+    fi
+
+  done  # loop over nrestarts
 }
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index f47755f854..7de31d6235 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -1,999 +1,631 @@
 #! /usr/bin/env bash
 
-#####
-## This script defines functions for data I/O and namelist.
-## different applications could share the same function
-## or have their own.
-##
-## This is a child script of modular
-## forecast script. This script is function definition.
-## need to call these functions in the parent script
-## for execution.
-#####
-
-FV3_postdet(){
+# Disable variable not used warnings
+# shellcheck disable=SC2034
+FV3_postdet() {
   echo "SUB ${FUNCNAME[0]}: Entering for RUN = ${RUN}"
 
   echo "warm_start = ${warm_start}"
   echo "RERUN = ${RERUN}"
 
-  #-------------------------------------------------------
-  if [[ "${warm_start}" = ".true." ]] || [[ "${RERUN}" = "YES" ]]; then
-    #-------------------------------------------------------
-    #.............................
-    if [[ ${RERUN} = "NO" ]]; then
-      #.............................
-
-      # Link all restart files from previous cycle
-      for file in "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000."*.nc; do
-        file2=$(echo $(basename "${file}"))
-        file2=$(echo "${file2}" | cut -d. -f3-) # remove the date from file
-        fsuf=$(echo "${file2}" | cut -d. -f1)
-        ${NLN} "${file}" "${DATA}/INPUT/${file2}"
-      done
+  #============================================================================
+  # First copy initial conditions
+  # cold start case
+  if [[ "${warm_start}" == ".false." ]]; then
+
+    # Get list of FV3 cold start files
+    local file_list
+    file_list=$(FV3_coldstarts)
+    echo "Copying FV3 cold start files for 'RUN=${RUN}' at '${current_cycle}' from '${COMIN_ATMOS_INPUT}'"
+    local fv3_file
+    for fv3_file in ${file_list}; do
+      ${NCP} "${COMIN_ATMOS_INPUT}/${fv3_file}" "${DATA}/INPUT/${fv3_file}" \
+      || ( echo "FATAL ERROR: Unable to copy FV3 IC, ABORT!"; exit 1 )
+    done
 
-      # Replace sfc_data with sfcanl_data restart files from current cycle (if found)
-      if [[ "${MODE}" = "cycled" ]] && [[ "${CCPP_SUITE}" = "FV3_GFS_v16" ]]; then  # TODO: remove if statement when global_cycle can handle NOAHMP
-        for file in "${COM_ATMOS_RESTART}/${sPDY}.${scyc}0000."*.nc; do
-          file2=$(basename "${file}")
-          file2=$(echo "${file2}" | cut -d. -f3-) # remove the date from file
-          fsufanl=$(echo "${file2}" | cut -d. -f1)
-          file2=$(echo "${file2}" | sed -e "s/sfcanl_data/sfc_data/g")
-          rm -f "${DATA}/INPUT/${file2}"
-          ${NLN} "${file}" "${DATA}/INPUT/${file2}"
-        done
-      fi
+  # warm start case
+  elif [[ "${warm_start}" == ".true." ]]; then
+
+    # Determine restart date and directory containing restarts
+    local restart_date restart_dir
+    if [[ "${RERUN}" == "YES" ]]; then
+      restart_date="${RERUN_DATE}"
+      restart_dir="${DATArestart}/FV3_RESTART"
+    else  # "${RERUN}" == "NO"
+      restart_date="${model_start_date_current_cycle}"
+      restart_dir="${COMIN_ATMOS_RESTART_PREV}"
+    fi
 
-      # Need a coupler.res when doing IAU
-      if [[ ${DOIAU} = "YES" ]]; then
-        rm -f "${DATA}/INPUT/coupler.res"
-        cat >> "${DATA}/INPUT/coupler.res" << EOF
-        2        (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)
-        ${gPDY:0:4}  ${gPDY:4:2}  ${gPDY:6:2}  ${gcyc}     0     0        Model start time:   year, month, day, hour, minute, second
-        ${sPDY:0:4}  ${sPDY:4:2}  ${sPDY:6:2}  ${scyc}     0     0        Current model time: year, month, day, hour, minute, second
-EOF
-      fi
+    # Get list of FV3 restart files
+    local file_list
+    file_list=$(FV3_restarts)
+    echo "Copying FV3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'"
+    local fv3_file restart_file
+    for fv3_file in ${file_list}; do
+      restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${fv3_file}"
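+      # e.g. restart_date=2021032306 and fv3_file=coupler.res give restart_file="20210323.060000.coupler.res"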
+      ${NCP} "${restart_dir}/${restart_file}" "${DATA}/INPUT/${fv3_file}" \
+      || ( echo "FATAL ERROR: Unable to copy FV3 IC, ABORT!"; exit 1 )
+    done
 
-      # Link increments
-      if [[ ${DOIAU} = "YES" ]]; then
-        for i in $(echo "${IAUFHRS}" | sed "s/,/ /g" | rev); do
-          incfhr=$(printf %03i "${i}")
-          if [[ ${incfhr} = "006" ]]; then
-            increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc"
-          else
-            increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc"
-          fi
-          if [[ ! -f ${increment_file} ]]; then
-            echo "ERROR: DOIAU = ${DOIAU}, but missing increment file for fhr ${incfhr} at ${increment_file}"
-            echo "Abort!"
-            exit 1
-          fi
-          ${NLN} "${increment_file}" "${DATA}/INPUT/fv_increment${i}.nc"
-          IAU_INC_FILES="'fv_increment${i}.nc',${IAU_INC_FILES:-}"
-        done
-        read_increment=".false."
-        res_latlon_dynamics=""
-      else
-        increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc"
-        if [[ -f ${increment_file} ]]; then
-          ${NLN} "${increment_file}" "${DATA}/INPUT/fv3_increment.nc"
-          read_increment=".true."
-          res_latlon_dynamics="fv3_increment.nc"
+    if [[ "${RERUN}" != "YES" ]]; then
+      # Replace sfc_data with sfcanl_data restart files from current cycle (if found)
+      local nn
+      for (( nn = 1; nn <= ntiles; nn++ )); do
+        if [[ -f "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" ]]; then
+          rm -f "${DATA}/INPUT/sfc_data.tile${nn}.nc"
+          ${NCP} "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" \
+                 "${DATA}/INPUT/sfc_data.tile${nn}.nc"
+        else
+          echo "'sfcanl_data.tile1.nc' not found in '${COMOUT_ATMOS_RESTART}', using 'sfc_data.tile1.nc'"
+          break
         fi
-      fi
-
-    #.............................
-    else  ##RERUN
-      export warm_start=".true."
-      PDYT="${CDATE_RST:0:8}"
-      cyct="${CDATE_RST:8:2}"
-      for file in "${COM_ATMOS_RESTART}/${PDYT}.${cyct}0000."*; do
-        file2=$(basename "${file}")
-        file2=$(echo "${file2}" | cut -d. -f3-)
-        ${NLN} "${file}" "${DATA}/INPUT/${file2}"
       done
+    fi  # if [[ "${RERUN}" != "YES" ]]; then
 
-      local hour_rst=$(nhour "${CDATE_RST}" "${current_cycle}")
-      IAU_FHROT=$((IAU_OFFSET+hour_rst))
-      if [[ ${DOIAU} = "YES" ]]; then
-        IAUFHRS=-1
-        # Ignore "not used" warning
-        # shellcheck disable=SC2034
-        IAU_DELTHRS=0
-        IAU_INC_FILES="''"
-      fi
-    fi
-    #.............................
-
-  else ## cold start
-    for file in "${COM_ATMOS_INPUT}/"*.nc; do
-      file2=$(basename "${file}")
-      fsuf="${file2:0:3}"
-      if [[ "${fsuf}" = "gfs" ]] || [[ "${fsuf}" = "sfc" ]]; then
-        ${NLN} "${file}" "${DATA}/INPUT/${file2}"
-      fi
-    done
-
-  fi
+  fi  # if [[ "${warm_start}" == ".true." ]]; then
 
-  nfiles=$(ls -1 "${DATA}/INPUT/"* | wc -l)
-  if [[ ${nfiles} -le 0 ]]; then
-    echo SUB "${FUNCNAME[0]}": Initial conditions must exist in "${DATA}/INPUT", ABORT!
-    exit 1
-  fi
+  #============================================================================
+  # Determine increment files when doing cold start
+  if [[ "${warm_start}" == ".false." ]]; then
 
-  # If doing IAU, change forecast hours
-  if [[ "${DOIAU}" = "YES" ]]; then
-    FHMAX=$((FHMAX+6))
-    if [[ ${FHMAX_HF} -gt 0 ]]; then
-      FHMAX_HF=$((FHMAX_HF+6))
+    if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+      IAU_FHROT=${half_window}  # Replay ICs start at the end of the assimilation window
+      if (( MEMBER == 0 )); then
+        inc_files=()
+      else
+        inc_files=("atminc.nc")
+        read_increment=".true."
+        res_latlon_dynamics="atminc.nc"
+      fi
+      local increment_file
+      for inc_file in "${inc_files[@]}"; do
+        increment_file="${COMIN_ATMOS_INPUT}/${RUN}.t${cyc}z.${inc_file}"
+        if [[ -f "${increment_file}" ]]; then
+          ${NCP} "${increment_file}" "${DATA}/INPUT/${inc_file}"
+        else
+          echo "FATAL ERROR: missing increment file '${increment_file}', ABORT!"
+          exit 1
+        fi
+      done
     fi
-  fi
 
-  #--------------------------------------------------------------------------
-  # Grid and orography data
+  # Determine IAU and increment files when doing warm start
+  elif [[ "${warm_start}" == ".true." ]]; then
 
-  FIXsfc=${FIXsfc:-"${FIXorog}/${CASE}/sfc"}
+    #--------------------------------------------------------------------------
+    if [[ "${RERUN}" == "YES" ]]; then
 
-  if [[ ${cplflx} = ".false." ]] ; then
-    ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc"
-  else
-    ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc"
-  fi
-
-  for n in $(seq 1 "${ntiles}"); do
-    ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/INPUT/oro_data.tile${n}.nc"
-    ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc"     "${DATA}/INPUT/${CASE}_grid.tile${n}.nc"
-  done
-
-  _suite_file="${HOMEgfs}/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml"
-  if [[ ! -f ${_suite_file} ]]; then
-    echo "FATAL: CCPP Suite file ${_suite_file} does not exist!"
-    exit 2
-  fi
-
-  # Scan suite file to determine whether it uses Noah-MP
-  if [[ $(grep noahmpdrv "${_suite_file}" | wc -l ) -gt 0 ]]; then
-    lsm="2"
-    lheatstrg=".false."
-    landice=".false."
-    iopt_dveg=${iopt_dveg:-"4"}
-    iopt_crs=${iopt_crs:-"2"}
-    iopt_btr=${iopt_btr:-"1"}
-    iopt_run=${iopt_run:-"1"}
-    iopt_sfc=${iopt_sfc:-"1"}
-    iopt_frz=${iopt_frz:-"1"}
-    iopt_inf=${iopt_inf:-"1"}
-    iopt_rad=${iopt_rad:-"3"}
-    iopt_alb=${iopt_alb:-"1"}
-    iopt_snf=${iopt_snf:-"4"}
-    iopt_tbot=${iopt_tbot:-"2"}
-    iopt_stc=${iopt_stc:-"3"}
-    IALB=${IALB:-2}
-    IEMS=${IEMS:-2}
-  else
-    lsm="1"
-    lheatstrg=".true."
-    landice=".true."
-    iopt_dveg=${iopt_dveg:-"1"}
-    iopt_crs=${iopt_crs:-"1"}
-    iopt_btr=${iopt_btr:-"1"}
-    iopt_run=${iopt_run:-"1"}
-    iopt_sfc=${iopt_sfc:-"1"}
-    iopt_frz=${iopt_frz:-"1"}
-    iopt_inf=${iopt_inf:-"1"}
-    iopt_rad=${iopt_rad:-"1"}
-    iopt_alb=${iopt_alb:-"2"}
-    iopt_snf=${iopt_snf:-"4"}
-    iopt_tbot=${iopt_tbot:-"2"}
-    iopt_stc=${iopt_stc:-"1"}
-    IALB=${IALB:-1}
-    IEMS=${IEMS:-1}
-  fi
-
-  # NoahMP table
-  local noahmptablefile="${HOMEgfs}/parm/ufs/noahmptable.tbl"
-  if [[ ! -f ${noahmptablefile} ]]; then
-    echo "FATAL ERROR: missing noahmp table file ${noahmptablefile}"
-    exit 1
-  else
-    ${NLN} "${noahmptablefile}" "${DATA}/noahmptable.tbl"
-  fi
-
-  # Files for GWD
-  ${NLN} "${FIXugwd}/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc"
-  for n in $(seq 1 "${ntiles}"); do
-    ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ls.tile${n}.nc" "${DATA}/INPUT/oro_data_ls.tile${n}.nc"
-    ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ss.tile${n}.nc" "${DATA}/INPUT/oro_data_ss.tile${n}.nc"
-  done
-
-  # GFS standard input data
-
-  ISOL=${ISOL:-2}
-  IAER=${IAER:-1011}
-  ICO2=${ICO2:-2}
-
-  if [[ ${new_o3forc:-YES} = YES ]]; then
-    O3FORC=ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
-  else
-    O3FORC=global_o3prdlos.f77
-  fi
-  H2OFORC=${H2OFORC:-"global_h2o_pltc.f77"}
-  ####
-  #  Copy CCN_ACTIVATE.BIN for Thompson microphysics
-  #  Thompson microphysics used when CCPP_SUITE set to FV3_GSD_v0 or FV3_GSD_noah
-  #  imp_physics should be 8:
-  ####
-  if [[ ${imp_physics} -eq 8 ]]; then
-    ${NLN} "${FIXam}/CCN_ACTIVATE.BIN"  "${DATA}/CCN_ACTIVATE.BIN"
-    ${NLN} "${FIXam}/freezeH2O.dat"     "${DATA}/freezeH2O.dat"
-    ${NLN} "${FIXam}/qr_acr_qgV2.dat"   "${DATA}/qr_acr_qgV2.dat"
-    ${NLN} "${FIXam}/qr_acr_qsV2.dat"   "${DATA}/qr_acr_qsV2.dat"
-  fi
-
-  ${NLN} "${FIXam}/${O3FORC}"                         "${DATA}/global_o3prdlos.f77"
-  ${NLN} "${FIXam}/${H2OFORC}"                        "${DATA}/global_h2oprdlos.f77"
-  ${NLN} "${FIXam}/global_solarconstant_noaa_an.txt"  "${DATA}/solarconstant_noaa_an.txt"
-  ${NLN} "${FIXam}/global_sfc_emissivity_idx.txt"     "${DATA}/sfc_emissivity_idx.txt"
-
-  ## merra2 aerosol climo
-  if [[ ${IAER} -eq "1011" ]]; then
-    for month in $(seq 1 12); do
-      MM=$(printf %02d "${month}")
-      ${NLN} "${FIXaer}/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc"
-    done
-    ${NLN} "${FIXlut}/optics_BC.v1_3.dat"  "${DATA}/optics_BC.dat"
-    ${NLN} "${FIXlut}/optics_OC.v1_3.dat"  "${DATA}/optics_OC.dat"
-    ${NLN} "${FIXlut}/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat"
-    ${NLN} "${FIXlut}/optics_SS.v3_3.dat"  "${DATA}/optics_SS.dat"
-    ${NLN} "${FIXlut}/optics_SU.v1_3.dat"  "${DATA}/optics_SU.dat"
-  fi
+      local restart_fhr
+      restart_fhr=$(nhour "${RERUN_DATE}" "${current_cycle}")
+      IAU_FHROT=$((IAU_OFFSET + restart_fhr))
+      if [[ "${DOIAU}" == "YES" ]]; then
+        IAUFHRS=-1
+        IAU_DELTHRS=0
+        IAU_INC_FILES="''"
+      fi
 
-  ${NLN} "${FIXam}/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt"
-  ${NLN} "${FIXam}/co2monthlycyc.txt"                 "${DATA}/co2monthlycyc.txt"
-  if [[ ${ICO2} -gt 0 ]]; then
-    for file in $(ls "${FIXam}/fix_co2_proj/global_co2historicaldata"*) ; do
-      ${NLN} "${file}" "${DATA}/$(basename "${file//global_}")"
-    done
-  fi
+    #--------------------------------------------------------------------------
+    else  # "${RERUN}" == "NO"
 
-  ${NLN} "${FIXam}/global_climaeropac_global.txt"     "${DATA}/aerosol.dat"
-  if [[ ${IAER} -gt 0 ]] ; then
-    for file in $(ls "${FIXam}/global_volcanic_aerosols"*) ; do
-      ${NLN} "${file}" "${DATA}/$(basename "${file//global_}")"
-    done
-  fi
+      # Need a coupler.res that is consistent with the model start time
+      if [[ "${DOIAU:-NO}" == "YES" ]]; then
+        local model_start_time="${previous_cycle}"
+      else
+        local model_start_time="${current_cycle}"
+      fi
+      local model_current_time="${model_start_date_current_cycle}"
+      rm -f "${DATA}/INPUT/coupler.res"
+      cat >> "${DATA}/INPUT/coupler.res" << EOF
+      3        (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)
+      ${model_start_time:0:4}  ${model_start_time:4:2}  ${model_start_time:6:2}  ${model_start_time:8:2}  0  0        Model start time: year, month, day, hour, minute, second
+      ${model_current_time:0:4}  ${model_current_time:4:2}  ${model_current_time:6:2}  ${model_current_time:8:2}  0  0        Current model time: year, month, day, hour, minute, second
+EOF
 
-  # inline post fix files
-  if [[ ${WRITE_DOPOST} = ".true." ]]; then
-    ${NLN} "${PARM_POST}/post_tag_gfs${LEVS}"             "${DATA}/itag"
-    ${NLN} "${FLTFILEGFS:-${PARM_POST}/postxconfig-NT-GFS-TWO.txt}"           "${DATA}/postxconfig-NT.txt"
-    ${NLN} "${FLTFILEGFSF00:-${PARM_POST}/postxconfig-NT-GFS-F00-TWO.txt}"    "${DATA}/postxconfig-NT_FH00.txt"
-    ${NLN} "${POSTGRB2TBL:-${PARM_POST}/params_grib2_tbl_new}"                "${DATA}/params_grib2_tbl_new"
-  fi
+      # Create an array of increment files
+      local inc_files inc_file iaufhrs iaufhr delimiter
+      if [[ "${DOIAU}" == "YES" ]]; then
+        # create an array of inc_files for each IAU hour
+        IFS=',' read -ra iaufhrs <<< "${IAUFHRS}"
+        inc_files=()
+        delimiter=""
+        IAU_INC_FILES=""
+        for iaufhr in "${iaufhrs[@]}"; do
+          if (( iaufhr == 6 )); then
+            inc_file="atminc.nc"
+          else
+            inc_file="atmi$(printf %03i "${iaufhr}").nc"
+          fi
+          inc_files+=("${inc_file}")
+          IAU_INC_FILES="${IAU_INC_FILES}${delimiter}'${inc_file}'"
+          delimiter=","
+        done
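+        # For example (illustrative), IAUFHRS="3,6,9" yields inc_files=(atmi003.nc atminc.nc atmi009.nc)
+        # and IAU_INC_FILES="'atmi003.nc','atminc.nc','atmi009.nc'"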
+      else  # "${DOIAU}" == "NO"
+        inc_files=("atminc.nc")
+        read_increment=".true."
+        res_latlon_dynamics="atminc.nc"
+        if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+          IAU_FHROT=${half_window}  # Replay ICs start at the end of the assimilation window
+          # Control member has no perturbation
+          if (( MEMBER == 0 )); then
+            inc_files=()
+            read_increment=".false."
+            res_latlon_dynamics='""'
+          fi
+        fi
+      fi
 
-  #------------------------------------------------------------------
-  # changeable parameters
-  # dycore definitions
-  res="${CASE:1}"
-  resp=$((res+1))
-  npx=${resp}
-  npy=${resp}
-  npz=$((LEVS-1))
-  io_layout="1,1"
-  #ncols=$(( (${npx}-1)*(${npy}-1)*3/2 ))
-
-  # spectral truncation and regular grid resolution based on FV3 resolution
-  JCAP_CASE=$((2*res-2))
-  LONB_CASE=$((4*res))
-  LATB_CASE=$((2*res))
-
-  JCAP=${JCAP:-${JCAP_CASE}}
-  LONB=${LONB:-${LONB_CASE}}
-  LATB=${LATB:-${LATB_CASE}}
-
-  LONB_IMO=${LONB_IMO:-${LONB_CASE}}
-  LATB_JMO=${LATB_JMO:-${LATB_CASE}}
-
-  # Fix files
-  FNGLAC=${FNGLAC:-"${FIXam}/global_glacier.2x2.grb"}
-  FNMXIC=${FNMXIC:-"${FIXam}/global_maxice.2x2.grb"}
-  FNTSFC=${FNTSFC:-"${FIXam}/RTGSST.1982.2012.monthly.clim.grb"}
-  FNSNOC=${FNSNOC:-"${FIXam}/global_snoclim.1.875.grb"}
-  FNZORC=${FNZORC:-"igbp"}
-  FNAISC=${FNAISC:-"${FIXam}/IMS-NIC.blended.ice.monthly.clim.grb"}
-  FNALBC2=${FNALBC2:-"${FIXsfc}/${CASE}.mx${OCNRES}.facsf.tileX.nc"}
-  FNTG3C=${FNTG3C:-"${FIXsfc}/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"}
-  FNVEGC=${FNVEGC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
-  FNMSKH=${FNMSKH:-"${FIXam}/global_slmask.t1534.3072.1536.grb"}
-  FNVMNC=${FNVMNC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
-  FNVMXC=${FNVMXC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
-  FNSLPC=${FNSLPC:-"${FIXsfc}/${CASE}.mx${OCNRES}.slope_type.tileX.nc"}
-  FNALBC=${FNALBC:-"${FIXsfc}/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"}
-  FNVETC=${FNVETC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"}
-  FNSOTC=${FNSOTC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_type.tileX.nc"}
-  FNSOCC=${FNSOCC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_color.tileX.nc"}
-  FNABSC=${FNABSC:-"${FIXsfc}/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"}
-  FNSMCC=${FNSMCC:-"${FIXam}/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"}
-
-  # If the appropriate resolution fix file is not present, use the highest resolution available (T1534)
-  [[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXam}/global_soilmgldas.statsgo.t1534.3072.1536.grb"
-
-  # NSST Options
-  # nstf_name contains the NSST related parameters
-  # nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
-  # nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
-  # nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
-  # nstf_name(4) : ZSEA1 (in mm) : 0
-  # nstf_name(5) : ZSEA2 (in mm) : 0
-  # nst_anl      : .true. or .false., NSST analysis over lake
-  NST_MODEL=${NST_MODEL:-0}
-  NST_SPINUP=${NST_SPINUP:-0}
-  NST_RESV=${NST_RESV-0}
-  ZSEA1=${ZSEA1:-0}
-  ZSEA2=${ZSEA2:-0}
-  nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"}
-  nst_anl=${nst_anl:-".false."}
-
-  # blocking factor used for threading and general physics performance
-  #nyblocks=$(expr \( $npy - 1 \) \/ $layout_y )
-  #nxblocks=$(expr \( $npx - 1 \) \/ $layout_x \/ 32)
-  #if [ $nxblocks -le 0 ]; then nxblocks=1 ; fi
-  blocksize=${blocksize:-32}
-
-  # the pre-conditioning of the solution
-  # =0 implies no pre-conditioning
-  # >0 means new adiabatic pre-conditioning
-  # <0 means older adiabatic pre-conditioning
-  na_init=${na_init:-1}
-  [[ ${warm_start} = ".true." ]] && na_init=0
-
-  # variables for controlling initialization of NCEP/NGGPS ICs
-  filtered_terrain=${filtered_terrain:-".true."}
-  gfs_dwinds=${gfs_dwinds:-".true."}
-
-  # various debug options
-  no_dycore=${no_dycore:-".false."}
-  dycore_only=${adiabatic:-".false."}
-  chksum_debug=${chksum_debug:-".false."}
-  print_freq=${print_freq:-6}
-
-  if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic options
-    hydrostatic=".false."
-    phys_hydrostatic=".false."     # enable heating in hydrostatic balance in non-hydrostatic simulation
-    use_hydro_pressure=".false."   # use hydrostatic pressure for physics
-    if [[ ${warm_start} = ".true." ]]; then
-      make_nh=".false."              # restarts contain non-hydrostatic state
-    else
-      make_nh=".true."               # re-initialize non-hydrostatic state
-    fi
+      local increment_file
+      for inc_file in "${inc_files[@]}"; do
+        increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}"
+        if [[ -f "${increment_file}" ]]; then
+          ${NCP} "${increment_file}" "${DATA}/INPUT/${inc_file}"
+        else
+          echo "FATAL ERROR: missing increment file '${increment_file}', ABORT!"
+          exit 1
+        fi
+      done
 
-  else # hydrostatic options
-    hydrostatic=".true."
-    phys_hydrostatic=".false."     # ignored when hydrostatic = T
-    use_hydro_pressure=".false."   # ignored when hydrostatic = T
-    make_nh=".false."              # running in hydrostatic mode
-  fi
+    fi  # if [[ "${RERUN}" == "YES" ]]; then
+    #--------------------------------------------------------------------------
 
-  # Conserve total energy as heat globally
-  consv_te=${consv_te:-1.} # range 0.-1., 1. will restore energy to orig. val. before physics
-
-  # time step parameters in FV3
-  k_split=${k_split:-2}
-  n_split=${n_split:-5}
-
-  if [[ "${MONO:0:4}" = "mono" ]]; then # monotonic options
-    d_con=${d_con_mono:-"0."}
-    do_vort_damp=".false."
-    if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic
-      hord_mt=${hord_mt_nh_mono:-"10"}
-      hord_xx=${hord_xx_nh_mono:-"10"}
-    else # hydrostatic
-      hord_mt=${hord_mt_hydro_mono:-"10"}
-      hord_xx=${hord_xx_hydro_mono:-"10"}
-    fi
+  fi  # if [[ "${warm_start}" == ".true." ]]; then
+  #============================================================================
 
-  else # non-monotonic options
-    d_con=${d_con_nonmono:-"1."}
-    do_vort_damp=".true."
-    if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic
-      hord_mt=${hord_mt_nh_nonmono:-"5"}
-      hord_xx=${hord_xx_nh_nonmono:-"5"}
-    else # hydrostatic
-      hord_mt=${hord_mt_hydro_nonmono:-"10"}
-      hord_xx=${hord_xx_hydro_nonmono:-"10"}
+  #============================================================================
+  # If doing IAU, change forecast hours
+  if [[ "${DOIAU:-NO}" == "YES" ]]; then
+    FHMAX=$((FHMAX + 6))
+    if (( FHMAX_HF > 0 )); then
+      FHMAX_HF=$((FHMAX_HF + 6))
     fi
   fi
+  #============================================================================
 
-  if [[ "${MONO:0:4}" != "mono" ]] && [[ "${TYPE}" = "nh" ]]; then
-    vtdm4=${vtdm4_nh_nonmono:-"0.06"}
-  else
-    vtdm4=${vtdm4:-"0.05"}
-  fi
+  #============================================================================
+  # If warm starting from restart files, set the following flags
+  if [[ "${warm_start}" == ".true." ]]; then
 
-  if [[ ${warm_start} = ".true." ]]; then # warm start from restart file
+    # start from restart file
     nggps_ic=".false."
     ncep_ic=".false."
     external_ic=".false."
     mountain=".true."
-    if [[ ${read_increment} = ".true." ]]; then # add increment on the fly to the restarts
-      res_latlon_dynamics="fv3_increment.nc"
-    else
-      res_latlon_dynamics='""'
-    fi
 
-  else # CHGRES'd GFS analyses
-    nggps_ic=${nggps_ic:-".true."}
-    ncep_ic=${ncep_ic:-".false."}
-    external_ic=".true."
-    mountain=".false."
-    read_increment=".false."
-    res_latlon_dynamics='""'
-  fi
+    # restarts contain non-hydrostatic state
+    [[ "${TYPE}" == "nh" ]] && make_nh=".false."
 
-  # Stochastic Physics Options
-  if [[ ${SET_STP_SEED:-"YES"} = "YES" ]]; then
-    ISEED_SKEB=$((current_cycle*1000 + MEMBER*10 + 1))
-    ISEED_SHUM=$((current_cycle*1000 + MEMBER*10 + 2))
-    ISEED_SPPT=$((current_cycle*1000 + MEMBER*10 + 3))
-    ISEED_CA=$(( (current_cycle*1000 + MEMBER*10 + 4) % 2147483647 ))
-    ISEED_LNDP=$(( (current_cycle*1000 + MEMBER*10 + 5) % 2147483647 ))
-  else
-    ISEED=${ISEED:-0}
-  fi
-  if [[ ${DO_SKEB} = "YES" ]]; then
-    do_skeb=".true."
-  fi
-  if [[ ${DO_SPPT} = "YES" ]]; then
-    do_sppt=".true."
-  fi
-  if [[ ${DO_SHUM} = "YES" ]]; then
-    do_shum=".true."
-  fi
-  if [[ ${DO_LAND_PERT} = "YES" ]]; then
-    lndp_type=${lndp_type:-2}
-    LNDP_TAU=${LNDP_TAU:-21600}
-    LNDP_SCALE=${LNDP_SCALE:-500000}
-    ISEED_LNDP=${ISEED_LNDP:-${ISEED}}
-    lndp_var_list=${lndp_var_list:-"'smc', 'vgf',"}
-    lndp_prt_list=${lndp_prt_list:-"0.2,0.1"}
-    n_var_lndp=$(echo "${lndp_var_list}" | wc -w)
-  fi
-  JCAP_STP=${JCAP_STP:-${JCAP_CASE}}
-  LONB_STP=${LONB_STP:-${LONB_CASE}}
-  LATB_STP=${LATB_STP:-${LATB_CASE}}
-  cd "${DATA}" || exit 1
-  if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi
-  if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi
+    # do not pre-condition the solution
+    na_init=0
+
+  fi  # warm_start == .true.
+  #============================================================================
+
+  #============================================================================
   if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then
+    local FH2 FH3
     for fhr in ${FV3_OUTPUT_FH}; do
-      local FH3=$(printf %03i "${fhr}")
-      local FH2=$(printf %02i "${fhr}")
-      ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
-      ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
-      ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
-      if [[ ${WRITE_DOPOST} = ".true." ]]; then
-        ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}"
-        ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}"
+      FH3=$(printf %03i "${fhr}")
+      FH2=$(printf %02i "${fhr}")
+      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
+      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
+      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
+      if [[ "${WRITE_DOPOST}" == ".true." ]]; then
+        ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}"
+        ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}"
+        if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+          ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.grb2f${FH3}" "GFSPRS.GrbF${FH2}.nest02"
+          ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}.nest02"
+        fi
       fi
     done
-  else  # TODO: Is this even valid anymore?
-    for n in $(seq 1 "${ntiles}"); do
-      ${NLN} "nggps2d.tile${n}.nc"       "${COM_ATMOS_HISTORY}/nggps2d.tile${n}.nc"
-      ${NLN} "nggps3d.tile${n}.nc"       "${COM_ATMOS_HISTORY}/nggps3d.tile${n}.nc"
-      ${NLN} "grid_spec.tile${n}.nc"     "${COM_ATMOS_HISTORY}/grid_spec.tile${n}.nc"
-      ${NLN} "atmos_static.tile${n}.nc"  "${COM_ATMOS_HISTORY}/atmos_static.tile${n}.nc"
-      ${NLN} "atmos_4xdaily.tile${n}.nc" "${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${n}.nc"
-    done
   fi
+  #============================================================================
 }
 
-FV3_nml(){
+FV3_nml() {
   # namelist output for a certain component
   echo "SUB ${FUNCNAME[0]}: Creating name lists and model configure file for FV3"
-  # Call child scripts in current script directory
-  source "${HOMEgfs}/ush/parsing_namelists_FV3.sh"
-  FV3_namelists
+
+  source "${USHgfs}/parsing_namelists_FV3.sh"
+  source "${USHgfs}/parsing_model_configure_FV3.sh"
+
+  # Call the appropriate namelist functions
+  if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+    source "${USHgfs}/parsing_namelists_FV3_nest.sh"
+    FV3_namelists_nest global
+    FV3_namelists_nest nest
+  else
+    FV3_namelists
+  fi
+  FV3_model_configure
+
   echo "SUB ${FUNCNAME[0]}: FV3 name lists and model configure file created"
 }
 
 FV3_out() {
   echo "SUB ${FUNCNAME[0]}: copying output data for FV3"
 
-  # Copy FV3 restart files
-  if [[ ${RUN} =~ "gdas" ]]; then
-    cd "${DATA}/RESTART"
-    mkdir -p "${COM_ATMOS_RESTART}"
-    local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-    while [[ ${idate} -le ${forecast_end_cycle} ]]; do
-      for file in "${idate:0:8}.${idate:8:2}0000."*; do
-        ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}"
-      done
-      local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
+  # Copy configuration files
+  ${NCP} "${DATA}/input.nml" "${COMOUT_CONF}/ufs.input.nml"
+  ${NCP} "${DATA}/model_configure" "${COMOUT_CONF}/ufs.model_configure"
+  ${NCP} "${DATA}/ufs.configure" "${COMOUT_CONF}/ufs.ufs.configure"
+  ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table"
+
+  # Determine the dates for restart files to be copied to COM
+  local restart_date restart_dates
+  restart_dates=()
+
+  # Copy restarts in the assimilation window for RUN=gdas|enkfgdas|enkfgfs
+  if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then
+    restart_date="${model_start_date_next_cycle}"
+    while (( restart_date <= forecast_end_cycle )); do
+      restart_dates+=("${restart_date:0:8}.${restart_date:8:2}0000")
+      restart_date=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H)
     done
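+    # restart_dates now holds YYYYMMDD.HH0000 stamps every restart_interval hours,
+    # from model_start_date_next_cycle through forecast_end_cycle (e.g. 20210323.060000 20210323.120000 ...)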
-  else
-    # No need to copy FV3 restart files when RUN=gfs or gefs
-    ${NCP} "${DATA}/input.nml" "${COM_CONF}/ufs.input.nml"
-    ${NCP} "${DATA}/model_configure" "${COM_CONF}/ufs.model_configure"
-    ${NCP} "${DATA}/ufs.configure" "${COM_CONF}/ufs.ufs.configure"
-    ${NCP} "${DATA}/diag_table" "${COM_CONF}/ufs.diag_table"  
+  elif [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then # Copy restarts at the end of the forecast segment for RUN=gfs|gefs
+    if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+      restart_dates+=("${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000")
+    fi
   fi
-  echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied"
-}
-
-WW3_postdet() {
-  echo "SUB ${FUNCNAME[0]}: Linking input data for WW3"
-  COMPONENTwave=${COMPONENTwave:-${RUN}wave}
 
-  #Link mod_def files for wave grids
-  if [[ ${waveMULTIGRID} = ".true." ]]; then
-    local array=(${WAVECUR_FID} ${WAVEICE_FID} ${WAVEWND_FID} ${waveuoutpGRD} ${waveGRD} ${waveesmfGRD})
-    echo "Wave Grids: ${WAVECUR_FID} ${WAVEICE_FID} ${WAVEWND_FID} ${waveuoutpGRD} ${waveGRD} ${waveesmfGRD}"
-    local grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ')
+  ### Check that there are restart files to copy
+  if [[ ${#restart_dates[@]} -gt 0 ]]; then
+    # Get list of FV3 restart files
+    local file_list fv3_file
+    file_list=$(FV3_restarts)
 
-    for wavGRD in ${grdALL}; do
-      ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${wavGRD}" "${DATA}/mod_def.${wavGRD}"
+    # Copy restarts for the dates collected above to COM
+    for restart_date in "${restart_dates[@]}"; do
+      echo "Copying FV3 restarts for 'RUN=${RUN}' at ${restart_date}"
+      for fv3_file in ${file_list}; do
+        ${NCP} "${DATArestart}/FV3_RESTART/${restart_date}.${fv3_file}" \
+               "${COMOUT_ATMOS_RESTART}/${restart_date}.${fv3_file}"
+      done
     done
-  else
-    #if shel, only 1 waveGRD which is linked to mod_def.ww3
-    ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${waveGRD}" "${DATA}/mod_def.ww3"
-  fi
-
 
-  #if wave mesh is not the same as the ocn/ice mesh, linkk it in the file
-  local comparemesh=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"}
-  if [[ "${MESH_WAV}" = "${comparemesh}" ]]; then
-    echo "Wave is on same mesh as ocean/ice"
-  else
-    ${NLN} "${FIXwave}/${MESH_WAV}" "${DATA}/"
+    echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied"
   fi
+}
 
-  export wavprfx=${RUNwave}${WAV_MEMBER:-}
+# Disable variable not used warnings
+# shellcheck disable=SC2034
+WW3_postdet() {
+  echo "SUB ${FUNCNAME[0]}: Linking input data for WW3"
 
-  #Copy initial condition files:
-  for wavGRD in ${waveGRD} ; do
-    if [[ "${warm_start}" = ".true." ]] || [[ "${RERUN}" = "YES" ]]; then
-      if [[ ${RERUN} = "NO" ]]; then
-        local waverstfile="${COM_WAVE_RESTART_PREV}/${sPDY}.${scyc}0000.restart.${wavGRD}"
-      else
-        local waverstfile="${COM_WAVE_RESTART}/${PDYT}.${cyct}0000.restart.${wavGRD}"
-      fi
+  local ww3_grid
+  # Copy initial condition files:
+  if [[ "${warm_start}" == ".true." ]]; then
+    local restart_date restart_dir
+    if [[ "${RERUN}" == "YES" ]]; then
+      restart_date="${RERUN_DATE}"
+      restart_dir="${DATArestart}/WW3_RESTART"
     else
-      local waverstfile="${COM_WAVE_RESTART}/${sPDY}.${scyc}0000.restart.${wavGRD}"
+      restart_date="${model_start_date_current_cycle}"
+      restart_dir="${COMIN_WAVE_RESTART_PREV}"
     fi
-    if [[ ! -f ${waverstfile} ]]; then
-      if [[ ${RERUN} = "NO" ]]; then
-        echo "WARNING: NON-FATAL ERROR wave IC is missing, will start from rest"
-      else
-        echo "ERROR: Wave IC is missing in RERUN, exiting."
-        exit 1
+    echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'"
+    local ww3_restart_file
+    for ww3_grid in ${waveGRD} ; do
+      ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.${ww3_grid}"
+      if [[ ! -f "${ww3_restart_file}" ]]; then
+        echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!"
+        if [[ "${RERUN}" == "YES" ]]; then
+          # In the case of a RERUN, the WW3 restart file is required
+          echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!"
+          exit 1
+        fi
       fi
-    else
-      if [[ ${waveMULTIGRID} = ".true." ]]; then
-        ${NLN} "${waverstfile}" "${DATA}/restart.${wavGRD}"
+      if [[ "${waveMULTIGRID}" == ".true." ]]; then
+        ${NCP} "${ww3_restart_file}" "${DATA}/restart.${ww3_grid}" \
+        || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 )
       else
-        ${NLN} "${waverstfile}" "${DATA}/restart.ww3"
+        ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \
+        || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 )
       fi
-    fi
-  done
-
-  if [[ ${waveMULTIGRID} = ".true." ]]; then
-    for wavGRD in ${waveGRD} ; do
-      ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${wavGRD}.${PDY}${cyc}" "log.${wavGRD}"
     done
-  else
-    ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3"
-  fi
-
-  if [[ "${WW3ICEINP}" = "YES" ]]; then
-    local wavicefile="${COM_WAVE_PREP}/${RUNwave}.${WAVEICE_FID}.${cycle}.ice"
-    if [[ ! -f ${wavicefile} ]]; then
-      echo "ERROR: WW3ICEINP = ${WW3ICEINP}, but missing ice file"
-      echo "Abort!"
-      exit 1
-    fi
-    ${NLN} "${wavicefile}" "${DATA}/ice.${WAVEICE_FID}"
-  fi
-
-  if [[ "${WW3CURINP}" = "YES" ]]; then
-    local wavcurfile="${COM_WAVE_PREP}/${RUNwave}.${WAVECUR_FID}.${cycle}.cur"
-    if [[ ! -f ${wavcurfile} ]]; then
-      echo "ERROR: WW3CURINP = ${WW3CURINP}, but missing current file"
-      echo "Abort!"
-      exit 1
-    fi
-    ${NLN} "${wavcurfile}" "${DATA}/current.${WAVECUR_FID}"
-  fi
-
-  if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi
+  else  # cold start
+    echo "WW3 will start from rest!"
+  fi  # [[ "${warm_start}" == ".true." ]]
 
   # Link output files
-  cd "${DATA}"
-  if [[ ${waveMULTIGRID} = ".true." ]]; then
-    ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3"
+  local wavprfx="${RUN}wave${WAV_MEMBER:-}"
+  if [[ "${waveMULTIGRID}" == ".true." ]]; then
+    ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3"
+    for ww3_grid in ${waveGRD}; do
+      ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.${ww3_grid}.${PDY}${cyc}" "log.${ww3_grid}"
+    done
+  else
+    ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3"
   fi
 
   # Loop for gridded output (uses FHINC)
-  local fhr vdate FHINC wavGRD
+  local fhr vdate fhinc ww3_grid
   fhr=${FHMIN_WAV}
-  while [[ ${fhr} -le ${FHMAX_WAV} ]]; do
-    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-    if [[ ${waveMULTIGRID} = ".true." ]]; then
-      for wavGRD in ${waveGRD} ; do
-        ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${wavGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_grd.${wavGRD}"
+  fhinc=${FHOUT_WAV}
+  while (( fhr <= FHMAX_WAV )); do
+    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d.%H0000)
+    if [[ "${waveMULTIGRID}" == ".true." ]]; then
+      for ww3_grid in ${waveGRD} ; do
+        ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_grd.${ww3_grid}.${vdate}" "${DATA}/${vdate}.out_grd.${ww3_grid}"
       done
     else
-      ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_grd.ww3"
+      ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${vdate}" "${DATA}/${vdate}.out_grd.ww3"
     fi
-    FHINC=${FHOUT_WAV}
     if (( FHMAX_HF_WAV > 0 && FHOUT_HF_WAV > 0 && fhr < FHMAX_HF_WAV )); then
-      FHINC=${FHOUT_HF_WAV}
+      fhinc=${FHOUT_HF_WAV}
     fi
-    fhr=$((fhr+FHINC))
+    fhr=$((fhr + fhinc))
   done
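+  # Worked example (hypothetical settings): FHMIN_WAV=0, FHOUT_WAV=6, FHMAX_WAV=24 and no
+  # high-frequency output (FHMAX_HF_WAV=0) yield gridded output links at hours 0, 6, 12, 18 and 24.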
 
   # Loop for point output (uses DTPNT)
   fhr=${FHMIN_WAV}
-  while [[ ${fhr} -le ${FHMAX_WAV} ]]; do
-    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-    if [[ ${waveMULTIGRID} = ".true." ]]; then
-      ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_pnt.${waveuoutpGRD}"
+  fhinc=${FHINCP_WAV}
+  while (( fhr <= FHMAX_WAV )); do
+    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d.%H0000)
+    if [[ "${waveMULTIGRID}" == ".true." ]]; then
+      ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.${waveuoutpGRD}"
     else
-      ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_pnt.ww3"
+      ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.ww3"
     fi
-
-    FHINC=${FHINCP_WAV}
-    fhr=$((fhr+FHINC))
+    fhr=$((fhr + fhinc))
   done
 }
 
 WW3_nml() {
   echo "SUB ${FUNCNAME[0]}: Copying input files for WW3"
-  WAV_MOD_TAG=${RUN}wave${waveMEMB}
-  if [[ "${USE_WAV_RMP:-YES}" = "YES" ]]; then
-    if (( $( ls -1 "${FIXwave}/rmp_src_to_dst_conserv_"* 2> /dev/null | wc -l) > 0 )); then
-      for file in $(ls "${FIXwave}/rmp_src_to_dst_conserv_"*) ; do
-        ${NLN} "${file}" "${DATA}/"
-      done
-    else
-      echo 'FATAL ERROR : No rmp precomputed nc files found for wave model'
-      exit 4
-    fi
-  fi
-  source "${HOMEgfs}/ush/parsing_namelists_WW3.sh"
+  source "${USHgfs}/parsing_namelists_WW3.sh"
   WW3_namelists
 }
 
 WW3_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for WW3"
+  # TODO: Need to add logic to copy restarts from DATArestart/WW3_RESTART to COMOUT_WAVE_RESTART
 }
 
 
 CPL_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for general cpl fields"
-  if [[ "${esmf_profile:-}" = ".true." ]]; then
-    ${NCP} "${DATA}/ESMF_Profile.summary" "${COM_ATMOS_HISTORY}/ESMF_Profile.summary"
+  if [[ "${esmf_profile:-.false.}" == ".true." ]]; then
+    ${NCP} "${DATA}/ESMF_Profile.summary" "${COMOUT_ATMOS_HISTORY}/ESMF_Profile.summary"
   fi
 }
 
 MOM6_postdet() {
   echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination"
 
+  local restart_dir restart_date
+  if [[ "${RERUN}" == "YES" ]]; then
+    restart_dir="${DATArestart}/MOM6_RESTART"
+    restart_date="${RERUN_DATE}"
+  else  # "${RERUN}" == "NO"
+    restart_dir="${COMIN_OCEAN_RESTART_PREV}"
+    restart_date="${model_start_date_current_cycle}"
+  fi
+
   # Copy MOM6 ICs
-  ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc"
+  ${NCP} "${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" \
+  || ( echo "FATAL ERROR: Unable to copy MOM6 IC, ABORT!"; exit 1 )
   case ${OCNRES} in
     "025")
-      for nn in $(seq 1 4); do
-        if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then
-          ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc"
+      local nn
+      for (( nn = 1; nn <= 4; nn++ )); do
+        if [[ -f "${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.MOM.res_${nn}.nc" ]]; then
+          ${NCP} "${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" \
+          || ( echo "FATAL ERROR: Unable to copy MOM6 IC, ABORT!"; exit 1 )
         fi
       done
     ;;
+    *) ;;
   esac
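+  # For reference (hypothetical date): restart_date=2021032306 resolves the copy above to
+  #   ${restart_dir}/20210323.060000.MOM.res.nc -> ${DATA}/INPUT/MOM.res.nc
+  # plus MOM.res_1.nc through MOM.res_4.nc when OCNRES=025 and those files exist.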
 
-  # Link increment
-  if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
-      if [[ ! -f "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" ]]; then
-          echo "FATAL ERROR: Ocean increment not found, ABORT!"
-          exit 111
-      fi
-      ${NLN} "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc"
-  fi
-
-  # Copy MOM6 fixed files
-  ${NCP} "${FIXmom}/${OCNRES}/"* "${DATA}/INPUT/"
-
-  # Copy coupled grid_spec
-  spec_file="${FIXcpl}/a${CASE}o${OCNRES}/grid_spec.nc"
-  if [[ -s ${spec_file} ]]; then
-    ${NCP} "${spec_file}" "${DATA}/INPUT/"
-  else
-    echo "FATAL ERROR: grid_spec file '${spec_file}' does not exist"
-    exit 3
-  fi
-
-  # Copy mediator restart files to RUNDIR  # TODO: mediator should have its own CMEPS_postdet() function
-  if [[ ${warm_start} = ".true." ]]; then
-    local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
-    if [[ -f "${mediator_file}" ]]; then
-      ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc"
-      rm -f "${DATA}/rpointer.cpl"
-      touch "${DATA}/rpointer.cpl"
-      echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
-    else
-      # We have a choice to make here.
-      # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
-      # cmeps_run_type is determined based on the availability of the mediator restart file
-      echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!"
-      #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!"
-      #exit 4
+  # Copy increment (only when RERUN=NO)
+  if [[ "${RERUN}" == "NO" ]]; then
+    if [[ "${DO_JEDIOCNVAR:-NO}" == "YES" ]]; then
+      ${NCP} "${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \
+      || ( echo "FATAL ERROR: Unable to copy MOM6 increment, ABORT!"; exit 1 )
     fi
-  else
-    # This is a cold start, so initialize the coupling fields from zero
-    export cmeps_run_type="startup"
-  fi
 
-  # If using stochatic parameterizations, create a seed that does not exceed the
-  # largest signed integer
-  if [[ "${DO_OCN_SPPT}" = "YES" ]] || [[ "${DO_OCN_PERT_EPBL}" = "YES" ]]; then
-    if [[ ${SET_STP_SEED:-"YES"} = "YES" ]]; then
-      ISEED_OCNSPPT=$(( (current_cycle*1000 + MEMBER*10 + 6) % 2147483647 ))
-      ISEED_EPBL=$(( (current_cycle*1000 + MEMBER*10 + 7) % 2147483647 ))
-    else
-      ISEED=${ISEED:-0}
+    # GEFS perturbations
+    # TODO: an if [[ ${RUN} == "gefs" ]] block may be needed
+    #       to ensure this does not interfere with the GFS when the ensemble is updated in the GFS
+    if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then
+      ${NCP} "${COMIN_OCEAN_ANALYSIS}/mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \
+      || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 )
     fi
-  fi
-
-  # Create COMOUTocean
-  [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}"
+  fi  # if [[ "${RERUN}" == "NO" ]]; then
 
   # Link output files
-  if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
-    # Link output files for RUN = gfs
+  if [[ "${RUN}" =~ "gfs" || "${RUN}" == "gefs" ]]; then  # Link output files for RUN=gfs|enkfgfs|gefs
 
-    # TODO: get requirements on what files need to be written out and what these dates here are and what they mean
-
-    if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi
+    # Looping over MOM6 output hours
+    local fhr fhr3 last_fhr interval midpoint vdate vdate_mid source_file dest_file
+    for fhr in ${MOM6_OUTPUT_FH}; do
+      fhr3=$(printf %03i "${fhr}")
 
-    # Looping over FV3 output hours
-    # TODO: Need to define MOM6_OUTPUT_FH and control at some point for issue #1629
-    for fhr in ${FV3_OUTPUT_FH}; do
       if [[ -z ${last_fhr:-} ]]; then
-        local last_fhr=${fhr}
+        last_fhr=${fhr}
         continue
       fi
+
       (( interval = fhr - last_fhr ))
       (( midpoint = last_fhr + interval/2 ))
 
-      local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-      local vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
-
+      vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+      vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
 
       # Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM
-      local source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
-      local dest_file="ocn${vdate}.${ENSMEM}.${current_cycle}.nc"
-      ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
-
-      local source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc"
-      local dest_file=${source_file}
-      if [[ ! -a "${DATA}/${source_file}" ]]; then
-        ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
-      fi
+      source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+      dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
+      ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}"
+
+      last_fhr=${fhr}
 
-      local last_fhr=${fhr}
     done
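+    # Worked example (hypothetical settings): with current_cycle=2021032300 and
+    # MOM6_OUTPUT_FH="0 6 12", the fhr=6 pass has interval=6 and midpoint=3, so the native
+    # file ocn_2021_03_23_03.nc is linked to ${RUN}.ocean.t00z.6hr_avg.f006.nc in COMOUT_OCEAN_HISTORY.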
 
-  elif [[ "${RUN}" =~ "gdas" ]]; then
-    # Link output files for RUN = gdas
+  elif [[ "${RUN}" =~ "gdas" ]]; then  # Link output files for RUN=gdas|enkfgdas
 
-    # Save MOM6 backgrounds
-    for fhr in ${FV3_OUTPUT_FH}; do
-      local idatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
-      local fhr3=$(printf %03i "${fhr}")
-      ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc"
+    # Save (instantaneous) MOM6 backgrounds
+    local fhr3 vdatestr
+    for fhr in ${MOM6_OUTPUT_FH}; do
+      fhr3=$(printf %03i "${fhr}")
+      vdatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
+      ${NLN} "${COMOUT_OCEAN_HISTORY}/${RUN}.ocean.t${cyc}z.inst.f${fhr3}.nc" "${DATA}/MOM6_OUTPUT/ocn_da_${vdatestr}.nc"
     done
   fi
 
-  mkdir -p "${COM_OCEAN_RESTART}"
-
-  # Link ocean restarts from DATA to COM
-  # Coarser than 1/2 degree has a single MOM restart
-  ${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
-  # 1/4 degree resolution has 4 additional restarts
-  case ${OCNRES} in
-    "025")
-      for nn in $(seq 1 4); do
-        ${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/"
-      done
-      ;;
-    *)
-    ;;
-  esac
-
-  # Loop over restart_interval frequency and link restarts from DATA to COM
-  local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  while [[ ${idate} -lt ${forecast_end_cycle} ]]; do
-    local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}")
-    ${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
-    case ${OCNRES} in
-      "025")
-        for nn in $(seq 1 4); do
-          ${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/"
-        done
-        ;;
-    esac
-    local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  done
-
-  # TODO: mediator should have its own CMEPS_postdet() function
-  # Link mediator restarts from DATA to COM
-  # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below:
-  # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
-  # Instead of linking, copy the mediator files after the model finishes
-  #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med"
-  #mkdir -p "${COMOUTmed}/RESTART"
-  #local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  #while [[ ${idate} -le ${forecast_end_cycle} ]]; do
-  #  local seconds=$(to_seconds ${idate:8:2}0000)  # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
-  #  local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
-  #  ${NLN} "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
-  #  local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  #done
-
   echo "SUB ${FUNCNAME[0]}: MOM6 input data linked/copied"
 
 }
 
 MOM6_nml() {
   echo "SUB ${FUNCNAME[0]}: Creating name list for MOM6"
-  source "${HOMEgfs}/ush/parsing_namelists_MOM6.sh"
+  source "${USHgfs}/parsing_namelists_MOM6.sh"
   MOM6_namelists
 }
 
 MOM6_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6"
 
-  # Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull)
-  if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi
-  ${NCP} "${DATA}/INPUT/MOM_input" "${COM_CONF}/ufs.MOM_input"
-
-  # TODO: mediator should have its own CMEPS_out() function
-  # Copy mediator restarts from DATA to COM
-  # Linking mediator restarts to COM causes the model to fail with a message.
-  # See MOM6_postdet() function for error message
-  mkdir -p "${COM_MED_RESTART}"
-  local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  while [[ ${idate} -le ${forecast_end_cycle} ]]; do
-    local seconds=$(to_seconds "${idate:8:2}"0000)  # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
-    local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
-    local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
-    if [[ -f ${mediator_file} ]]; then
-      ${NCP} "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc"
-    else
-      echo "Mediator restart ${mediator_file} not found."
+  # Copy MOM_input from DATA to COMOUT_CONF after the forecast is run (and successful)
+  ${NCP} "${DATA}/INPUT/MOM_input" "${COMOUT_CONF}/ufs.MOM_input"
+
+  # Create a list of MOM6 restart files
+  # Coarser than 1/2 degree has a single MOM restart
+  local mom6_restart_files mom6_restart_file restart_file
+  mom6_restart_files=(MOM.res.nc)
+  # 1/4 degree resolution has 3 additional restarts
+  case "${OCNRES}" in
+    "025")
+      local nn
+      for (( nn = 1; nn <= 3; nn++ )); do
+        mom6_restart_files+=("MOM.res_${nn}.nc")
+      done
+      ;;
+    *) ;;
+  esac
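+  # e.g. OCNRES=025 (hypothetical) gives mom6_restart_files=(MOM.res.nc MOM.res_1.nc MOM.res_2.nc MOM.res_3.nc)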
+
+  # Copy MOM6 restarts at the end of the forecast segment to COM for RUN=gfs|gefs
+  if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+    local restart_file
+    if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+      echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}"
+      for mom6_restart_file in "${mom6_restart_files[@]}"; do
+        restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}"
+        ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \
+               "${COMOUT_OCEAN_RESTART}/${restart_file}"
+      done
     fi
-    local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  done
+  fi
+
+  # Copy restarts for the next cycle for RUN=gdas|enkfgdas|enkfgfs
+  if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then
+    local restart_date
+    restart_date="${model_start_date_next_cycle}"
+    echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${restart_date}"
+    for mom6_restart_file in "${mom6_restart_files[@]}"; do
+      restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${mom6_restart_file}"
+      ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \
+             "${COMOUT_OCEAN_RESTART}/${restart_file}"
+    done
+  fi
 }
 
 CICE_postdet() {
   echo "SUB ${FUNCNAME[0]}: CICE after run type determination"
 
-  # TODO:  These settings should be elevated to config.ice
-  histfreq_n=${histfreq_n:-6}
-  dumpfreq_n=${dumpfreq_n:-1000}  # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure
-  dumpfreq=${dumpfreq:-"y"} #  "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years"
-
-  if [[ "${RUN}" =~ "gdas" ]]; then
-    cice_hist_avg=".false., .false., .false., .false., .false."   # DA needs instantaneous
-  else
-    cice_hist_avg=".true., .true., .true., .true., .true."    # P8 wants averaged over histfreq_n
+  local restart_date cice_restart_file
+  if [[ "${RERUN}" == "YES" ]]; then
+    restart_date="${RERUN_DATE}"
+    local seconds
+    seconds=$(to_seconds "${restart_date:8:2}0000")  # convert HHMMSS to seconds
+    cice_restart_file="${DATArestart}/CICE_RESTART/cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc"
+  else  # "${RERUN}" == "NO"
+    restart_date="${model_start_date_current_cycle}"
+    cice_restart_file="${COMIN_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc"
+    if [[ "${DO_JEDIOCNVAR:-NO}" == "YES" ]]; then
+      cice_restart_file="${COMIN_ICE_ANALYSIS}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model_anl.res.nc"
+    fi
   fi
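+  # Illustrative (hypothetical date 2021032306): RERUN=YES resolves to
+  #   ${DATArestart}/CICE_RESTART/cice_model.res.2021-03-23-21600.nc
+  # while RERUN=NO (without JEDI ocean DA) resolves to
+  #   ${COMIN_ICE_RESTART_PREV}/20210323.060000.cice_model.res.nc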
 
-  FRAZIL_FWSALT=${FRAZIL_FWSALT:-".true."}
-  ktherm=${ktherm:-2}
-  tfrz_option=${tfrz_option:-"'mushy'"}
-  tr_pond_lvl=${tr_pond_lvl:-".true."} # Use level melt ponds tr_pond_lvl=true
-
-  # restart_pond_lvl (if tr_pond_lvl=true):
-  #   -- if true, initialize the level ponds from restart (if runtype=continue)
-  #   -- if false, re-initialize level ponds to zero (if runtype=initial or continue)
-  restart_pond_lvl=${restart_pond_lvl:-".false."}
-
-  ice_grid_file=${ice_grid_file:-"grid_cice_NEMS_mx${ICERES}.nc"}
-  ice_kmt_file=${ice_kmt_file:-"kmtu_cice_NEMS_mx${ICERES}.nc"}
-  export MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"}
-
   # Copy CICE ICs
-  echo "Link CICE ICs"
-  cice_restart_file="${COM_ICE_RESTART_PREV}/${sPDY}.${scyc}0000.cice_model.res.nc"
-  if [[ ! -f "${cice_restart_file}" ]]; then
-    echo "FATAL ERROR: CICE restart file not found at '${cice_restart_file}', ABORT!"
-    exit 112
-  else
-    ${NLN} "${cice_restart_file}" "${DATA}/cice_model.res.nc"
-  fi
-  rm -f "${DATA}/ice.restart_file"
-  echo "${DATA}/cice_model.res.nc" > "${DATA}/ice.restart_file"
-
-  echo "Link CICE fixed files"
-  ${NLN} "${FIXcice}/${ICERES}/${ice_grid_file}" "${DATA}/"
-  ${NLN} "${FIXcice}/${ICERES}/${ice_kmt_file}"  "${DATA}/"
-  ${NLN} "${FIXcice}/${ICERES}/${MESH_OCN_ICE}"  "${DATA}/"
+  ${NCP} "${cice_restart_file}" "${DATA}/cice_model.res.nc" \
+  || ( echo "FATAL ERROR: Unable to copy CICE IC, ABORT!"; exit 1 )
 
-  # Link CICE output files
-  if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi
-  mkdir -p "${COM_ICE_RESTART}"
+  # Link iceh_ic file to COM.  This is the initial condition file from CICE (f000)
+  # TODO: Is this file needed in COM? Is this going to be used for generating any products?
+  local vdate seconds vdatestr fhr fhr3 interval last_fhr
+  seconds=$(to_seconds "${model_start_date_current_cycle:8:2}0000")  # convert HHMMSS to seconds
+  vdatestr="${model_start_date_current_cycle:0:4}-${model_start_date_current_cycle:4:2}-${model_start_date_current_cycle:6:2}-${seconds}"
+  ${NLN} "${COMOUT_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
 
-  if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
-    # Link output files for RUN = gfs
+  # Link CICE forecast output files from DATA/CICE_OUTPUT to COM
+  local source_file dest_file
+  for fhr in "${CICE_OUTPUT_FH[@]}"; do
 
-    # TODO: make these forecast output files consistent w/ GFS output
-    # TODO: Work w/ NB to determine appropriate naming convention for these files
-
-    # TODO: consult w/ NB on how to improve on this.  Gather requirements and more information on what these files are and how they are used to properly catalog them
-    local vdate seconds vdatestr fhr last_fhr
-    for fhr in ${FV3_OUTPUT_FH}; do
-      vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-      seconds=$(to_seconds "${vdate:8:2}0000")  # convert HHMMSS to seconds
-      vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
-
-      if [[ 10#${fhr} -eq 0 ]]; then
-        ${NLN} "${COM_ICE_HISTORY}/iceic${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
-      else
-        (( interval = fhr - last_fhr ))  # Umm.. isn't this histfreq_n?
-        ${NLN} "${COM_ICE_HISTORY}/ice${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc"
-      fi
+    if [[ -z ${last_fhr:-} ]]; then
       last_fhr=${fhr}
-    done
-
-  elif [[ "${RUN}" =~ "gdas" ]]; then
-
-    # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice
-    # This can be thought of as the f000 output from the CICE model
-    local seconds vdatestr
-    seconds=$(to_seconds "${current_cycle:8:2}0000")  # convert HHMMSS to seconds
-    vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}"
-    ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
-
-    # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice
-    local vdate vdatestr seconds fhr fhr3
-    fhr="${FHOUT}"
-    while [[ "${fhr}" -le "${FHMAX}" ]]; do
-      vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-      seconds=$(to_seconds "${vdate:8:2}0000")  # convert HHMMSS to seconds
-      vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
-      fhr3=$(printf %03i "${fhr}")
-      ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${vdatestr}.nc"
-      fhr=$((fhr + FHOUT))
-    done
+      continue
+    fi
 
-  fi
+    fhr3=$(printf %03i "${fhr}")
+    (( interval = fhr - last_fhr ))
 
-  # Link CICE restarts from CICE_RESTART to COMOUTice/RESTART
-  # Loop over restart_interval and link restarts from DATA to COM
-  local vdate vdatestr seconds
-  vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
-  while [[ ${vdate} -le ${forecast_end_cycle} ]]; do
+    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
     seconds=$(to_seconds "${vdate:8:2}0000")  # convert HHMMSS to seconds
     vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
-    ${NLN} "${COM_ICE_RESTART}/${vdate:0:8}.${vdate:8:2}0000.cice_model.res.nc" "${DATA}/CICE_RESTART/cice_model.res.${vdatestr}.nc"
-    vdate=$(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
+
+    if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
+      source_file="iceh_$(printf "%0.2d" "${FHOUT_ICE}")h.${vdatestr}.nc"
+      dest_file="${RUN}.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
+    elif [[ "${RUN}" =~ "gdas" ]]; then
+      source_file="iceh_inst.${vdatestr}.nc"
+      dest_file="${RUN}.ice.t${cyc}z.inst.f${fhr3}.nc"
+    fi
+    ${NLN} "${COMOUT_ICE_HISTORY}/${dest_file}" "${DATA}/CICE_OUTPUT/${source_file}"
+
+    last_fhr=${fhr}
   done
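+  # Worked example (hypothetical settings): with RUN=gfs, current_cycle=2021032300, FHOUT_ICE=6
+  # and CICE_OUTPUT_FH=(0 6 12), the fhr=6 pass links ${DATA}/CICE_OUTPUT/iceh_06h.2021-03-23-21600.nc
+  # to ${RUN}.ice.t00z.6hr_avg.f006.nc in COMOUT_ICE_HISTORY.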
+
 }
 
 CICE_nml() {
   echo "SUB ${FUNCNAME[0]}: Creating name list for CICE"
-  source "${HOMEgfs}/ush/parsing_namelists_CICE.sh"
+  source "${USHgfs}/parsing_namelists_CICE.sh"
   CICE_namelists
 }
 
 CICE_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for CICE"
 
-  # Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successfull)
-  if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi
-  ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in"
+  # Copy ice_in namelist from DATA to COMOUT_CONF after the forecast is run (and successful)
+  ${NCP} "${DATA}/ice_in" "${COMOUT_CONF}/ufs.ice_in"
+
+  # Copy CICE restarts at the end of the forecast segment to COM for RUN=gfs|gefs
+  if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+    local seconds source_file target_file
+    if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+      echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}"
+      seconds=$(to_seconds "${forecast_end_cycle:8:2}0000")  # convert HHMMSS to seconds
+      source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc"
+      target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc"
+      ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \
+             "${COMOUT_ICE_RESTART}/${target_file}"
+    fi
+  fi
+
+  # Copy restarts for next cycle for RUN=gdas|enkfgdas|enkfgfs
+  if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then
+    local restart_date
+    restart_date="${model_start_date_next_cycle}"
+    echo "Copying CICE restarts for 'RUN=${RUN}' at ${restart_date}"
+    seconds=$(to_seconds "${restart_date:8:2}0000")  # convert HHMMSS to seconds
+    source_file="cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc"
+    target_file="${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc"
+    ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \
+           "${COMOUT_ICE_RESTART}/${target_file}"
+  fi
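+  # Illustrative conversion (hypothetical date): restart_date=2021032400 maps the CICE name
+  #   cice_model.res.2021-03-24-00000.nc (under DATArestart/CICE_RESTART) to the COM name
+  #   20210324.000000.cice_model.res.nc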
+
 }
 
 GOCART_rc() {
@@ -1002,14 +634,14 @@ GOCART_rc() {
   # this variable is platform-dependent and should be set via a YAML file
 
   # link directory containing GOCART input dataset, if provided
-  if [[ ! -z "${AERO_INPUTS_DIR}" ]]; then
+  if [[ -n "${AERO_INPUTS_DIR}" ]]; then
     ${NLN} "${AERO_INPUTS_DIR}" "${DATA}/ExtData"
     status=$?
     [[ ${status} -ne 0 ]] && exit "${status}"
   fi
 
   # copying GOCART configuration files
-  if [[ ! -z "${AERO_CONFIG_DIR}" ]]; then
+  if [[  -n "${AERO_CONFIG_DIR}" ]]; then
     ${NCP} "${AERO_CONFIG_DIR}"/*.rc "${DATA}"
     status=$?
     [[ ${status} -ne 0 ]] && exit "${status}"
@@ -1030,18 +662,17 @@ GOCART_rc() {
 GOCART_postdet() {
   echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART"
 
-  if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi
-
-  for fhr in ${FV3_OUTPUT_FH}; do
-    local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+  local vdate
+  for fhr in ${GOCART_OUTPUT_FH}; do
+    vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
 
     # Temporarily delete existing files due to noclobber in GOCART
-    if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" ]]; then
-      rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
+    if [[ -e "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" ]]; then
+      rm -f "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
     fi
 
-    #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end
-    #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
+    #TODO: Temporarily removing this because it will crash gocart; a copy statement is added at the end
+    #${NLN} "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
     #       "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
   done
 }
@@ -1050,15 +681,88 @@ GOCART_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for GOCART"
 
   # Copy gocart.inst_aod after the forecast is run (and successfull)
-  # TO DO: this should be linked but there were issues where gocart was crashing if it was linked
+  # TODO: this should be linked, but there are issues where gocart crashes if it is linked
   local fhr
   local vdate
-  for fhr in ${FV3_OUTPUT_FH}; do
+  for fhr in ${GOCART_OUTPUT_FH}; do
     if (( fhr == 0 )); then continue; fi
     vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
     ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
-      "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
+      "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
   done
+}
+
+CMEPS_postdet() {
+  echo "SUB ${FUNCNAME[0]}: Linking output data for CMEPS mediator"
+
+  if [[ "${warm_start}" == ".true." ]]; then
+
+    # Determine the appropriate restart file
+    local restart_date cmeps_restart_file
+    if [[ "${RERUN}" == "YES" ]]; then
+      restart_date="${RERUN_DATE}"
+      local seconds
+      seconds=$(to_seconds "${restart_date:8:2}0000")  # convert HHMMSS to seconds
+      cmeps_restart_file="${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc"
+    else  # "${RERUN}" == "NO"
+      restart_date="${model_start_date_current_cycle}"
+      cmeps_restart_file="${COMIN_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc"
+    fi
+
+    # Copy CMEPS restarts
+    if [[ -f "${cmeps_restart_file}" ]]; then
+      ${NCP} "${cmeps_restart_file}" "${DATA}/ufs.cpld.cpl.r.nc" \
+      || ( echo "FATAL ERROR: Unable to copy CMEPS restarts, ABORT!"; exit 1 )
+      rm -f "${DATA}/rpointer.cpl"
+      touch "${DATA}/rpointer.cpl"
+      echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
+    else
+      # We have a choice to make here.
+      # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
+      # cmeps_run_type is determined based on the availability of the CMEPS restart file
+      echo "WARNING: CMEPS restart file '${cmeps_restart_file}' not found for warm_start='${warm_start}', will initialize!"
+      if [[ "${RERUN}" == "YES" ]]; then
+        # In the case of a RERUN, the CMEPS restart file is required
+        echo "FATAL ERROR: CMEPS restart file '${cmeps_restart_file}' not found for RERUN='${RERUN}', ABORT!"
+        exit 1
+      fi
+    fi
+
+  fi  # [[ "${warm_start}" == ".true." ]];
+}
+
+CMEPS_out() {
+  echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator"
+
+  # Copy mediator restarts at the end of the forecast segment to COM for RUN=gfs|gefs
+  if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+    echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}"
+    local seconds source_file target_file
+    seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000)
+    source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc"
+    target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc"
+    if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then
+      ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \
+             "${COMOUT_MED_RESTART}/${target_file}"
+    else
+      echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found."
+    fi
+  fi
 
+  # Copy restarts for the next cycle to COM for RUN=gdas|enkfgdas|enkfgfs
+  if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then
+    local restart_date
+    restart_date="${model_start_date_next_cycle}"
+    echo "Copying mediator restarts for 'RUN=${RUN}' at ${restart_date}"
+    seconds=$(to_seconds "${restart_date:8:2}"0000)
+    source_file="ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc"
+    target_file="${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc"
+    if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then
+      ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \
+             "${COMOUT_MED_RESTART}/${target_file}"
+    else
+      echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found."
+    fi
+  fi
 
 }
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 9bb565919a..ebf7cfd282 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -1,42 +1,34 @@
 #! /usr/bin/env bash
 
-#####
-## "forecast_predet.sh"
-## This script sets value of all variables
-##
-## This is the child script of ex-global forecast,
-## This script is a definition of functions.
-#####
-
-# For all non-evironment variables
-# Cycling and forecast hour specific parameters
-
 to_seconds() {
   # Function to convert HHMMSS to seconds since 00Z
-  local hhmmss=${1:?}
-  local hh=${hhmmss:0:2}
-  local mm=${hhmmss:2:2}
-  local ss=${hhmmss:4:2}
-  local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss}))
-  local padded_seconds=$(printf "%05d" "${seconds}")
+  local hhmmss hh mm ss seconds padded_seconds
+  hhmmss=${1:?}
+  hh=${hhmmss:0:2}
+  mm=${hhmmss:2:2}
+  ss=${hhmmss:4:2}
+  seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss}))
+  padded_seconds=$(printf "%05d" "${seconds}")
   echo "${padded_seconds}"
 }
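+# Example (illustrative): to_seconds "013000" prints "05400" (1 hour 30 minutes, zero-padded to five digits).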
 
 middle_date(){
   # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH
-  local date1=${1:?}
-  local date2=${2:?}
-  local date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s)
-  local date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s)
-  local dtsecsby2=$(( $((date2s - date1s)) / 2 ))
-  local mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S)
+  local date1 date2 date1s date2s dtsecsby2 mid_date
+  date1=${1:?}
+  date2=${2:?}
+  date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s)
+  date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s)
+  dtsecsby2=$(( $((date2s - date1s)) / 2 ))
+  mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S)
   echo "${mid_date:0:10}"
 }
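+# Example (illustrative): middle_date 2021032300 2021032312 prints 2021032306.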
 
 nhour(){
   # Function to calculate hours between two dates (This replicates prod-util NHOUR)
-  local date1=${1:?}
-  local date2=${2:?}
+  local date1 date2 seconds1 seconds2 hours
+  date1=${1:?}
+  date2=${2:?}
   # Convert dates to UNIX timestamps
   seconds1=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s)
   seconds2=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s)
@@ -44,119 +36,131 @@ nhour(){
   echo "${hours}"
 }
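+# Example (illustrative): nhour 2021032400 2021032318 prints 6, the number of hours between the two dates.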
 
+FV3_coldstarts(){
+  # Function to return a comma-separated string of cold-start input files for FV3
+  # Create an array of chgres-ed FV3 files
+  local fv3_input_files tile_files
+  fv3_input_files=(gfs_ctrl.nc)
+  tile_files=(gfs_data sfc_data)
+  local nn tt
+  for (( nn = 1; nn <= ntiles; nn++ )); do
+    for tt in "${tile_files[@]}"; do
+      fv3_input_files+=("${tt}.tile${nn}.nc")
+    done
+  done
+  # Create a comma separated string from array using IFS
+  IFS=, echo "${fv3_input_files[*]}"
+}
+
+FV3_restarts(){
+  # Function to return a comma-separated string of warm-start input files for FV3
+  # Create an array of FV3 restart files
+  local fv3_restart_files tile_files
+  fv3_restart_files=(coupler.res fv_core.res.nc)
+  tile_files=(fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data ca_data)
+  local nn tt
+  for (( nn = 1; nn <= ntiles; nn++ )); do
+    for tt in "${tile_files[@]}"; do
+      fv3_restart_files+=("${tt}.tile${nn}.nc")
+    done
+  done
+  # Create a comma separated string from array using IFS
+  IFS=, echo "${fv3_restart_files[*]}"
+}
+
+# shellcheck disable=SC2034
 common_predet(){
   echo "SUB ${FUNCNAME[0]}: Defining variables for shared through model components"
-  # Ignore "not used" warning
-  # shellcheck disable=SC2034
-  pwd=$(pwd)
-  CDUMP=${CDUMP:-gdas}
-  CASE=${CASE:-C768}
-  CDATE=${CDATE:-2017032500}
-  ENSMEM=${ENSMEM:-000}
-
-  FCSTEXECDIR=${FCSTEXECDIR:-${HOMEgfs}/exec}
-  FCSTEXEC=${FCSTEXEC:-ufs_model.x}
 
-  # Directories.
-  FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
+  RUN=${RUN:-gdas}
+  rCDUMP=${rCDUMP:-${RUN}}
 
-  # Model specific stuff
-  PARM_POST=${PARM_POST:-${HOMEgfs}/parm/post}
+  CDATE=${CDATE:-"${PDY}${cyc}"}
+  ENSMEM=${ENSMEM:-000}
 
   # Define significant cycles
-  current_cycle=${CDATE}
+  half_window=$(( assim_freq / 2 ))
+  current_cycle="${PDY}${cyc}"
   previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H)
-  # ignore errors that variable isn't used
-  # shellcheck disable=SC2034
   next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H)
+  current_cycle_begin=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${half_window} hours" +%Y%m%d%H)
+  current_cycle_end=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${half_window} hours" +%Y%m%d%H)
+  next_cycle_begin=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${half_window} hours" +%Y%m%d%H)
   forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H)
 
-  # IAU options
-  IAU_OFFSET=${IAU_OFFSET:-0}
-  DOIAU=${DOIAU:-"NO"}
-  if [[ "${DOIAU}" = "YES" ]]; then
-    sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H)
-    sPDY="${sCDATE:0:8}"
-    scyc="${sCDATE:8:2}"
-    tPDY=${previous_cycle:0:8}
-    tcyc=${previous_cycle:8:2}
+  # Define model start date for current_cycle and next_cycle as the time the forecast will start
+  if [[ "${DOIAU:-NO}" == "YES" ]]; then
+    model_start_date_current_cycle="${current_cycle_begin}"
+    model_start_date_next_cycle="${next_cycle_begin}"
   else
-    sCDATE=${current_cycle}
-    sPDY=${current_cycle:0:8}
-    scyc=${current_cycle:8:2}
-    tPDY=${sPDY}
-    tcyc=${scyc}
+    if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+      model_start_date_current_cycle=${current_cycle_end}
+    else
+      model_start_date_current_cycle=${current_cycle}
+    fi
+    model_start_date_next_cycle=${next_cycle}
   fi
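+  # Worked example (hypothetical): with assim_freq=6 and current_cycle=2021032312, half_window=3,
+  # current_cycle_begin=2021032309, current_cycle_end=2021032315, next_cycle=2021032318 and
+  # next_cycle_begin=2021032315; DOIAU=YES therefore starts the model at 2021032309, while
+  # DOIAU=NO with REPLAY_ICS=YES starts it at 2021032315 and otherwise at 2021032312.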
 
-  mkdir -p "${COM_CONF}"
+  FHMIN=${FHMIN:-0}
+  FHMAX=${FHMAX:-9}
+  FHOUT=${FHOUT:-3}
+  FHMAX_HF=${FHMAX_HF:-0}
+  FHOUT_HF=${FHOUT_HF:-1}
+
+  if [[ ! -d "${COMOUT_CONF}" ]]; then mkdir -p "${COMOUT_CONF}"; fi
+
   cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 )
+
+  # Several model components share DATA/INPUT for input data
+  if [[ ! -d "${DATA}/INPUT" ]]; then mkdir -p "${DATA}/INPUT"; fi
+
 }
 
+# shellcheck disable=SC2034
 FV3_predet(){
   echo "SUB ${FUNCNAME[0]}: Defining variables for FV3"
-  FHMIN=${FHMIN:-0}
-  FHMAX=${FHMAX:-9}
-  FHOUT=${FHOUT:-3}
+
+  if [[ ! -d "${COMOUT_ATMOS_HISTORY}" ]]; then mkdir -p "${COMOUT_ATMOS_HISTORY}"; fi
+  if [[ ! -d "${COMOUT_ATMOS_MASTER}" ]]; then mkdir -p "${COMOUT_ATMOS_MASTER}"; fi
+  if [[ ! -d "${COMOUT_ATMOS_RESTART}" ]]; then mkdir -p "${COMOUT_ATMOS_RESTART}"; fi
+  if [[ ! -d "${DATArestart}/FV3_RESTART" ]]; then mkdir -p "${DATArestart}/FV3_RESTART"; fi
+  ${NLN} "${DATArestart}/FV3_RESTART" "${DATA}/RESTART"
+
   FHZER=${FHZER:-6}
   FHCYC=${FHCYC:-24}
-  FHMAX_HF=${FHMAX_HF:-0}
-  FHOUT_HF=${FHOUT_HF:-1}
-  NSOUT=${NSOUT:-"-1"}
-  FDIAG=${FHOUT}
-  if (( FHMAX_HF > 0 && FHOUT_HF > 0 )); then FDIAG=${FHOUT_HF}; fi
-  WRITE_DOPOST=${WRITE_DOPOST:-".false."}
   restart_interval=${restart_interval:-${FHMAX}}
   # restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX
-  if [[ ${restart_interval} -eq 0 ]]; then
+  # Convert restart interval into an explicit list for FV3
+  if (( restart_interval == 0 )); then
     restart_interval=${FHMAX}
+    FV3_RESTART_FH=("${restart_interval}")
+  else
+    # shellcheck disable=SC2312
+    mapfile -t FV3_RESTART_FH < <(seq "${restart_interval}" "${restart_interval}" "${FHMAX}")
+    # If the last forecast hour is not in the array, add it
+    local nrestarts=${#FV3_RESTART_FH[@]}
+    if (( FV3_RESTART_FH[nrestarts-1] != FHMAX )); then
+      FV3_RESTART_FH+=("${FHMAX}")
+    fi
   fi
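+  # Worked example (hypothetical): restart_interval=12 with FHMAX=30 gives FV3_RESTART_FH=(12 24 30),
+  # while restart_interval=0 collapses to a single entry at FHMAX, i.e. FV3_RESTART_FH=(30).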
 
   # Convert output settings into an explicit list for FV3
-  # NOTE:  FV3_OUTPUT_FH is also currently used in other components
-  # TODO: Have a seperate control for other components to address issue #1629
   FV3_OUTPUT_FH=""
   local fhr=${FHMIN}
   if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
-    for (( fh = FHMIN; fh < FHMAX_HF; fh = fh + FHOUT_HF )); do
-      FV3_OUTPUT_FH="${FV3_OUTPUT_FH} ${fh}"
-    done
+    FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${FHMIN}" "${FHOUT_HF}" "${FHMAX_HF}")"
     fhr=${FHMAX_HF}
   fi
-  for (( fh = fhr; fh <= FHMAX; fh = fh + FHOUT )); do
-    FV3_OUTPUT_FH="${FV3_OUTPUT_FH} ${fh}"
-  done
-
-
-  # Model resolution specific parameters
-  DELTIM=${DELTIM:-225}
-  layout_x=${layout_x:-8}
-  layout_y=${layout_y:-16}
-  LEVS=${LEVS:-65}
+  FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
 
   # Other options
-  MEMBER=${MEMBER:-"-1"} # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER
-  ENS_NUM=${ENS_NUM:-1}  # Single executable runs multiple members (e.g. GEFS)
+  MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER
   PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment
 
   # IAU options
   IAUFHRS=${IAUFHRS:-0}
   IAU_DELTHRS=${IAU_DELTHRS:-0}
 
-  # Model config options
-  ntiles=6
-
-  TYPE=${TYPE:-"nh"}                  # choices:  nh, hydro
-  MONO=${MONO:-"non-mono"}            # choices:  mono, non-mono
-
-  QUILTING=${QUILTING:-".true."}
-  OUTPUT_GRID=${OUTPUT_GRID:-"gaussian_grid"}
-  WRITE_NEMSIOFLIP=${WRITE_NEMSIOFLIP:-".true."}
-  WRITE_FSYNCFLAG=${WRITE_FSYNCFLAG:-".true."}
-
-  rCDUMP=${rCDUMP:-${CDUMP}}
-
-  mkdir -p "${DATA}/INPUT"
-
   #------------------------------------------------------------------
   # changeable parameters
   # dycore definitions
@@ -196,7 +200,6 @@ FV3_predet(){
   nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"}
   nst_anl=${nst_anl:-".false."}
 
-
   # blocking factor used for threading and general physics performance
   #nyblocks=$(expr \( $npy - 1 \) \/ $layout_y )
   #nxblocks=$(expr \( $npx - 1 \) \/ $layout_x \/ 32)
@@ -213,42 +216,474 @@ FV3_predet(){
   chksum_debug=${chksum_debug:-".false."}
   print_freq=${print_freq:-6}
 
-  #-------------------------------------------------------
-  if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]]; then
-    if [[ ! -d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi
-    ${NLN} "${COM_ATMOS_RESTART}" RESTART
-    # The final restart written at the end doesn't include the valid date
-    # Create links that keep the same name pattern for these files
-    files="coupler.res fv_core.res.nc"
-    for n in $(seq 1 "${ntiles}"); do
-      for base in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do
-        files="${files} ${base}.tile${n}.nc"
-      done
+  # the pre-conditioning of the solution
+  # =0 implies no pre-conditioning
+  # >0 means new adiabatic pre-conditioning
+  # <0 means older adiabatic pre-conditioning
+  na_init=${na_init:-1}
+
+  local suite_file="${HOMEgfs}/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml"
+  if [[ ! -f "${suite_file}" ]]; then
+    echo "FATAL ERROR: CCPP Suite file ${suite_file} does not exist, ABORT!"
+    exit 2
+  fi
+
+  # Scan suite file to determine whether it uses Noah-MP
+  local num_noahmpdrv
+  num_noahmpdrv=$(grep -c noahmpdrv "${suite_file}")
+  if (( num_noahmpdrv > 0 )); then
+    lsm="2"
+    lheatstrg=".false."
+    landice=".false."
+    iopt_dveg=${iopt_dveg:-"4"}
+    iopt_crs=${iopt_crs:-"2"}
+    iopt_btr=${iopt_btr:-"1"}
+    iopt_run=${iopt_run:-"1"}
+    iopt_sfc=${iopt_sfc:-"1"}
+    iopt_frz=${iopt_frz:-"1"}
+    iopt_inf=${iopt_inf:-"1"}
+    iopt_rad=${iopt_rad:-"3"}
+    iopt_alb=${iopt_alb:-"1"}
+    iopt_snf=${iopt_snf:-"4"}
+    iopt_tbot=${iopt_tbot:-"2"}
+    iopt_stc=${iopt_stc:-"3"}
+    IALB=${IALB:-2}
+    IEMS=${IEMS:-2}
+  else
+    lsm="1"
+    lheatstrg=".true."
+    landice=".true."
+    iopt_dveg=${iopt_dveg:-"1"}
+    iopt_crs=${iopt_crs:-"1"}
+    iopt_btr=${iopt_btr:-"1"}
+    iopt_run=${iopt_run:-"1"}
+    iopt_sfc=${iopt_sfc:-"1"}
+    iopt_frz=${iopt_frz:-"1"}
+    iopt_inf=${iopt_inf:-"1"}
+    iopt_rad=${iopt_rad:-"1"}
+    iopt_alb=${iopt_alb:-"2"}
+    iopt_snf=${iopt_snf:-"4"}
+    iopt_tbot=${iopt_tbot:-"2"}
+    iopt_stc=${iopt_stc:-"1"}
+    IALB=${IALB:-1}
+    IEMS=${IEMS:-1}
+  fi
+
+  if [[ "${TYPE}" == "nh" ]]; then  # non-hydrostatic options
+    hydrostatic=".false."
+    phys_hydrostatic=".false."     # enable heating in hydrostatic balance in non-hydrostatic simulation
+    use_hydro_pressure=".false."   # use hydrostatic pressure for physics
+    make_nh=".true."               # running in non-hydrostatic mode
+  else  # hydrostatic options
+    hydrostatic=".true."
+    phys_hydrostatic=".false."     # ignored when hydrostatic = T
+    use_hydro_pressure=".false."   # ignored when hydrostatic = T
+    make_nh=".false."              # running in hydrostatic mode
+  fi
+
+  # Conserve total energy as heat globally
+  consv_te=${consv_te:-1.} # range 0.-1., 1. will restore energy to orig. val. before physics
+  if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+    consv_te=0
+    k_split=${k_split:-1}
+    k_split_nest=${k_split_nest:-4}
+  else
+    consv_te=${consv_te:-1.} # range 0.-1., 1. will restore energy to orig. val. before physics
+    k_split=${k_split:-2}
+  fi
+
+  # time step parameters in FV3
+  n_split=${n_split:-5}
+
+  if [[ "${MONO:0:4}" == "mono" ]]; then  # monotonic options
+    d_con=${d_con_mono:-"0."}
+    do_vort_damp=".false."
+    if [[ "${TYPE}" == "nh" ]]; then  # monotonic and non-hydrostatic
+      hord_mt=${hord_mt_nh_mono:-"10"}
+      hord_xx=${hord_xx_nh_mono:-"10"}
+    else  # monotonic and hydrostatic
+      hord_mt=${hord_mt_hydro_mono:-"10"}
+      hord_xx=${hord_xx_hydro_mono:-"10"}
+    fi
+  else  # non-monotonic options
+    d_con=${d_con_nonmono:-"1."}
+    do_vort_damp=".true."
+    if [[ "${TYPE}" == "nh" ]]; then  # non-monotonic and non-hydrostatic
+      hord_mt=${hord_mt_nh_nonmono:-"5"}
+      hord_xx=${hord_xx_nh_nonmono:-"5"}
+    else # non-monotonic and hydrostatic
+      hord_mt=${hord_mt_hydro_nonmono:-"10"}
+      hord_xx=${hord_xx_hydro_nonmono:-"10"}
+    fi
+  fi
+
+  if [[ "${MONO:0:4}" != "mono" && "${TYPE}" == "nh" ]]; then
+    vtdm4=${vtdm4_nh_nonmono:-"0.06"}
+  else
+    vtdm4=${vtdm4:-"0.05"}
+  fi
+
+  # Initial conditions are chgres-ed from GFS analysis file
+  nggps_ic=${nggps_ic:-".true."}
+  ncep_ic=${ncep_ic:-".false."}
+  external_ic=".true."
+  mountain=".false."
+  warm_start=".false."
+  read_increment=".false."
+  res_latlon_dynamics='""'
+
+  # Stochastic Physics Options
+  do_skeb=".false."
+  do_shum=".false."
+  do_sppt=".false."
+  do_ca=".false."
+  ISEED=0
+  if (( MEMBER > 0 )); then  # these are only applicable for ensemble members
+    local imem=${MEMBER#0}
+    local base_seed=$((current_cycle*10000 + imem*100))
+
+    if [[ "${DO_SKEB:-}" == "YES" ]]; then
+      do_skeb=".true."
+      ISEED_SKEB=$((base_seed + 1))
+    fi
+
+    if [[ "${DO_SHUM:-}" == "YES" ]]; then
+      do_shum=".true."
+      ISEED_SHUM=$((base_seed + 2))
+    fi
+
+    if [[ "${DO_SPPT:-}" == "YES" ]]; then
+      do_sppt=".true."
+      ISEED_SPPT=$((base_seed + 3)),$((base_seed + 4)),$((base_seed + 5)),$((base_seed + 6)),$((base_seed + 7))
+    fi
+
+    if [[ "${DO_CA:-}" == "YES" ]]; then
+      do_ca=".true."
+      ISEED_CA=$(( (base_seed + 18) % 2147483647 ))
+    fi
+
+    if [[ "${DO_LAND_PERT:-}" == "YES" ]]; then
+      lndp_type=${lndp_type:-2}
+      ISEED_LNDP=$(( (base_seed + 5) % 2147483647 ))
+      LNDP_TAU=${LNDP_TAU:-21600}
+      LNDP_SCALE=${LNDP_SCALE:-500000}
+      lndp_var_list=${lndp_var_list:-"'smc', 'vgf',"}
+      lndp_prt_list=${lndp_prt_list:-"0.2,0.1"}
+      n_var_lndp=$(echo "${lndp_var_list}" | wc -w)
+    fi
+
+  fi  # end of ensemble member specific options
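+  # Worked example (hypothetical): current_cycle=2021032300 and MEMBER=3 give
+  # base_seed=20210323000300, so e.g. ISEED_SKEB=20210323000301 and ISEED_SHUM=20210323000302
+  # when the corresponding DO_* switches are "YES".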
+
+  #--------------------------------------------------------------------------
+
+  # Fix files
+  FNGLAC=${FNGLAC:-"${FIXgfs}/am/global_glacier.2x2.grb"}
+  FNMXIC=${FNMXIC:-"${FIXgfs}/am/global_maxice.2x2.grb"}
+  FNTSFC=${FNTSFC:-"${FIXgfs}/am/RTGSST.1982.2012.monthly.clim.grb"}
+  FNSNOC=${FNSNOC:-"${FIXgfs}/am/global_snoclim.1.875.grb"}
+  FNZORC=${FNZORC:-"igbp"}
+  FNAISC=${FNAISC:-"${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb"}
+  FNALBC2=${FNALBC2:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc"}
+  FNTG3C=${FNTG3C:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"}
+  FNVEGC=${FNVEGC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+  FNMSKH=${FNMSKH:-"${FIXgfs}/am/global_slmask.t1534.3072.1536.grb"}
+  FNVMNC=${FNVMNC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+  FNVMXC=${FNVMXC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"}
+  FNSLPC=${FNSLPC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc"}
+  FNALBC=${FNALBC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"}
+  FNVETC=${FNVETC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"}
+  FNSOTC=${FNSOTC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc"}
+  FNSOCC=${FNSOCC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_color.tileX.nc"}
+  FNABSC=${FNABSC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"}
+  FNSMCC=${FNSMCC:-"${FIXgfs}/am/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"}
+
+  # If the appropriate resolution fix file is not present, use the highest resolution available (T1534)
+  [[ ! -f "${FNSMCC}" ]] && FNSMCC="${FIXgfs}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb"
+
+  # Grid and orography data
+  if [[ "${cplflx}" == ".false." ]] ; then
+    ${NCP} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc"
+  else
+    ${NCP} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc"
+  fi
+
+  # Files for GWD
+  ${NCP} "${FIXugwd}/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc"
+
+  # Files for orography, GWD tiles
+  local tt
+  for (( tt = 1; tt <= ntiles; tt++ )); do
+    ${NCP} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${tt}.nc" "${DATA}/INPUT/oro_data.tile${tt}.nc"
+    ${NCP} "${FIXorog}/${CASE}/${CASE}_grid.tile${tt}.nc"                 "${DATA}/INPUT/${CASE}_grid.tile${tt}.nc"
+    ${NCP} "${FIXugwd}/${CASE}/${CASE}_oro_data_ls.tile${tt}.nc"          "${DATA}/INPUT/oro_data_ls.tile${tt}.nc"
+    ${NCP} "${FIXugwd}/${CASE}/${CASE}_oro_data_ss.tile${tt}.nc"          "${DATA}/INPUT/oro_data_ss.tile${tt}.nc"
+  done
+  if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+    ${NLN} "${DATA}/INPUT/oro_data.tile7.nc" "${DATA}/INPUT/oro_data.nest02.tile7.nc"
+    ${NLN} "${DATA}/INPUT/${CASE}_grid.tile7.nc"     "${DATA}/INPUT/${CASE}_grid.nest02.tile7.nc"
+    ${NLN} "${DATA}/INPUT/${CASE}_grid.tile7.nc"     "${DATA}/INPUT/grid.nest02.tile7.nc"
+    ${NLN} "${DATA}/INPUT/oro_data_ls.tile7.nc" "${DATA}/INPUT/oro_data_ls.nest02.tile7.nc"
+    ${NLN} "${DATA}/INPUT/oro_data_ss.tile7.nc" "${DATA}/INPUT/oro_data_ss.nest02.tile7.nc"
+  fi
+
+  # NoahMP table
+  local noahmptablefile="${PARMgfs}/ufs/noahmptable.tbl"
+  if [[ ! -f "${noahmptablefile}" ]]; then
+    echo "FATAL ERROR: missing noahmp table file '${noahmptablefile}'"
+    exit 1
+  else
+    ${NCP} "${noahmptablefile}" "${DATA}/noahmptable.tbl"
+  fi
+
+  #  Thompson microphysics fix files
+  if (( imp_physics == 8 )); then
+    ${NCP} "${FIXgfs}/am/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN"
+    ${NCP} "${FIXgfs}/am/freezeH2O.dat"    "${DATA}/freezeH2O.dat"
+    ${NCP} "${FIXgfs}/am/qr_acr_qgV2.dat"  "${DATA}/qr_acr_qgV2.dat"
+    ${NCP} "${FIXgfs}/am/qr_acr_qsV2.dat"  "${DATA}/qr_acr_qsV2.dat"
+  fi
+
+  if [[ "${new_o3forc:-YES}" == "YES" ]]; then
+    O3FORC="ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77"
+  else
+    O3FORC="global_o3prdlos.f77"
+  fi
+  H2OFORC=${H2OFORC:-"global_h2o_pltc.f77"}
+  ${NCP} "${FIXgfs}/am/${O3FORC}"  "${DATA}/global_o3prdlos.f77"
+  ${NCP} "${FIXgfs}/am/${H2OFORC}" "${DATA}/global_h2oprdlos.f77"
+
+  # GFS standard input data
+
+  ISOL=${ISOL:-2}
+
+  ${NCP} "${FIXgfs}/am/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt"
+  ${NCP} "${FIXgfs}/am/global_sfc_emissivity_idx.txt"    "${DATA}/sfc_emissivity_idx.txt"
+
+  # Aerosol options
+  IAER=${IAER:-1011}
+
+  ## merra2 aerosol climo
+  if (( IAER == 1011 )); then
+    local month mm
+    for (( month = 1; month <=12; month++ )); do
+      mm=$(printf %02d "${month}")
+      ${NCP} "${FIXgfs}/aer/merra2.aerclim.2003-2014.m${mm}.nc" "aeroclim.m${mm}.nc"
     done
-    for file in ${files}; do
-      ${NLN} "${file}" "${COM_ATMOS_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${file}"
+  fi
+
+  ${NCP} "${FIXgfs}/am/global_climaeropac_global.txt" "${DATA}/aerosol.dat"
+  if (( IAER > 0 )) ; then
+    local file
+    for file in "${FIXgfs}/am/global_volcanic_aerosols"* ; do
+      ${NCP} "${file}" "${DATA}/$(basename "${file//global_}")"
     done
-  else
-    mkdir -p "${DATA}/RESTART"
   fi
 
-  echo "SUB ${FUNCNAME[0]}: pre-determination variables set"
+  ${NCP} "${FIXgfs}/lut/optics_BC.v1_3.dat"  "${DATA}/optics_BC.dat"
+  ${NCP} "${FIXgfs}/lut/optics_OC.v1_3.dat"  "${DATA}/optics_OC.dat"
+  ${NCP} "${FIXgfs}/lut/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat"
+  ${NCP} "${FIXgfs}/lut/optics_SS.v3_3.dat"  "${DATA}/optics_SS.dat"
+  ${NCP} "${FIXgfs}/lut/optics_SU.v1_3.dat"  "${DATA}/optics_SU.dat"
+
+  # CO2 options
+  ICO2=${ICO2:-2}
+
+  ${NCP} "${FIXgfs}/am/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt"
+  ${NCP} "${FIXgfs}/am/co2monthlycyc.txt"                 "${DATA}/co2monthlycyc.txt"
+  # Set historical CO2 values based on whether this is a reforecast run or not
+  # Ref. issue 2403
+  local co2dir
+  co2dir="fix_co2_proj"
+  if [[ "${reforecast:-}" == "YES" ]]; then
+    co2dir="co2dat_4a"
+  fi
+  if (( ICO2 > 0 )); then
+    local file
+    for file in "${FIXgfs}/am/${co2dir}/global_co2historicaldata"* ; do
+      ${NCP} "${file}" "${DATA}/$(basename "${file//global_}")"
+    done
+  fi
+
+  # Inline UPP fix files
+  if [[ "${WRITE_DOPOST:-}" == ".true." ]]; then
+    ${NCP} "${PARMgfs}/post/post_tag_gfs${LEVS}"                              "${DATA}/itag"
+    ${NCP} "${FLTFILEGFS:-${PARMgfs}/post/postxconfig-NT-GFS-TWO.txt}"        "${DATA}/postxconfig-NT.txt"
+    ${NCP} "${FLTFILEGFSF00:-${PARMgfs}/post/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt"
+    ${NCP} "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}"             "${DATA}/params_grib2_tbl_new"
+  fi
+
 }
 
+# Disable variable not used warnings
+# shellcheck disable=SC2034
 WW3_predet(){
   echo "SUB ${FUNCNAME[0]}: WW3 before run type determination"
-  if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi
-  ${NLN} "${COM_WAVE_RESTART}" "restart_wave"
+
+  if [[ ! -d "${COMOUT_WAVE_HISTORY}" ]]; then mkdir -p "${COMOUT_WAVE_HISTORY}"; fi
+  if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}" ; fi
+
+  if [[ ! -d "${DATArestart}/WAVE_RESTART" ]]; then mkdir -p "${DATArestart}/WAVE_RESTART"; fi
+  ${NLN} "${DATArestart}/WAVE_RESTART" "${DATA}/restart_wave"
+
+  # Files from wave prep and wave init jobs
+  # Copy mod_def files for wave grids
+  local ww3_grid
+  if [[ "${waveMULTIGRID}" == ".true." ]]; then
+    local array=("${WAVECUR_FID}" "${WAVEICE_FID}" "${WAVEWND_FID}" "${waveuoutpGRD}" "${waveGRD}" "${waveesmfGRD}")
+    echo "Wave Grids: ${array[*]}"
+    local grdALL
+    # shellcheck disable=SC2312
+    grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ')
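+    # Illustrative example (hypothetical grid IDs): if the array holds "glo_025 aoc_9km glo_025",
+    # grdALL becomes "aoc_9km glo_025 " -- duplicates removed, sorted, and space-separated.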
+
+    for ww3_grid in ${grdALL}; do
+      ${NCP} "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${ww3_grid}" "${DATA}/mod_def.${ww3_grid}" \
+      || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${ww3_grid}' from '${COMIN_WAVE_PREP}'"; exit 1 )
+    done
+  else
+    # If using the single-grid shel driver, there is only one waveGRD, which is linked to mod_def.ww3
+    ${NCP} "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" \
+    || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${waveGRD}' from '${COMIN_WAVE_PREP}'"; exit 1 )
+  fi
+
+  if [[ "${WW3ICEINP}" == "YES" ]]; then
+    local wavicefile="${COMIN_WAVE_PREP}/${RUN}wave.${WAVEICE_FID}.t${current_cycle:8:2}z.ice"
+    if [[ ! -f "${wavicefile}" ]]; then
+      echo "FATAL ERROR: WW3ICEINP='${WW3ICEINP}', but missing ice file '${wavicefile}', ABORT!"
+      exit 1
+    fi
+    ${NCP} "${wavicefile}" "${DATA}/ice.${WAVEICE_FID}" \
+    || ( echo "FATAL ERROR: Unable to copy '${wavicefile}', ABORT!"; exit 1 )
+  fi
+
+  if [[ "${WW3CURINP}" == "YES" ]]; then
+    local wavcurfile="${COMIN_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.t${current_cycle:8:2}z.cur"
+    if [[ ! -f "${wavcurfile}" ]]; then
+      echo "FATAL ERROR: WW3CURINP='${WW3CURINP}', but missing current file '${wavcurfile}', ABORT!"
+      exit 1
+    fi
+    ${NCP} "${wavcurfile}" "${DATA}/current.${WAVECUR_FID}" \
+    || ( echo "FATAL ERROR: Unable to copy '${wavcurfile}', ABORT!"; exit 1 )
+  fi
+
+  # Fix files
+  # If the wave mesh is not the same as the ocean mesh, copy the wave mesh file into the run directory
+  if [[ "${MESH_WAV}" == "${MESH_OCN:-mesh.mx${OCNRES}.nc}" ]]; then
+    echo "Wave is on the same mesh as ocean"
+  else
+    echo "Wave is NOT on the same mesh as ocean"
+    ${NCP} "${FIXgfs}/wave/${MESH_WAV}" "${DATA}/"
+  fi
+
+  WAV_MOD_TAG="${RUN}wave${waveMEMB}"
+  if [[ "${USE_WAV_RMP:-YES}" == "YES" ]]; then
+    local file file_array file_count
+    # shellcheck disable=SC2312
+    mapfile -t file_array < <(find "${FIXgfs}/wave" -name "rmp_src_to_dst_conserv_*" | sort)
+    file_count=${#file_array[@]}
+    if (( file_count > 0 )); then
+      for file in "${file_array[@]}" ; do
+        ${NCP} "${file}" "${DATA}/"
+      done
+    else
+      echo 'FATAL ERROR: No precomputed rmp netCDF files found for the wave model, ABORT!'
+      exit 4
+    fi
+  fi
 }
 
+# shellcheck disable=SC2034
 CICE_predet(){
   echo "SUB ${FUNCNAME[0]}: CICE before run type determination"
+
+  if [[ ! -d "${COMOUT_ICE_HISTORY}" ]]; then mkdir -p "${COMOUT_ICE_HISTORY}"; fi
+  if [[ ! -d "${COMOUT_ICE_RESTART}" ]]; then mkdir -p "${COMOUT_ICE_RESTART}"; fi
+  if [[ ! -d "${COMIN_ICE_INPUT}" ]]; then mkdir -p "${COMIN_ICE_INPUT}"; fi
+
   if [[ ! -d "${DATA}/CICE_OUTPUT" ]]; then  mkdir -p "${DATA}/CICE_OUTPUT"; fi
-  if [[ ! -d "${DATA}/CICE_RESTART" ]]; then mkdir -p "${DATA}/CICE_RESTART"; fi
+  if [[ ! -d "${DATArestart}/CICE_RESTART" ]]; then mkdir -p "${DATArestart}/CICE_RESTART"; fi
+  ${NLN} "${DATArestart}/CICE_RESTART" "${DATA}/CICE_RESTART"
+
+  # CICE does not have a concept of high frequency output like FV3
+  # Convert output settings into an explicit list for CICE
+  if (( $(( ( cyc + FHMIN ) % FHOUT_ICE )) == 0 )); then
+    # shellcheck disable=SC2312
+    mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10
+  else
+    CICE_OUTPUT_FH=("${FHMIN}")
+    # shellcheck disable=SC2312
+    mapfile -t -O "${#CICE_OUTPUT_FH[@]}" CICE_OUTPUT_FH < <(seq "$(( FHMIN + $(( ( cyc + FHMIN ) % FHOUT_ICE )) ))" "${FHOUT_ICE}" "${FHMAX}") || exit 10
+    CICE_OUTPUT_FH+=("${FHMAX}")
+  fi
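+  # Worked example (hypothetical values): with cyc=00, FHMIN=0, FHOUT_ICE=6, FHMAX=12 the first
+  # branch gives CICE_OUTPUT_FH=(0 6 12); with cyc=03 the offset branch gives (0 3 9 12),
+  # i.e. FHMIN, then every hour at which (cyc + fh) is a multiple of FHOUT_ICE, then FHMAX.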
+
+  # Fix files
+  ${NCP} "${FIXgfs}/cice/${ICERES}/${CICE_GRID}" "${DATA}/"
+  ${NCP} "${FIXgfs}/cice/${ICERES}/${CICE_MASK}" "${DATA}/"
+  ${NCP} "${FIXgfs}/cice/${ICERES}/${MESH_ICE}"  "${DATA}/"
+
 }
 
+# shellcheck disable=SC2034
 MOM6_predet(){
   echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination"
+
+  if [[ ! -d "${COMOUT_OCEAN_HISTORY}" ]]; then mkdir -p "${COMOUT_OCEAN_HISTORY}"; fi
+  if [[ ! -d "${COMOUT_OCEAN_RESTART}" ]]; then mkdir -p "${COMOUT_OCEAN_RESTART}"; fi
+  if [[ ! -d "${COMIN_OCEAN_INPUT}" ]]; then mkdir -p "${COMIN_OCEAN_INPUT}"; fi
+
   if [[ ! -d "${DATA}/MOM6_OUTPUT" ]]; then mkdir -p "${DATA}/MOM6_OUTPUT"; fi
-  if [[ ! -d "${DATA}/MOM6_RESTART" ]]; then mkdir -p "${DATA}/MOM6_RESTART"; fi
+  if [[ ! -d "${DATArestart}/MOM6_RESTART" ]]; then mkdir -p "${DATArestart}/MOM6_RESTART"; fi
+  ${NLN} "${DATArestart}/MOM6_RESTART" "${DATA}/MOM6_RESTART"
+
+  # MOM6 does not have a concept of high frequency output like FV3
+  # Convert output settings into an explicit list for MOM6
+  MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCN}" "${FHMAX}")
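+  # e.g. (hypothetical values) FHMIN=0, FHOUT_OCN=6, FHMAX=12 gives MOM6_OUTPUT_FH="0 6 12"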
+
+  # If using stochastic parameterizations, create a seed that does not exceed the
+  # largest signed integer
+  if (( MEMBER > 0 )); then  # these are only applicable for ensemble members
+    local imem=$((10#${MEMBER}))  # force base-10 so zero-padded member numbers are not read as octal
+    local base_seed=$((current_cycle*10000 + imem*100))
+
+    if [[ "${DO_OCN_SPPT:-}" == "YES" ]]; then
+      ISEED_OCNSPPT=$((base_seed + 8)),$((base_seed + 9)),$((base_seed + 10)),$((base_seed + 11)),$((base_seed + 12))
+    fi
+
+    if [[ "${DO_OCN_PERT_EPBL:-}" == "YES" ]]; then
+      ISEED_EPBL=$((base_seed + 13)),$((base_seed + 14)),$((base_seed + 15)),$((base_seed + 16)),$((base_seed + 17))
+    fi
+  fi
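+  # Worked example (hypothetical values): for current_cycle=2021032312 and MEMBER=003,
+  # base_seed = 2021032312*10000 + 3*100 = 20210323120300, well below the 64-bit signed
+  # integer limit; with DO_OCN_SPPT=YES, ISEED_OCNSPPT becomes 20210323120308,...,20210323120312.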
+
+  # Fix files
+  ${NCP} "${FIXgfs}/mom6/${OCNRES}/"* "${DATA}/INPUT/"  # TODO: These need to be explicit
+
+  # Copy coupled grid_spec
+  local spec_file
+  spec_file="${FIXgfs}/cpl/a${CASE}o${OCNRES}/grid_spec.nc"
+  if [[ -s "${spec_file}" ]]; then
+    ${NCP} "${spec_file}" "${DATA}/INPUT/"
+  else
+    echo "FATAL ERROR: coupled grid_spec file '${spec_file}' does not exist"
+    exit 3
+  fi
+
+}
+
+CMEPS_predet(){
+  echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination"
+
+  if [[ ! -d "${COMOUT_MED_RESTART}" ]]; then mkdir -p "${COMOUT_MED_RESTART}"; fi
+
+  if [[ ! -d "${DATArestart}/CMEPS_RESTART" ]]; then mkdir -p "${DATArestart}/CMEPS_RESTART"; fi
+  ${NLN} "${DATArestart}/CMEPS_RESTART" "${DATA}/CMEPS_RESTART"
+
+}
+
+# shellcheck disable=SC2034
+GOCART_predet(){
+  echo "SUB ${FUNCNAME[0]}: GOCART before run type determination"
+
+  if [[ ! -d "${COMOUT_CHEM_HISTORY}" ]]; then mkdir -p "${COMOUT_CHEM_HISTORY}"; fi
+
+  GOCART_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "6" "${FHMAX}")
+  # TODO: the output frequency is hardwired to 6 hours in AERO_HISTORY.rc
 }
diff --git a/ush/fv3gfs_remap.sh b/ush/fv3gfs_remap.sh
deleted file mode 100755
index 7986add331..0000000000
--- a/ush/fv3gfs_remap.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#! /usr/bin/env bash
-
-#--------------------------------------
-#-- remap FV3 6 tiles to global array
-#-- Fanglin Yang, October 2016
-#--------------------------------------
-
-source "$HOMEgfs/ush/preamble.sh"
-
-export CDATE=${CDATE:-"2016100300"}
-export CASE=${CASE:-"C192"}           # C48 C96 C192 C384 C768 C1152 C3072
-export GG=${master_grid:-"0p25deg"}   # 1deg 0p5deg 0p25deg 0p125deg
-
-pwd=$(pwd)
-export DATA=${DATA:-$pwd}
-export HOMEgfs=${HOMEgfs:-$PACKAGEROOT}
-export FIXgfs=${FIXgfs:-$HOMEgfs/fix}
-export FIXorog=${FIXorog:-$FIXgfs/orog}
-export REMAPEXE=${REMAPEXE:-$HOMEgfs/exec/fregrid_parallel}
-export IPD4=${IPD4:-"YES"}
-
-cycn=$(echo $CDATE | cut -c 9-10)
-export TCYC=${TCYC:-".t${cycn}z."}
-export CDUMP=${CDUMP:-gfs}
-export PREFIX=${PREFIX:-${CDUMP}${TCYC}}
-
-#--------------------------------------------------
-export grid_loc=${FIXorog}/${CASE}/${CASE}_mosaic.nc
-export weight_file=${FIXorog}/${CASE}/remap_weights_${CASE}_${GG}.nc
-
-export APRUN_REMAP=${APRUN_REMAP:-${APRUN:-""}}
-export NTHREADS_REMAP=${NTHREADS_REMAP:-${NTHREADS:-1}}
-
-#--------------------------------------------------
-if [ $GG = 1deg    ];  then  export nlon=360  ; export nlat=180  ; fi
-if [ $GG = 0p5deg  ];  then  export nlon=720  ; export nlat=360  ; fi
-if [ $GG = 0p25deg ];  then  export nlon=1440 ; export nlat=720  ; fi
-if [ $GG = 0p125deg ]; then  export nlon=2880 ; export nlat=1440 ; fi
-
-#--------------------------------------------------
-hgt=h; if [ $IPD4 = YES ]; then hgt=z; fi
-
-#--for non-hydrostatic case
-export atmos_4xdaily_nh="slp, vort850, vort200,\
-        us, u1000, u850, u700, u500, u200, u100, u50, u10,\
-        vs, v1000, v850, v700, v500, v200, v100, v50, v10,\
-        tm, t1000, t850, t700, t500, t200, t100, t50, t10,\
-        ${hgt}1000, ${hgt}850, ${hgt}700, ${hgt}500, ${hgt}200, ${hgt}100, ${hgt}50, ${hgt}10,\
-        q1000, q850, q700, q500, q200, q100, q50, q10,\
-        rh1000, rh850, rh700, rh500, rh200,\
-        omg1000, omg850, omg700, omg500, omg200, omg100, omg50, omg10,\
-        w700,w850,w500, w200"
-
-#--for hydrostatic case
-export atmos_4xdaily_hy="slp, vort850, vort200,\
-        us, u1000, u850, u700, u500, u200, u100, u50, u10,\
-        vs, v1000, v850, v700, v500, v200, v100, v50, v10,\
-        tm, t1000, t850, t700, t500, t200, t100, t50, t10,\
-        ${hgt}1000, ${hgt}850, ${hgt}700, ${hgt}500, ${hgt}200, ${hgt}100, ${hgt}50, ${hgt}10,\
-        q1000, q850, q700, q500, q200, q100, q50, q10,\
-        rh1000, rh850, rh700, rh500, rh200,\
-        omg1000, omg850, omg700, omg500, omg200, omg100, omg50, omg10,\
-        w700"
-
-export nggps2d_nh="ALBDOsfc, CPRATsfc, PRATEsfc, DLWRFsfc, ULWRFsfc,\
-        DSWRFsfc, USWRFsfc, DSWRFtoa, USWRFtoa, ULWRFtoa,\
-        GFLUXsfc, HGTsfc, HPBLsfc, ICECsfc, SLMSKsfc,\
-        LHTFLsfc, SHTFLsfc, PRESsfc, PWATclm, SOILM,\
-        SOILW1, SOILW2, SOILW3, SOILW4, SPFH2m,\
-        TCDCclm, TCDChcl, TCDClcl, TCDCmcl,\
-        SOILT1, SOILT2, SOILT3, SOILT4,\
-        TMP2m, TMPsfc, UGWDsfc, VGWDsfc, UFLXsfc,\
-        VFLXsfc, UGRD10m, VGRD10m, WEASDsfc, SNODsfc,\
-        ZORLsfc, VFRACsfc, F10Msfc, VTYPEsfc, STYPEsfc"
-export nggps2d_hy="$nggps2d_nh"
-
-export nggps3d_nh="ucomp, vcomp, temp, delp, sphum, o3mr, clwmr, nhpres, w, delz"     #for non-hydrostatic case
-export nggps3d_hy="ucomp, vcomp, temp, delp, sphum, o3mr, clwmr, hypres"              #for hydrostatic case
-
-#--------------------------------------------------
-cd $DATA || exit 8
-
-testfile=nggps3d.tile4.nc
-nhrun=$(ncdump -c $testfile | grep nhpres)
-nhrun=$?
-
-export OMP_NUM_THREADS=$NTHREADS_REMAP
-
-#--------------------------------------------------
-err=0
-for type in atmos_4xdaily nggps2d nggps3d ; do
-
-  export in_file="$type"
-  export out_file=${PREFIX}${type}.${GG}.nc
-  [[ -s $DATA/$out_file ]] && rm -f $DATA/$out_file
-  if [ $nhrun -eq 0 ]; then
-    export fld=$(eval echo \${${type}_nh})
-  else
-    export fld=$(eval echo \${${type}_hy})
-  fi
-
-  $APRUN_REMAP $REMAPEXE --input_dir $DATA \
-                         --input_file $in_file \
-                         --output_dir $DATA \
-                         --output_file $out_file \
-                         --input_mosaic $grid_loc \
-                         --scalar_field "$fld" \
-                         --interp_method conserve_order1 \
-                         --remap_file $weight_file \
-                         --nlon $nlon \
-                         --nlat $nlat
-  rc=$?
-  ((err+=$rc))
-
-done
-
-exit $err
-
diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh
index 1a0441a06f..4ac762824b 100755
--- a/ush/gaussian_sfcanl.sh
+++ b/ush/gaussian_sfcanl.sh
@@ -23,17 +23,7 @@
 #     OUTPUT_FILE   Output gaussian analysis file format.  Default is "nemsio"
 #                   Set to "netcdf" for netcdf output file
 #                   Otherwise, output in nemsio.
-#     BASEDIR       Root directory where all scripts and fixed files reside.
-#                   Default is /nwprod2.
-#     HOMEgfs       Directory for gfs version.  Default is
-#                   $BASEDIR/gfs_ver.v15.0.0}
-#     FIXam         Directory for the global fixed climatology files.
-#                   Defaults to $HOMEgfs/fix/am
-#     FIXorog       Directory for the model grid and orography netcdf
-#                   files.  Defaults to $HOMEgfs/fix/orog
 #     FIXWGTS       Weight file to use for interpolation
-#     EXECgfs       Directory of the program executable.  Defaults to
-#                   $HOMEgfs/exec
 #     DATA          Working directory
 #                   (if nonexistent will be made, used and deleted)
 #                   Defaults to current working directory
@@ -85,7 +75,7 @@
 #
 #     fixed data : ${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc
 #                  ${FIXWGTS}
-#                  ${FIXam}/global_hyblev.l65.txt
+#                  ${FIXgfs}/am/global_hyblev.l65.txt
 #
 #     input data : ${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc
 #
@@ -110,7 +100,7 @@
 #
 ################################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 CASE=${CASE:-C768}
 res=$(echo $CASE | cut -c2-)
@@ -121,25 +111,17 @@ LATB_SFC=${LATB_SFC:-$LATB_CASE}
 DONST=${DONST:-"NO"}
 LEVS=${LEVS:-64}
 LEVSP1=$(($LEVS+1))
-#  Directories.
-gfs_ver=${gfs_ver:-v16.3.0}
-BASEDIR=${BASEDIR:-${PACKAGEROOT:-/lfs/h1/ops/prod/packages}}
-HOMEgfs=${HOMEgfs:-$BASEDIR/gfs.${gfs_ver}}
-EXECgfs=${EXECgfs:-$HOMEgfs/exec}
-FIXorog=${FIXorog:-$HOMEgfs/fix/orog}
-FIXam=${FIXam:-$HOMEgfs/fix/am}
-FIXWGTS=${FIXWGTS:-$FIXorog/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc}
+FIXWGTS=${FIXWGTS:-${FIXorog}/${CASE}/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc}
 DATA=${DATA:-$(pwd)}
 
 #  Filenames.
 XC=${XC:-}
 GAUSFCANLEXE=${GAUSFCANLEXE:-$EXECgfs/gaussian_sfcanl.x}
-SIGLEVEL=${SIGLEVEL:-$FIXam/global_hyblev.l${LEVSP1}.txt}
+SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVSP1}.txt}
 
 CDATE=${CDATE:?}
 
 #  Other variables.
-export NLN=${NLN:-"/bin/ln -sf"}
 export PGMOUT=${PGMOUT:-${pgmout:-'&1'}}
 export PGMERR=${PGMERR:-${pgmerr:-'&2'}}
 export REDOUT=${REDOUT:-'1>'}
@@ -150,17 +132,9 @@ export REDERR=${REDERR:-'2>'}
 #  Preprocessing
 ${INISCRIPT:-}
 pwd=$(pwd)
-if [[ -d $DATA ]]
-then
-   mkdata=NO
-else
-   mkdir -p $DATA
-   mkdata=YES
-fi
-cd $DATA||exit 99
+cd "${DATA}" || exit 99
 [[ -d "${COM_ATMOS_ANALYSIS}" ]] || mkdir -p "${COM_ATMOS_ANALYSIS}"
 [[ -d "${COM_ATMOS_RESTART}" ]] || mkdir -p "${COM_ATMOS_RESTART}"
-cd $DATA
 
 ################################################################################
 #  Make surface analysis
@@ -224,6 +198,5 @@ $ERRSCRIPT||exit 2
 ################################################################################
 #  Postprocessing
 cd $pwd
-[[ $mkdata = YES ]]&&rmdir $DATA
 
 exit ${err}
diff --git a/ush/getdump.sh b/ush/getdump.sh
index 462ca5e755..12deb725e1 100755
--- a/ush/getdump.sh
+++ b/ush/getdump.sh
@@ -1,13 +1,13 @@
 #! /usr/bin/env bash
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 COMPONENT=${COMPONENT:-atmos}
 
 CDATE=${1:-""}
-CDUMP=${2:-""}
-SOURCE_DIR=${3:-$DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${COMPONENT}}
-TARGET_DIR=${4:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/${COMPONENT}}
+RUN=${2:-""}
+SOURCE_DIR=${3:-$DMPDIR/${RUN}${DUMP_SUFFIX}.${PDY}/${cyc}/${COMPONENT}}
+TARGET_DIR=${4:-$ROTDIR/${RUN}.${PDY}/${cyc}/${COMPONENT}}
 
 DUMP_SUFFIX=${DUMP_SUFFIX:-""}
 
@@ -24,14 +24,14 @@ if [ ! -s $TARGET_DIR ]; then mkdir -p $TARGET_DIR ;fi
 
 # Set file prefix
 cyc=$(echo $CDATE |cut -c 9-10)
-prefix="$CDUMP.t${cyc}z."
+prefix="$RUN.t${cyc}z."
 
 
 # Link dump files from SOURCE_DIR to TARGET_DIR
 cd $SOURCE_DIR
 if [ -s ${prefix}updated.status.tm00.bufr_d ]; then
     for file in $(ls ${prefix}*); do
-	ln -fs $SOURCE_DIR/$file $TARGET_DIR/$file
+	${NLN} $SOURCE_DIR/$file $TARGET_DIR/$file
     done
 else
     echo "***ERROR*** ${prefix}updated.status.tm00.bufr_d NOT FOUND in $SOURCE_DIR"
diff --git a/ush/getges.sh b/ush/getges.sh
index 2fb54fccc7..d960354bf4 100755
--- a/ush/getges.sh
+++ b/ush/getges.sh
@@ -76,7 +76,7 @@
 ################################################################################
 #-------------------------------------------------------------------------------
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Set some default parameters.
 fhbeg=03                         # hour to begin searching backward for guess
diff --git a/ush/gfs_bfr2gpk.sh b/ush/gfs_bfr2gpk.sh
index add68536ec..dbd8defb0e 100755
--- a/ush/gfs_bfr2gpk.sh
+++ b/ush/gfs_bfr2gpk.sh
@@ -10,7 +10,7 @@
 # Log:									#
 # K. Brill/HPC		04/12/05					#
 #########################################################################  
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Set GEMPAK paths.
 
diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh
index 5ed05f9beb..8a7d9b1091 100755
--- a/ush/gfs_bufr.sh
+++ b/ush/gfs_bufr.sh
@@ -17,9 +17,10 @@
 # 2018-05-22 Guang Ping Lou: Making it work for both GFS and FV3GFS 
 # 2018-05-30  Guang Ping Lou: Make sure all files are available.
 # 2019-10-10  Guang Ping Lou: Read in NetCDF files
+# 2024-03-03 Bo Cui: Add options to use different bufr tables for different-resolution NetCDF files
 # echo "History: February 2003 - First implementation of this utility script"
 #
-source "${HOMEgfs:?}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 if [[ "${F00FLAG}" == "YES" ]]; then
    f00flag=".true."
@@ -50,44 +51,50 @@ cat << EOF > gfsparm
 /
 EOF
 
+sleep_interval=10
+max_tries=1000
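+# With sleep_interval=10 and max_tries=1000, wait_for_file polls for up to roughly
+# 10000 seconds (~2.8 hours) per forecast-hour log file before giving up.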
 for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do
    hh2=$(printf %02i "${hr}")
    hh3=$(printf %03i "${hr}")
 
    #---------------------------------------------------------
    # Make sure all files are available:
-   ic=0
-   while (( ic < 1000 )); do
-      if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}" ]]; then
-          sleep 10
-          ic=$((ic + 1))
-      else
-          break
-      fi
-
-      if (( ic >= 360 )); then
-         echo "FATAL: COULD NOT LOCATE logf${hh3} file AFTER 1 HOUR"
-         exit 2
-      fi
-   done
+   filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}"
+   if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then
+     echo "FATAL ERROR: COULD NOT LOCATE logf${hh3} file"
+     exit 2
+   fi
+
    #------------------------------------------------------------------
-   ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" 
-   ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}"
+   ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}"
+   ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}"
 done
 
 #  define input BUFR table file.
-ln -sf "${PARMbufrsnd}/bufr_gfs_${CLASS}.tbl" fort.1
-ln -sf "${STNLIST:-${PARMbufrsnd}/bufr_stalist.meteo.gfs}" fort.8
-ln -sf "${PARMbufrsnd}/bufr_ij13km.txt" fort.7
+${NLN} "${PARMgfs}/product/bufr_gfs_${CLASS}.tbl" fort.1
+${NLN} "${STNLIST:-${PARMgfs}/product/bufr_stalist.meteo.gfs}" fort.8
+
+case "${CASE}" in
+    "C768")
+        ${NLN} "${PARMgfs}/product/bufr_ij13km.txt" fort.7
+        ;;
+    "C1152")
+        ${NLN} "${PARMgfs}/product/bufr_ij9km.txt"  fort.7
+        ;;
+    *)
+        echo "WARNING: No bufr table for this resolution, using the one for C768"
+        ${NLN} "${PARMgfs}/product/bufr_ij13km.txt" fort.7
+        ;;
+esac
 
-${APRUN_POSTSND} "${EXECbufrsnd}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}"
+${APRUN_POSTSND} "${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}"
 export err=$?
 
-if [ $err -ne 0 ]; then
+if [[ "${err}" -ne 0 ]]; then
    echo "GFS postsnd job error, Please check files "
    echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh2}.${atmfm}"
    echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh2}.${atmfm}"
    err_chk
 fi
 
-exit ${err}
+exit "${err}"
diff --git a/ush/gfs_bufr_netcdf.sh b/ush/gfs_bufr_netcdf.sh
index b358c6b69a..f03ff3b9af 100755
--- a/ush/gfs_bufr_netcdf.sh
+++ b/ush/gfs_bufr_netcdf.sh
@@ -19,7 +19,7 @@
 # 2019-10-10  Guang Ping Lou: Read in NetCDF files
 # echo "History: February 2003 - First implementation of this utility script"
 #
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 if test "$F00FLAG" = "YES"
 then
@@ -66,6 +66,9 @@ hh=$FSTART
       hh1=$(echo "${hh#"${hh%??}"}")
       hh=$hh1
    fi
+
+sleep_interval=10
+max_tries=360
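+# 10 s x 360 tries is roughly a 1-hour wait, matching the previous hard-coded limit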
 while  test $hh -le $FEND
 do  
    if test $hh -lt 100
@@ -75,27 +78,14 @@ do
       hh2=$hh
    fi
 
-#---------------------------------------------------------
-# Make sure all files are available:
-   ic=0
-   while [ $ic -lt 1000 ]
-   do
-      if [ ! -f $COMIN/${RUN}.${cycle}.logf${hh2}.txt ]
-      then
-          sleep 10
-          ic=$(expr $ic + 1)
-      else
-          break
-      fi
+   filename="${COMIN}/${RUN}.${cycle}.logf${hh2}.txt"
+   if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}" ; then
+     err_exit "FATAL ERROR COULD NOT LOCATE logf${hh2} file"
+   fi
 
-      if [ $ic -ge 360 ]
-      then
-         err_exit "COULD NOT LOCATE logf${hh2} file AFTER 1 HOUR"
-      fi
-   done
 #------------------------------------------------------------------
-   ln -sf $COMIN/${RUN}.${cycle}.atmf${hh2}.nc sigf${hh} 
-   ln -sf $COMIN/${RUN}.${cycle}.${SFCF}f${hh2}.nc flxf${hh}
+   ${NLN} $COMIN/${RUN}.${cycle}.atmf${hh2}.nc sigf${hh}
+   ${NLN} $COMIN/${RUN}.${cycle}.${SFCF}f${hh2}.nc flxf${hh}
 
    hh=$( expr $hh + $FINT )
    if test $hh -lt 10
@@ -105,11 +95,11 @@ do
 done  
 
 #  define input BUFR table file.
-ln -sf $PARMbufrsnd/bufr_gfs_${CLASS}.tbl fort.1
-ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8
-ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7
+${NLN} ${PARMgfs}/product/bufr_gfs_${CLASS}.tbl fort.1
+${NLN} ${STNLIST:-${PARMgfs}/product/bufr_stalist.meteo.gfs} fort.8
+${NLN} ${PARMgfs}/product/bufr_ij13km.txt fort.7
 
-${APRUN_POSTSND} "${EXECbufrsnd}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}"
+${APRUN_POSTSND} "${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}"
 export err=$?
 
 exit ${err}
diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh
index 99c5c68fa3..ade49eec36 100755
--- a/ush/gfs_sndp.sh
+++ b/ush/gfs_sndp.sh
@@ -7,7 +7,7 @@
 #   1) 2004-09-10       Steve Gilbert       First Implementation
 ################################################################
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
   #  Create "collectives" consisting of groupings of the soundings
   #  into files designated by geographical region.   Each input
@@ -16,7 +16,7 @@ source "$HOMEgfs/ush/preamble.sh"
 export m=$1
 mkdir $DATA/$m
 cd $DATA/$m
-  cp $FIXbufrsnd/gfs_collective${m}.list $DATA/$m/. 
+  cp ${FIXgfs}/product/gfs_collective${m}.list $DATA/$m/.
   CCCC=KWBC
     file_list=gfs_collective${m}.list
 
@@ -37,7 +37,7 @@ cd $DATA/$m
        #. prep_step
        export FORT11=$DATA/${m}/bufrin
        export FORT51=./bufrout
-       ${EXECbufrsnd}/${pgm} << EOF
+       ${EXECgfs}/${pgm} << EOF
  &INPUT
   BULHED="$WMOHEAD",KWBX="$CCCC",
   NCEP2STD=.TRUE.,
diff --git a/ush/gfs_truncate_enkf.sh b/ush/gfs_truncate_enkf.sh
index 0a7d6fc0dd..6102ada75d 100755
--- a/ush/gfs_truncate_enkf.sh
+++ b/ush/gfs_truncate_enkf.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 member=$1
 export SIGINP=$2
@@ -14,17 +14,16 @@ mkdir -p $DATATMP
 cd $DATATMP
 
 export LEVS=${LEVS_LORES:-64}
-export FIXam=${FIXam:-$HOMEgfs/fix/am}
 
 export CHGRESSH=${CHGRESSH:-${USHgfs}/global_chgres.sh}
 export CHGRESEXEC=${CHGRESEXEC-${EXECgfs}/global_chgres}
-export OROGRAPHY=${OROGRAPHY_LORES:-$FIXam/global_orography.t$JCAP.$LONB.$LATB.grb}
-export OROGRAPHY_UF=${OROGRAPHY_UF_LORES:-$FIXam/global_orography_uf.t$JCAP.$LONB.$LATB.grb}
-export LONSPERLAT=${LONSPERLAT_LORES:-$FIXam/global_lonsperlat.t${JCAP}.$LONB.$LATB.txt}
-export SLMASK=${SLMASK_LORES:-$FIXam/global_slmask.t$JCAP.$LONB.$LATB.grb}
-export MTNVAR=${MTNVAR_LORES:-$FIXam/global_mtnvar.t$JCAP.$LONB.$LATB.f77}
-export SIGLEVEL=${SIGLEVEL_LORES:-$FIXam/global_hyblev.l${LEVS}.txt}
-export O3CLIM=${O3CLIM:-$FIXam/global_o3clim.txt}
+export OROGRAPHY=${OROGRAPHY_LORES:-${FIXgfs}/am/global_orography.t$JCAP.$LONB.$LATB.grb}
+export OROGRAPHY_UF=${OROGRAPHY_UF_LORES:-${FIXgfs}/am/global_orography_uf.t$JCAP.$LONB.$LATB.grb}
+export LONSPERLAT=${LONSPERLAT_LORES:-${FIXgfs}/am/global_lonsperlat.t${JCAP}.$LONB.$LATB.txt}
+export SLMASK=${SLMASK_LORES:-${FIXgfs}/am/global_slmask.t$JCAP.$LONB.$LATB.grb}
+export MTNVAR=${MTNVAR_LORES:-${FIXgfs}/am/global_mtnvar.t$JCAP.$LONB.$LATB.f77}
+export SIGLEVEL=${SIGLEVEL_LORES:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt}
+export O3CLIM=${O3CLIM:-${FIXgfs}/am/global_o3clim.txt}
 
 use_ufo=.true.
 
diff --git a/ush/global_savefits.sh b/ush/global_savefits.sh
index f26132dd8a..973d27a358 100755
--- a/ush/global_savefits.sh
+++ b/ush/global_savefits.sh
@@ -3,7 +3,7 @@
 ########################################################
 #  save fit and horiz files for all analysis cycles
 ########################################################
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 export FIT_DIR=${FIT_DIR:-$COMOUT/fits}
 export HORZ_DIR=${HORZ_DIR:-$COMOUT/horiz}
diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh
deleted file mode 120000
index 7ae08ab214..0000000000
--- a/ush/hpssarch_gen.sh
+++ /dev/null
@@ -1 +0,0 @@
-hpssarch_gen_gsl.sh
\ No newline at end of file
diff --git a/ush/icepost.ncl b/ush/icepost.ncl
deleted file mode 100755
index ad102971c4..0000000000
--- a/ush/icepost.ncl
+++ /dev/null
@@ -1,382 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap CICE5 output on the tripole grid to 
-; a set of rectilinear grids using pre-computed ESMF weights to remap 
-; the listed fields to the destination grid and write the results 
-; to a new netCDF file
-; 
-; See ocnpost.ncl for a complete description
-;
-; Bin.Li@noaa.gov (May 2019)
-; This script is revised to be used in the coupled workflow.
-; Revised parts are marked by <BL. and BL.>
-
-  load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-
-;************************************************
-; specify parameters
-;************************************************
-; <BL.
-; pull from environment
-  COMDIR          = getenv("COMOUTice")
-  IDATE           = getenv("IDATE")
-  FHR             = getenv("FHR")
-  ENSMEM          = getenv("ENSMEM")
-  DATA_TMP        = getenv("DATA")
-  nemsrc          = getenv("FIXreg2grb2")
-;   nemsrc     = "/scratch2/NCEPDEV/climate/Bin.Li/S2S/fix/ocean_ice_post/FIXDIR/"
-
-; calculate verification date
-  VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE))
-; BL.>
-
-   output_masks = False
- ; destination grid sizes and name
-  dsttype     = (/"rect."/)
- ;dstgrds     = (/"1p0", "0p5", "0p25"/)
-; <BL.
-  dstgrds     = (/"0p25"/)
-; BL.>
-
- ; specify a location to use
- ;  nemsrc     = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
- ; interpolation methods
-   methods        = (/"bilinear" ,"conserve"/)
- ; ocean model output location 
- ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ice/"
-
-
-   ; variables to be regridded with the native tripole stagger location
-
-   varlist = (/ (/     "hi_h", "Ct", "bilinear"/) \
-               ,(/     "hs_h", "Ct", "bilinear"/) \
-               ,(/   "Tsfc_h", "Ct", "bilinear"/) \
-               ,(/   "aice_h", "Ct", "bilinear"/) \
-               ,(/    "sst_h", "Ct", "bilinear"/) \
-             /)
-     dims = dimsizes(varlist)
-    nvars = dims(0)
-   delete(dims)
-   ;print(varlist)
-
-    ; vectors to be regridded with the native tripole stagger location
-    ; and dimensionality 
-    ; note: vectors are always unstaggered using bilinear weights, but can
-    ; be remapped using conservative
-   nvpairs = 1
-   veclist = new( (/nvpairs,3,2/),"string")
-   veclist = (/ (/ (/"uvel_h", "vvel_h"/), (/"Bu", "Bu"/), (/"bilinear", "bilinear"/) /) \
-             /)
-   ;print(veclist)
-
-   begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run 
-;----------------------------------------------------------------------
-;   idate = "20120101"
-;   icefilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ice*.nc")
-;          icef = addfiles(icefilelist,"r")
-;   nfiles = dimsizes(icefilelist)
-; <BL.
-; specify input file name and input directory
-   icefilelist = "ice"+VDATE+"."+ENSMEM+"."+IDATE+".nc"
-   icef = addfiles(COMDIR+"/"+icefilelist,"r")
-; BL.>
-
-  ; get the rotation angle 
-    angleT = icef[0]->ANGLET
-
-  ; get a 2 dimensional fields for creating the interpolation mask
-  ; the mask2d contain 1's on land and 0's at valid points.
-  mask2d = where(ismissing(icef[0]->sst_h),  1.0, 0.0)
-  ;printVarSummary(mask2d)
-
-  ; create conformed rotation arrays to make vector rotations cleaner
-  angleT2d=conform_dims(dimsizes(mask2d),angleT,(/1,2/)) 
-
-;----------------------------------------------------------------------
-; loop over the output resolutions 
-;----------------------------------------------------------------------
-
-      jj = 1
-      ii = 0
-
-   do jj = 0,dimsizes(dstgrds)-1
-    ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
-    outres = dstgrds(jj)+"x"+dstgrds(jj)
-   outgrid = dstgrds(jj)
-
-   ; regrid a field to obtain the output xy dimensions
-    wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
-          tt = ESMF_regrid_with_weights(angleT,wgtsfile,False)
-        tt!0 = "lat"
-        tt!1 = "lon"
-         lat = tt&lat
-         lon = tt&lon
-        dims = dimsizes(tt)
-        nlat = dims(0)
-        nlon = dims(1)
-     print("fields will be remapped to destination grid size "\
-           +nlon+"  "+nlat)
-  
-     delete(tt)
-     delete(dims)
-
-    ; regrid the masks to obtain the interpolation masks. 
-    ; the mask2d contain 1's on land and 0's at valid points.
-    ; when remapped, any mask value > 0 identifies land values that 
-    ; have crept into the field. remapped model fields are then
-    ; masked with this interpolation mask
-
-    wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
-    rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
-
-    if(output_masks)then
-     testfile = "masks_"+dstgrds(jj)+".nc"
-     system("/bin/rm -f "+testfile)
-     ; create
-     testcdf = addfile(testfile,"c")
-     testcdf->rgmask2d = rgmask2d
-     ; close
-     delete(testcdf)
-    end if
-
-    ; create the interpolation mask
-    rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
-
-;----------------------------------------------------------------------
-; loop over each file in the icefilelist
-;----------------------------------------------------------------------
-; <BL. removing the file loop 
-;   do ii = 0,nfiles-1
-;     infile = icefilelist(ii)
-;     print("working on "+infile)
-;    ; create the output file name
-;    outfile = infile
-    ; find the index where the actual filename begins
-;     indstr = str_index_of_substr(outfile, "/", -1)
-    ; insert a sub-directory name
-;    outfile = str_insert(outfile,outgrid+"/",indstr+1)
-    ; insert a string indicating regridding resolution
-;    outfile = str_insert(outfile,outres,-4)
-    ; find the index where the actual filename begins
-;     indstr = str_index_of_substr(outfile, "ice", -1)
-    ; insert an 'r' after the leading occurence of 'ice' 
-    ; to indicate regular grid
-;    outfile = str_insert(outfile,"r",indstr+3)
-    ; check if the outgrid directory exists and create
-    ; it if it does not
-;    ret = systemfunc("test -d "+dirsrc+"gfs."+idate+"/00/"+outgrid+"; echo $?")
-;    if(ret .eq. 1)then
-;     system("mkdir "+dirsrc+"gfs."+idate+"/00/"+outgrid)
-;    end if
-; BL.>
-    ; retrieve the time stamp
-       time = icef[0]->time
-    delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-;    system("/bin/rm -f " + outfile)    ; remove if exists
-;    outcdf  = addfile (outfile, "c")  ; open output file
-;
-; <BL.
-; specify output file information and open file for output
-  FILENAME_REGRID = DATA_TMP+"/icer"+VDATE+"."+ENSMEM+"."+IDATE+"_"+outres+"_CICE.nc"
-  if (isfilepresent(FILENAME_REGRID)) then
-    system("rm -f "+FILENAME_REGRID)
-  end if
-  outcdf = addfile(FILENAME_REGRID,"c")
-  infile = icefilelist
-; BL.>
-
-    ; explicitly declare file definition mode. Improve efficiency.
-    setfileoption(outcdf,"DefineMode",True)
-
-    ; create global attributes of the file
-    fAtt               = True            ; assign file attributes
-    fAtt@creation_date = systemfunc ("date")
-    fAtt@source_file   = infile        
-    fileattdef( outcdf, fAtt )           ; copy file attributes    
-
-    ; predefine the coordinate variables and their dimensionality
-    dimNames = (/"time", "lat", "lon"/)  
-    dimSizes = (/ -1   ,  nlat,  nlon/) 
-    dimUnlim = (/ True , False, False/)   
-    filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
-    ; predefine the the dimensionality of the variables to be written out
-    filevardef(outcdf, "time", typeof(time), getvardims(time)) 
-    filevardef(outcdf,  "lat",  typeof(lat),  getvardims(lat))                          
-    filevardef(outcdf,  "lon",  typeof(lon),  getvardims(lon))                          
-
-    ; Copy attributes associated with each variable to the file
-    filevarattdef(outcdf, "time", time)                 
-    filevarattdef(outcdf,  "lat",  lat)             
-    filevarattdef(outcdf,  "lon",  lon)            
-
-    ; predefine variables
-    do nv = 0,nvars-1
-     varname = varlist(nv,0)
-       odims = (/"time", "lat", "lon"/)
-      ;print("creating variable "+varname+" in file")
-      filevardef(outcdf, varname, "float", odims)
-      delete(odims)
-    end do
-
-    do nv = 0,nvpairs-1
-     do nn = 0,1
-     vecname = veclist(nv,0,nn)
-      odims = (/"time", "lat", "lon"/)
-     ;print("creating variable "+vecname+" in file")
-     filevardef(outcdf, vecname, "float", odims) 
-     delete(odims)
-     end do
-    end do
- 
-    ; explicitly exit file definition mode.
-    setfileoption(outcdf,"DefineMode",False)
-
-    lat=lat(::-1)
-    ; write the dimensions to the file
-    outcdf->time   = (/time/)     
-    outcdf->lat    = (/lat/)
-    outcdf->lon    = (/lon/) 
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
-    ;nv = 1
-    do nv = 0,nvars-1
-     varname = varlist(nv,0)
-     vargrid = varlist(nv,1)
-     varmeth = varlist(nv,2)
-  
-     ;print(nv+"   "+varname+"  "+vargrid+"  "+varmeth)
-     icevar = icef[ii]->$varname$
-     ndims = dimsizes(dimsizes(icevar))
-     ;print(ndims+"   "+dimsizes(icevar))
-
-     if(vargrid .ne. "Ct")then
-      ; print error if the variable is not on the Ct grid
-      print("Variable is not on Ct grid")
-      exit
-     end if
-
-     ; regrid to dsttype+dstgrd with method
-     ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
-     wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
-     rgtt = ESMF_regrid_with_weights(icevar,wgtsfile,False)
-     rgtt = where(ismissing(rgmask2d),icevar@_FillValue,rgtt)
-     rgtt=rgtt(:,::-1,:)
-
-     ; enter file definition mode to add variable attributes
-     setfileoption(outcdf,"DefineMode",True)
-     filevarattdef(outcdf, varname, rgtt)                 
-     setfileoption(outcdf,"DefineMode",False)
-
-     
-     outcdf->$varname$   = (/rgtt/)
-
-     delete(icevar)
-     delete(rgtt)
- 
-    ; nv, loop over number of variables
-    end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-   
-   ;nv = 0
-   do nv = 0,nvpairs-1
-     vecnames = veclist(nv,0,:)
-     vecgrids = veclist(nv,1,:)
-     vecmeth  = veclist(nv,2,:)
-     ;print(nv+"   "+vecnames+"  "+vecgrids+"  "+vecmeth)
-
-     ; create a vector pair list
-     vecpairs = NewList("fifo")
-            n = 0
-         uvel = icef[ii]->$vecnames(n)$
-       vecfld = where(ismissing(uvel),0.0,uvel)
-        copy_VarAtts(uvel,vecfld)
-     ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
-     wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
-           ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-     delete(ut@remap)
-
-            n = 1
-         vvel = icef[ii]->$vecnames(n)$
-       vecfld = where(ismissing(vvel),0.0,vvel)
-        copy_VarAtts(vvel,vecfld)
-     ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
-     wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
-           vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-     delete(vt@remap)
-
-     ListAppend(vecpairs,ut)
-     ListAppend(vecpairs,vt)
-     ;print(vecpairs)
-
-     ; rotate
-     ; first copy Metadata
-     urot = vecpairs[0]
-     vrot = vecpairs[1]
-     urot = cos(angleT2d)*ut - sin(angleT2d)*vt
-     vrot = sin(angleT2d)*ut + cos(angleT2d)*vt
-
-     ; change attribute to indicate these are now rotated velocities
-     urot@long_name=str_sub_str(urot@long_name,"(x)","zonal")
-     vrot@long_name=str_sub_str(vrot@long_name,"(y)","meridional")
-     ; copy back
-     vecpairs[0] = urot
-     vecpairs[1] = vrot
-     delete([/urot, vrot/])
-
-     ; remap
-     do n = 0,1
-      vecfld = vecpairs[n]
-      ; regrid to dsttype+dstgrd with method
-      ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
-      wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
-       rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-       rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
-       rgtt=rgtt(:,::-1,:)
-
-      ; enter file definition mode to add variable attributes
-      setfileoption(outcdf,"DefineMode",True)
-      filevarattdef(outcdf, vecnames(n), rgtt)                 
-      setfileoption(outcdf,"DefineMode",False)
-    
-      outcdf->$vecnames(n)$   = (/rgtt/)
-      delete(rgtt)
-     end do
-      delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
-      delete([/vecnames,vecgrids,vecmeth/])
-    ; nv, loop over number of vector pairs
-    end do
-  
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-   
-   delete(outcdf)
-
-   ; ii, loop over files
-   ;end do
-   ;jj, loop over destination grids
-   delete([/lat,lon,nlon,nlat/])
-   delete([/rgmask2d/])
-  end do
-  print("One complete ice file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/interp_atmos_master.sh b/ush/interp_atmos_master.sh
index 0abc6ad185..4c4ee4b03c 100755
--- a/ush/interp_atmos_master.sh
+++ b/ush/interp_atmos_master.sh
@@ -4,7 +4,7 @@
 # Generate 0.25 / 0.5 / 1 degree interpolated grib2 files for each input grib2 file
 # trim's RH and tweaks sea-ice cover
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 input_file=${1:-"pgb2file_in"}  # Input pressure grib2 file
 output_file_prefix=${2:-"pgb2file_out"}  # Prefix for output grib2 file; the prefix is appended by resolution e.g. _0p25
@@ -29,7 +29,7 @@ grid0p50="latlon 0:720:0.5 90:361:-0.5"
 grid1p00="latlon 0:360:1.0 90:181:-1.0"
 
 # "Import" functions used in this script
-source "${HOMEgfs}/ush/product_functions.sh"
+source "${USHgfs}/product_functions.sh"
 
 # Transform the input ${grid_string} into an array for processing
 IFS=':' read -ra grids <<< "${grid_string}"
diff --git a/ush/interp_atmos_sflux.sh b/ush/interp_atmos_sflux.sh
index 516a2f5e4a..cdf748f666 100755
--- a/ush/interp_atmos_sflux.sh
+++ b/ush/interp_atmos_sflux.sh
@@ -3,7 +3,7 @@
 # This script takes in a master flux file and creates interpolated flux files at various interpolated resolutions
 # Generate 0.25 / 0.5 / 1 degree interpolated grib2 flux files for each input sflux grib2 file
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 input_file=${1:-"sfluxfile_in"}  # Input sflux grib2 file
 output_file_prefix=${2:-"sfluxfile_out"}  # Prefix for output sflux grib2 file; the prefix is appended by resolution e.g. _0p25
@@ -46,4 +46,4 @@ ${WGRIB2} "${input_file}" ${defaults} \
                           ${output_grids}
 export err=$?; err_chk
 
-exit 0
\ No newline at end of file
+exit 0
diff --git a/ush/jjob_header.sh b/ush/jjob_header.sh
old mode 100644
new mode 100755
index 45fa6402ae..dc75437f1a
--- a/ush/jjob_header.sh
+++ b/ush/jjob_header.sh
@@ -39,7 +39,6 @@
 #                     [default: "YES"]
 #   - $pid          : Override the default process id
 #                     [default: $$]
-# 
 
 OPTIND=1
 while getopts "c:e:" option; do
@@ -99,7 +98,7 @@ for config in "${configs[@]:-''}"; do
     status=$?
     if (( status != 0 )); then
         echo "FATAL [${BASH_SOURCE[0]}]: Unable to load config config.${config}"
-    	exit "${status}"
+        exit "${status}"
     fi
 done
 
@@ -111,5 +110,5 @@ source "${HOMEgfs}/env/${machine}.env" "${env_job}"
 status=$?
 if (( status != 0 )); then
     echo "FATAL [${BASH_SOURCE[0]}]: Error while sourcing machine environment ${machine}.env for job ${env_job}"
-	exit "${status}"
+    exit "${status}"
 fi
diff --git a/ush/link_crtm_fix.sh b/ush/link_crtm_fix.sh
index 61ac3f7870..fae1e4b717 100755
--- a/ush/link_crtm_fix.sh
+++ b/ush/link_crtm_fix.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # Get CRTM fix directory from (in this order):
 # 1. First argument to script, or
@@ -22,16 +22,16 @@ for what in "amsre_aqua" "imgr_g11" "imgr_g12" "imgr_g13" \
 	"ssmi_f13" "ssmi_f14" "ssmi_f15" "ssmis_f16" \
 	"ssmis_f17" "ssmis_f18" "ssmis_f19" "ssmis_f20" \
 	"tmi_trmm" "v.seviri_m10" "imgr_insat3d" "abi_gr" "ahi_himawari8" ; do
-	ln -s "${CRTM_FIX}/${what}.TauCoeff.bin" .
-	ln -s "${CRTM_FIX}/${what}.SpcCoeff.bin" .
+	${NLN} "${CRTM_FIX}/${what}.TauCoeff.bin" "${what}.TauCoeff.bin"
+	${NLN} "${CRTM_FIX}/${what}.SpcCoeff.bin" "${what}.SpcCoeff.bin"
 done
 
 for what in 'Aerosol' 'Cloud' ; do
-	ln -s "${CRTM_FIX}/${what}Coeff.bin" .
+	${NLN} "${CRTM_FIX}/${what}Coeff.bin" "${what}Coeff.bin"
 done
 
-for what in  ${CRTM_FIX}/*Emis* ; do
-	ln -s ${what} .
+for what in "${CRTM_FIX}/"*Emis* ; do
+	${NLN} "${what}" "$(basename "${what}")"
 done
 
 exit 0
diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh
index 57a188b93e..a8aea747eb 100755
--- a/ush/load_fv3gfs_modules.sh
+++ b/ush/load_fv3gfs_modules.sh
@@ -10,7 +10,8 @@ fi
 ulimit_s=$( ulimit -S -s )
 
 # Find module command and purge:
-source "${HOMEgfs}/modulefiles/module-setup.sh.inc"
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
 
 # Source versions file for runtime
 source "${HOMEgfs}/versions/run.ver"
@@ -19,39 +20,22 @@ source "${HOMEgfs}/versions/run.ver"
 #KYWmodule use "${HOMEgfs}/modulefiles"
 module use "/scratch1/BMC/gsd-fv3/rtruns/modulefiles/"
 
-if [[ -d /lfs/f1 ]]; then
-  # We are on WCOSS2 (Cactus or Dogwood)
-  module load module_base.wcoss2
-elif [[ -d /mnt/lfs1 ]] ; then
-  # We are on NOAA Jet
-  module load module_base.jet
-elif [[ -d /scratch1 ]] ; then
-  # We are on NOAA Hera
-  module load module_base.hera
-elif [[ -d /work ]] ; then
-  # We are on MSU Orion or Hercules
-  if [[ -d /apps/other ]] ; then
-     # Hercules
-     module load module_base.hercules
-  else
-     # Orion
-     module load module_base.orion
-  fi
-elif [[ -d /glade ]] ; then
-  # We are on NCAR Yellowstone
-  module load module_base.cheyenne
-elif [[ -d /lustre && -d /ncrc ]] ; then
-  # We are on GAEA.
-  module load module_base.gaea
-elif [[ -d /data/prod ]] ; then
-  # We are on SSEC S4
-  module load module_base.s4
-else
-  echo WARNING: UNKNOWN PLATFORM
-fi
+case "${MACHINE_ID}" in
+  "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4")
+    module load "module_base.${MACHINE_ID}"
+    ;;
+  *)
+    echo "WARNING: UNKNOWN PLATFORM"
+    ;;
+esac
 
 module list
 
+# Add wxflow to PYTHONPATH
+wxflowPATH="${HOMEgfs}/ush/python"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+export PYTHONPATH
+
 # Restore stack soft limit:
 ulimit -S -s "${ulimit_s}"
 unset ulimit_s
diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh
index da8e2d8096..8117d3f359 100755
--- a/ush/load_ufsda_modules.sh
+++ b/ush/load_ufsda_modules.sh
@@ -27,57 +27,35 @@ fi
 ulimit_s=$( ulimit -S -s )
 
 # Find module command and purge:
-source "${HOMEgfs}/modulefiles/module-setup.sh.inc"
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
 
 # Load our modules:
 module use "${HOMEgfs}/sorc/gdas.cd/modulefiles"
 
-if [[ -d /lfs/f1 ]]; then
-  # We are on WCOSS2 (Cactus or Dogwood)
-  echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /lfs3 ]] ; then
-  # We are on NOAA Jet
-  echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /scratch1 ]] ; then
-  # We are on NOAA Hera
-  module load "${MODS}/hera"
-  # set NETCDF variable based on ncdump location
-  NETCDF=$( which ncdump )
-  export NETCDF
-  # prod_util stuff, find a better solution later...
-  module use /scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/compiler/intel/2022.1.2/
-  module load prod_util
-elif [[ -d /work ]] ; then
-  # We are on MSU Orion
-  # prod_util stuff, find a better solution later...
-  #module use /apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/compiler/intel/2022.1.2/
-  #module load prod_util
-  export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2
-  export MDATE=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/mdate
-  export NDATE=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/ndate
-  export NHOUR=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/nhour
-  export FSYNC=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/fsync_file
-  module load "${MODS}/orion"
-  # set NETCDF variable based on ncdump location
-  ncdump=$( which ncdump )
-  NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
-  export NETCDF
-elif [[ -d /glade ]] ; then
-  # We are on NCAR Yellowstone
-  echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /lustre && -d /ncrc ]] ; then
-  # We are on GAEA.
-  echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-elif [[ -d /data/prod ]] ; then
-  # We are on SSEC S4
-  echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
-else
-  echo WARNING: UNKNOWN PLATFORM
-fi
+case "${MACHINE_ID}" in
+  ("hera" | "orion" | "hercules" | "wcoss2")
+    module load "${MODS}/${MACHINE_ID}"
+    ncdump=$( command -v ncdump )
+    NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
+    export NETCDF
+    ;;
+  ("jet" | "gaea" | "s4" | "acorn")
+    echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM
+    ;;
+  *)
+    echo "WARNING: UNKNOWN PLATFORM"
+    ;;
+esac
 
 module list
 pip list
 
+# Add wxflow to PYTHONPATH
+wxflowPATH="${HOMEgfs}/ush/python"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+export PYTHONPATH
+
 # Restore stack soft limit:
 ulimit -S -s "${ulimit_s}"
 unset ulimit_s
diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl
index 502032da80..c56ac3bdad 100755
--- a/ush/minmon_xtrct_costs.pl
+++ b/ush/minmon_xtrct_costs.pl
@@ -22,8 +22,8 @@
 #
 #---------------------------
 
-if ($#ARGV != 4 ) {
-	print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+	print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile\n";
 	exit;
 }
 my $suffix = $ARGV[0];
@@ -31,7 +31,6 @@
 my $pdy      = $ARGV[1];
 my $cyc      = $ARGV[2];
 my $infile   = $ARGV[3];
-my $jlogfile = $ARGV[4];
 
 my $use_costterms = 0;
 my $no_data       = 0.00;
diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl
index 0125c58ac8..ac83c08cd3 100755
--- a/ush/minmon_xtrct_gnorms.pl
+++ b/ush/minmon_xtrct_gnorms.pl
@@ -185,8 +185,8 @@ sub updateGnormData {
 #
 #---------------------------------------------------------------------------
 
-if ($#ARGV != 4 ) {
-   print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+   print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile \n";
    exit;
 }
 
@@ -195,7 +195,6 @@ sub updateGnormData {
 my $pdy      = $ARGV[1];
 my $cyc      = $ARGV[2];
 my $infile   = $ARGV[3];
-my $jlogfile = $ARGV[4];
 
 
 my $scr = "minmon_xtrct_gnorms.pl";
diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl
index 1b8186b6ad..cc5da86af8 100755
--- a/ush/minmon_xtrct_reduct.pl
+++ b/ush/minmon_xtrct_reduct.pl
@@ -9,20 +9,18 @@
 #  reduction.ieee_d files ready for GrADS use.
 #---------------------------------------------------------------------------
 
-if ($#ARGV != 4 ) {
-	print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile jlogfile\n";
+if ($#ARGV != 3 ) {
+	print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile\n";
         print " suffix is data source identifier\n";
         print " pdy is YYYYMMDD of the cycle to be processed\n";
         print " cyc is HH of the cycle to be processed\n";
         print " infile is the data file containing the reduction stats\n";
-        print " jlogfile is the job log file\n";
 	exit;
 }
 my $suffix   = $ARGV[0];
 my $pdy      = $ARGV[1];
 my $cyc      = $ARGV[2];
 my $infile   = $ARGV[3];
-my $jlogfile = $ARGV[4];
 
 my $scr = "minmon_xtrct_reduct.pl";
 print "$scr has started\n";
diff --git a/ush/module-setup.sh b/ush/module-setup.sh
index fd656966bf..b4ec3edafa 100755
--- a/ush/module-setup.sh
+++ b/ush/module-setup.sh
@@ -1,6 +1,8 @@
 #!/bin/bash
 set -u
 
+source "${HOMEgfs}/ush/detect_machine.sh"
+
 if [[ ${MACHINE_ID} = jet* ]] ; then
     # We are on NOAA Jet
     if ( ! eval module help > /dev/null 2>&1 ) ; then
@@ -34,10 +36,10 @@ elif [[ ${MACHINE_ID} = orion* ]] ; then
     if ( ! eval module help > /dev/null 2>&1 ) ; then
         source /apps/lmod/lmod/init/bash
     fi
-    export LMOD_SYSTEM_DEFAULT_MODULES=contrib
-    set +u
-    module reset
-    set -u
+    #export LMOD_SYSTEM_DEFAULT_MODULES=git/2.28.0  # contrib has a lot of stuff we shouldn't put in MODULEPATH
+    #set +u
+    module purge # reset causes issues on Orion sometimes.
+    #set -u
 
 elif [[ ${MACHINE_ID} = s4* ]] ; then
     # We are on SSEC Wisconsin S4
@@ -68,39 +70,10 @@ elif [[ ${MACHINE_ID} = stampede* ]] ; then
 elif [[ ${MACHINE_ID} = gaea* ]] ; then
     # We are on GAEA.
     if ( ! eval module help > /dev/null 2>&1 ) ; then
-        # We cannot simply load the module command.  The GAEA
-        # /etc/profile modifies a number of module-related variables
-        # before loading the module command.  Without those variables,
-        # the module command fails.  Hence we actually have to source
-        # /etc/profile here.
-        source /etc/profile
-        __ms_source_etc_profile=yes
-    else
-        __ms_source_etc_profile=no
-    fi
-    module purge
-    # clean up after purge
-    unset _LMFILES_
-    unset _LMFILES_000
-    unset _LMFILES_001
-    unset LOADEDMODULES
-    module load modules
-    if [[ -d /opt/cray/ari/modulefiles ]] ; then
-        module use -a /opt/cray/ari/modulefiles
-    fi
-    if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then
-        module use -a /opt/cray/pe/ari/modulefiles
-    fi
-    if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then
-        module use -a /opt/cray/pe/craype/default/modulefiles
-    fi
-    if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then
-        source /etc/opt/cray/pe/admin-pe/site-config
-    fi
-    if [[ "${__ms_source_etc_profile}" == yes ]] ; then
+        source /usr/share/lmod/lmod/init/bash
         source /etc/profile
-        unset __ms_source_etc_profile
     fi
+    module reset
 
 elif [[ ${MACHINE_ID} = expanse* ]]; then
     # We are on SDSC Expanse
@@ -123,7 +96,7 @@ elif [[ ${MACHINE_ID} = "noaacloud" ]]; then
     export SPACK_ROOT=/contrib/global-workflow/spack-stack/spack
     export PATH=${PATH}:${SPACK_ROOT}/bin
     . "${SPACK_ROOT}"/share/spack/setup-env.sh
-    
+
 else
     echo WARNING: UNKNOWN PLATFORM 1>&2
 fi
diff --git a/ush/oceanice_nc2grib2.sh b/ush/oceanice_nc2grib2.sh
new file mode 100755
index 0000000000..5781e06b36
--- /dev/null
+++ b/ush/oceanice_nc2grib2.sh
@@ -0,0 +1,319 @@
+#!/bin/bash
+
+# This script contains functions to convert ocean/ice rectilinear netCDF files to grib2 format.
+# It uses the wgrib2 utility to perform the conversion and then indexes the output grib2 files.
+
+source "${USHgfs}/preamble.sh"
+
+################################################################################
+function _ice_nc2grib2 {
+# This function converts the ice rectilinear netCDF files to grib2 format
+
+  # Set the inputs
+  local grid=${1} # 0p25, 0p50, 1p00, 5p00
+  local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+  local current_cycle=${3} # YYYYMMDDHH
+  local aperiod=${4} # 0-6
+  local infile=${5} # ice.0p25.nc
+  local outfile=${6} # ice.0p25.grib2
+  local template=${7} # template.global.0p25.gb2
+
+  ${WGRIB2} "${template}" \
+  -import_netcdf "${infile}" "hi_h" "0:1:${latlon_dims}" \
+      -set_var ICETK -set center 7 \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "aice_h" "0:1:${latlon_dims}" \
+      -set_var ICEC -set center 7 \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "Tsfc_h" "0:1:${latlon_dims}" \
+      -set_var ICETMP -set center 7 -rpn "273.15:+" \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "uvel_h" "0:1:${latlon_dims}" \
+      -set_var UICE -set center 7 \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "vvel_h" "0:1:${latlon_dims}" \
+      -set_var VICE -set center 7 \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+# Additional variables are needed for the GFSv17/GEFSv13 operational forecast
+# files, but their GRIB2 parameters are not available in the NCEP (-set center 7)
+# tables in wgrib2 v2.0.8:
+
+#  -import_netcdf "${infile}" "hs_h" "0:1:${latlon_dims}" \
+#    -set_var SNVOLSI -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "frzmlt_h" "0:1:${latlon_dims}" \
+#    -set_var FRZMLTPOT -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "albsni_h" "0:1:${latlon_dims}" \
+#    -set_var ALBDOICE -set center 7 -rpn "100.0:/" \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "mlt_onset_h" "0:1:${latlon_dims}" \
+#    -set_var MLTDATE -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "frz_onset_h" "0:1:${latlon_dims}" \
+#    -set_var FRZDATE -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+  rc=$?
+  # Check if the conversion was successful
+  if (( rc != 0 )); then
+   echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+  fi
+  return "${rc}"
+
+}
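+
+# Illustrative usage (hypothetical argument values; the calling job supplies the real ones):
+#   _ice_nc2grib2 "0p25" "0:721:0:1440" "2021032400" "0-6" "ice.0p25.nc" "ice.0p25.grib2" "template.global.0p25.gb2"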
+
+################################################################################
+function _ocean2D_nc2grib2 {
+# This function converts the ocean 2D rectilinear netCDF files to grib2 format
+
+  # Set the inputs
+  local grid=${1} # 0p25, 0p50, 1p00, 5p00
+  local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+  local current_cycle=${3} # YYYYMMDDHH
+  local aperiod=${4} # 0-6
+  local infile=${5} # ocean.0p25.nc
+  local outfile=${6} # ocean_2D.0p25.grib2
+  local template=${7} # template.global.0p25.gb2
+
+  ${WGRIB2} "${template}" \
+  -import_netcdf "${infile}" "SSH" "0:1:${latlon_dims}" \
+    -set_var SSHG -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "SST" "0:1:${latlon_dims}" \
+    -set_var WTMP -set center 7 -rpn "273.15:+" \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "SSS" "0:1:${latlon_dims}" \
+    -set_var SALIN -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "speed" "0:1:${latlon_dims}" \
+    -set_var SPC -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "SSU" "0:1:${latlon_dims}" \
+    -set_var UOGRD -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "SSV" "0:1:${latlon_dims}" \
+    -set_var VOGRD -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "latent" "0:1:${latlon_dims}" \
+    -set_var LHTFL -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "sensible" "0:1:${latlon_dims}" \
+    -set_var SHTFL -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "SW" "0:1:${latlon_dims}" \
+    -set_var NSWRF -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "LW" "0:1:${latlon_dims}" \
+    -set_var NLWRF -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "LwLatSens" "0:1:${latlon_dims}" \
+    -set_var THFLX -set center 7 \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+  -import_netcdf "${infile}" "MLD_003" "0:1:${latlon_dims}" \
+    -set_var WDEPTH -set center 7 -set_lev "mixed layer depth" \
+    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+# Additional variables are needed for GFSv17/GEFSv13 operational forecast
+# files, but their GRIB2 parameters are not available in the NCEP
+# (-set center 7) tables in wgrib2 v2.0.8:
+#
+#  -import_netcdf "${infile}" "Heat_PmE" "0:1:${latlon_dims}" \
+#    -set_var DWHFLUX -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "taux" "0:1:${latlon_dims}" \
+#    -set_var XCOMPSS -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+#  -import_netcdf "${infile}" "tauy" "0:1:${latlon_dims}" \
+#    -set_var YCOMPSS -set center 7 \
+#    -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+#    -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+  rc=$?
+  # Check if the conversion was successful
+  if (( rc != 0 )); then
+   echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format"
+  fi
+  return "${rc}"
+
+}
+
+################################################################################
+function _ocean3D_nc2grib2 {
+# This function converts the ocean 3D rectilinear netCDF files to grib2 format
+
+  # Set the inputs
+  local grid=${1} # 0p25, 0p50, 1p00, 5p00
+  local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+  local levels=${3} # 5:15:25:35:45:55:65:75:85:95:105:115:125
+  local current_cycle=${4} # YYYYMMDDHH
+  local aperiod=${5} # 0-6
+  local infile=${6} # ocean.0p25.nc
+  local outfile=${7} # ocean_3D.0p25.grib2
+  local template=${8} # template.global.0p25.gb2
+
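+  # Split the colon-separated depth list, e.g. "5:15:25" -> depths=(5 15 25);
+  # zl below is the zero-based vertical index passed to -import_netcdf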
+  IFS=':' read -ra depths <<< "${levels}"
+
+  zl=0
+  for depth in "${depths[@]}"; do
+
+    [[ -f "tmp.gb2" ]] && rm -f "tmp.gb2"
+
+    ${WGRIB2} "${template}" \
+    -import_netcdf "${infile}" "temp" "0:1:${zl}:1:${latlon_dims}" \
+      -set_var WTMP -set center 7 -rpn "273.15:+" \
+      -set_lev "${depth} m below sea level" \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+    -import_netcdf "${infile}" "so" "0:1:${zl}:1:${latlon_dims}" \
+      -set_var SALIN -set center 7 \
+      -set_lev "${depth} m below sea level" \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+    -import_netcdf "${infile}" "uo" "0:1:${zl}:1:${latlon_dims}" \
+      -set_var UOGRD -set center 7 \
+      -set_lev "${depth} m below sea level" \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+    -import_netcdf "${infile}" "vo" "0:1:${zl}:1:${latlon_dims}" \
+      -set_var VOGRD -set center 7 \
+      -set_lev "${depth} m below sea level" \
+      -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+      -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2
+
+    rc=$?
+    # Check if the conversion was successful
+    if (( rc != 0 )); then
+      echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format at depth ${depth}m, ABORT!"
+      return "${rc}"
+    fi
+
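+    # Append this depth's records to the cumulative 3D output file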
+    cat tmp.gb2 >> "${outfile}"
+    rm -f tmp.gb2
+    ((zl = zl + 1))
+
+  done
+
+  # Notes:
+  #   WATPTEMP (water potential temperature (theta)) may be a better
+  #   GRIB2 parameter than WTMP (water temperature) if MOM6 outputs
+  #   potential temperature. WATPTEMP is not available in NCEP
+  #   (-set center 7) tables in wgrib2 v2.0.8.
+
+  return "${rc}"
+
+}
+
+################################################################################
+# Input arguments
+component=${1:?"Need a valid component; options: ice|ocean"}
+grid=${2:-"0p25"} # Default to 0.25-degree grid
+current_cycle=${3:-"2013100100"} # Default to 2013100100
+avg_period=${4:-"0-6"} # Default to 6-hourly average
+ocean_levels=${5:-"5:15:25:35:45:55:65:75:85:95:105:115:125"} # Default to 13 levels (5-125 m)
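+# Example invocation (illustrative; assumes ${WGRIB2} is set and the input
+# netCDF and grib2 template files are staged in the working directory):
+#   <this script> ice 0p25 2013100100 0-6
+#   <this script> ocean 0p25 2013100100 0-6 "5:15:25:35:45"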
+
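+# Map the grid name to the lat/lon index ranges used by -import_netcdf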
+case "${grid}" in
+  "0p25")
+    latlon_dims="0:721:0:1440"
+  ;;
+  "0p50")
+    latlon_dims="0:361:0:720"
+  ;;
+  "1p00")
+    latlon_dims="0:181:0:360"
+  ;;
+  "5p00")
+    latlon_dims="0:36:0:72"
+  ;;
+  *)
+    echo "FATAL ERROR: Unsupported grid '${grid}', ABORT!"
+    exit 1
+  ;;
+esac
+
+input_file="${component}.${grid}.nc"
+template="template.global.${grid}.gb2"
+
+# Check if the template file exists
+if [[ ! -f "${template}" ]]; then
+  echo "FATAL ERROR: '${template}' does not exist, ABORT!"
+  exit 127
+fi
+
+# Check if the input file exists
+if [[ ! -f "${input_file}" ]]; then
+  echo "FATAL ERROR: '${input_file}' does not exist, ABORT!"
+  exit 127
+fi
+
+case "${component}" in
+  "ice")
+    rm -f "${component}.${grid}.grib2" || true
+    _ice_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}.${grid}.grib2" "${template}"
+    rc=$?
+    if (( rc != 0 )); then
+      echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+      exit "${rc}"
+    fi
+  ;;
+  "ocean")
+    rm -f "${component}_2D.${grid}.grib2" || true
+    _ocean2D_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_2D.${grid}.grib2" "${template}"
+    rc=$?
+    if (( rc != 0 )); then
+      echo "FATAL ERROR: Failed to convert the ocean 2D rectilinear netCDF file to grib2 format"
+      exit "${rc}"
+    fi
+    rm -f "${component}_3D.${grid}.grib2" || true
+    _ocean3D_nc2grib2 "${grid}" "${latlon_dims}" "${ocean_levels}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_3D.${grid}.grib2" "${template}"
+    rc=$?
+    if (( rc != 0 )); then
+      echo "FATAL ERROR: Failed to convert the ocean 3D rectilinear netCDF file to grib2 format"
+      exit "${rc}"
+    fi
+    # Combine the 2D and 3D grib2 files into a single file
+    rm -f "${component}.${grid}.grib2" || true
+    cat "${component}_2D.${grid}.grib2" "${component}_3D.${grid}.grib2" > "${component}.${grid}.grib2"
+
+  ;;
+  *)
+    echo "FATAL ERROR: Unknown component: '${component}'. ABORT!"
+    exit 3
+  ;;
+esac
+
+# Index the output grib2 file
+${WGRIB2} -s "${component}.${grid}.grib2" > "${component}.${grid}.grib2.idx"
+rc=$?
+# Check if the indexing was successful
+if (( rc != 0 )); then
+  echo "FATAL ERROR: Failed to index the file '${component}.${grid}.grib2'"
+  exit "${rc}"
+fi
+
+exit 0
diff --git a/ush/ocnice_extractvars.sh b/ush/ocnice_extractvars.sh
new file mode 100755
index 0000000000..f0660bb6ec
--- /dev/null
+++ b/ush/ocnice_extractvars.sh
@@ -0,0 +1,66 @@
+#! /usr/bin/env bash
+
+################################################################################
+## UNIX Script Documentation Block
+## Script name:         ocnice_extractvars.sh
+## Script description:  Extracts and optionally compresses variables
+##                      from ocean and ice products
+##                      and saves these variables in arcdir
+#######################
+# Main body starts here
+#######################
+
+source "${USHgfs}/preamble.sh"
+
+subdata=${1}
+varlist=${2}
+datares=${3}
+datacompress=${4}
+fhout_ocnice=${5}
+comout_rfcst_prod_ocnice=${6}
+
+[[ -d "${subdata}" ]] || mkdir -p "${subdata}"
+
+for (( nh = FHMIN_GFS; nh <= FHMAX_GFS; nh = nh + fhout_ocnice )); do
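+  # Zero-pad the forecast hour to three digits, e.g. 6 -> 006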
+  fnh=$(printf "%3.3d" "${nh}")
+
+  if [[ ${component_name} == "ocn" ]]; then
+    infile=${COMIN_OCEAN_NETCDF}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
+    # For ocean products, add an argument to extract a subset of levels
+    otherargs=(-d "${depthvar_name},""${zmin},""${zmax}")
+  elif [[ ${component_name} == "ice" ]]; then
+    infile=${COMIN_ICE_NETCDF}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
+    otherargs=()
+  fi
+  outfile=${subdata}/${RUN}.${component_name}.t${cyc}z.${datares}.f${fnh}.nc
+
+  if [[ -f "${infile}" ]]; then #check if input file exists before extraction
+    varsrequested=$(paste -s "${varlist}")
+    varsinfile=$(cdo -showname "${infile}")
+    varsavailable=""
+    for i in ${varsrequested}; do
+      # Check if variable from parm file is available in netcdf file. If variable is not in netcdf file, do not try to extract that variable.
+      if [[ ${varsinfile} == *"${i}"* ]]; then
+        varsavailable+="${i},"
+      else
+        echo "WARNING: ${i} is not available in ${infile}."
+      fi
+    done
+    if [[ -z "${varsavailable}" ]]; then
+      echo "WARNING: No variables from parm file ${varlist} are available in netcdf file ${infile}."
+    else
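+      # Strip the trailing comma from the accumulated variable list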
+      ocnice_vars=${varsavailable::-1}
+      ncks -v "${ocnice_vars}" "${otherargs[@]}" "${infile}" "${outfile}"
+    fi
+    if [[ ${datacompress} -eq 1 ]]; then
+      ${COMPRSCMD} "${outfile}"
+      copy_to_comout "${outfile}.bz2" "${comout_rfcst_prod_ocnice}"
+    else
+      copy_to_comout "${outfile}" "${comout_rfcst_prod_ocnice}"
+    fi
+  else
+    echo "WARNING: ${infile} does not exist."
+  fi
+done # nh
+
+exit 0
diff --git a/ush/ocnpost.ncl b/ush/ocnpost.ncl
deleted file mode 100755
index 27e60b0edf..0000000000
--- a/ush/ocnpost.ncl
+++ /dev/null
@@ -1,588 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap MOM6 ocean output on the tripole grid to 
-; a set of rectilinear grids using pre-computed ESMF weights to remap 
-; the listed fields to the destination grid and write the results 
-; to a new netCDF file
-;
-; Prior to running this script, files containing the conservative 
-; and bilinear regridding weights must be generated. These weights
-; are created using the generate_iceocnpost_weights.ncl script.
-;
-; Note: the descriptive text below assumes fortran type indexing
-; where the variables are indexed as (i,j) and indices start at 1
-; NCL indices are (j,i) and start at 0
-;
-; The post involves these steps
-;
-;  a) unstaggering velocity points
-;     MOM6 is on an Arakawa C grid. MOM6 refers to these 
-;     locations as "Ct" for the centers and "Cu", "Cv" 
-;     "Bu" for the left-right, north-south and corner 
-;     points, respectively.
-;
-;     The indexing scheme in MOM6 is as follows:  
-;
-;        Cv@i,j
-;     ----X------X Bu@i,j
-;                |
-;                |
-;       Ct@i,j    |
-;         X      X Cu@i,j
-;                |
-;                |
-;                |
-;
-;     CICE5 is on an Arakawa B grid. CICE5 refers to these
-;     locations as TLAT,TLON for the centers and ULAT,ULON
-;     for the corners
-;  
-;     In UFS, the CICE5 grid has been created using the MOM6
-;     supergrid file. Therefore, all grid points are consistent
-;     between the two models. 
-;   
-;     In the following, MOM6's nomenclature will be followed,
-;     so that CICE5's U-grid will be referred to as "Bu".
-;
-;  b) rotation of tripole vectors to East-West
-;     MOM6 and CICE6 both output velocties on their native
-;     velocity points. For MOM6, that is u-velocities on the
-;     Cu grid and v-velocites on the Cv grid. For CICE5, it is
-;     both u and v-velocities on the Bu grid.
-;   
-;     The rotation angle for both models are defined at center
-;     grid points; therefore the velocities need to be first 
-;     unstaggered before rotation. MOM6 and CICE5 also define 
-;     opposite directions for the rotations. Finally, while the
-;     grid points are identical between the two models, CICE5 
-;     calculates the rotation angle at center grid points by 
-;     averaging the four surrounding B grid points. MOM6 derives
-;     the rotation angle at the center directly from the latitude
-;     and longitude of the center grid points. The angles are therefor
-;     not identical between the two grids.
-; 
-; c) conservative regridding of some fields
-;    Fields such as ice concentration or fluxes which inherently
-;    area area-weighted require conservative regridding. Most other
-;    variables are state variables and can be regridded using
-;    bilinear weighting.
-;
-; An efficient way to accomplish the unstaggering of velocities 
-; is to use the bilinear interpolation weights between grid 
-; points of the Arakawa C grid and the center grid points (for example 
-; Cu->Ct). These weights are generated by the weight generation script 
-;
-; Remapping from the tripole to rectilinear uses either the bilinear
-; or conservative weights from the weight generation script.  Bilinear weights 
-; generated for the first vertical level can be used on other levels
-; (where the masking changes) by utilizing the correct masking procedure.
-; Set output_masks to true to examine the interpolation masks.
-; 
-; Intermediate file output can easily be generated for debugging by 
-; follwing the example in the output_masks logical
-;
-; Bin.Li@noaa.gov (May 2019)
-; The scripts is revised for use in the coupled workflow.
-;
-  load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-; <BL.
-; pull from environment
-  COMDIR          = getenv("COMOUTocean")
-  IDATE           = getenv("IDATE")
-  VDATE           = getenv("VDATE")
-  FHR2            = getenv("FHR")
-  FHR=FHR2
-  ENSMEM          = getenv("ENSMEM")
-  DATA_TMP        = getenv("DATA")
-  nemsrc          = getenv("FIXreg2grb2")
-;  nemsrc     = "/scratch2/NCEPDEV/climate/Bin.Li/S2S/fix/ocean_ice_post/FIXDIR/"
-
-; calculate and break apart verification date
-  ; VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE))
-;  YYYY  = tostring(VDATE(0:3))
-;  MM    = tostring(VDATE(4:5))
-;  DD    = tostring(VDATE(6:7))
-;  HH    = tostring(VDATE(8:9))
-;  HHS   = tostring(tointeger(HH)*3600)
-; BL.>
-
-   ; warnings (generated by int2p_n_Wrap) can be supressed by
-   ; the following (comment out to get the warnings)
-   err = NhlGetErrorObjectId()
-   setvalues err
-;    "errLevel" : "Fatal"          ; only report Fatal errors
-    "errLevel" : "Verbose"
-   end setvalues
-
-   output_masks = False
-
-   ; specify a location to use
-   ;    nemsrc     = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
-   ; interpolation methods
-   methods        = (/"bilinear" ,"conserve"/)
-   ; ocean model output location 
-   ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ocn/"
-
-   ; destination grid sizes and name
-      dsttype     = (/"rect."/)
-      ;dstgrds     = (/"1p0", "0p5", "0p25"/)
-      ;dstgrds     = (/"0p5"/)
-      dstgrds     = (/"0p25"/)
-
-   ; variables to be regridded with the native tripole stagger location
-   ; and dimensionality 
-   ; first BM contained only field "mld", which was actually ePBL
-   ; the remaining BMs contain ePBL, MLD_003 and MLD_0125 
-   ; the following NCO command will be issued at the end 
-   ; to rename the variable mld to ePBL if the variable mld is found
-   ; ncocmd = "ncrename -O -v mld,ePBL "
-   ncocmd = "ncrename -O -v MLD_003,mld"
-
-   varlist = (/ (/      "SSH", "Ct", "bilinear", "2"/) \
-               ,(/      "SST", "Ct", "bilinear", "2"/) \
-               ,(/      "SSS", "Ct", "bilinear", "2"/) \
-               ,(/    "speed", "Ct", "bilinear", "2"/) \
-               ,(/     "temp", "Ct", "bilinear", "3"/) \
-               ,(/       "so", "Ct", "bilinear", "3"/) \
-               ,(/   "latent", "Ct", "conserve", "2"/) \
-               ,(/ "sensible", "Ct", "conserve", "2"/) \
-               ,(/       "SW", "Ct", "conserve", "2"/) \
-               ,(/       "LW", "Ct", "conserve", "2"/) \
-               ,(/     "evap", "Ct", "conserve", "2"/) \
-               ,(/    "lprec", "Ct", "conserve", "2"/) \
-               ,(/    "fprec", "Ct", "conserve", "2"/) \
-               ,(/"LwLatSens", "Ct", "conserve", "2"/) \
-               ,(/ "Heat_PmE", "Ct", "conserve", "2"/) \
-;               ,(/      "mld", "Ct", "bilinear", "2"/) \
-               ,(/     "ePBL", "Ct", "bilinear", "2"/) \
-               ,(/  "MLD_003", "Ct", "bilinear", "2"/) \
-               ,(/ "MLD_0125", "Ct", "bilinear", "2"/) \
-             /)
-     dims = dimsizes(varlist)
-    nvars = dims(0)
-   delete(dims)
-   ;print(varlist)
-
-   ; vectors to be regridded with the native tripole stagger location
-   ; and dimensionality 
-   ; note: vectors are always unstaggered using bilinear weights, but can
-   ; be remapped using conservative
-   nvpairs = 3
-   veclist = new( (/nvpairs,4,2/),"string")
-   veclist = (/ (/ (/  "SSU",   "SSV"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"2", "2"/) /) \
-              , (/ (/   "uo",    "vo"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"3", "3"/) /) \
-              , (/ (/ "taux",  "tauy"/), (/"Cu", "Cv"/), (/"conserve", "conserve"/), (/"2", "2"/) /) \
-             /)
-   ;print(veclist)
-
-   begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run 
-;----------------------------------------------------------------------
-
-;   idate = "20120101"
-
-;   ocnfilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ocn*.nc")
-;          ocnf = addfiles(ocnfilelist,"r")
-;        nfiles = dimsizes(ocnfilelist)
-; <BL.
-; specify input file name and input directory
-   ocnfilelist = "ocn"+VDATE+"."+ENSMEM+"."+IDATE+".nc"
-   ocnf = addfiles(COMDIR+"/"+ocnfilelist,"r")
-   infile = ocnfilelist
-; BL.>
-
-  ; get the rotation angles and vertical grid from the first file
-  ; two different name were used for the angles, either sinrot,cosrot 
-  ; or sin_rot,cos_rot
-   if(isfilevar(ocnf[0],"sin_rot"))then
-    sinrot = ocnf[0]->sin_rot
-   else
-    sinrot = ocnf[0]->sinrot
-   end if
-   if(isfilevar(ocnf[0],"cos_rot"))then
-    cosrot = ocnf[0]->cos_rot
-   else
-    cosrot = ocnf[0]->cosrot
-   end if
-       z_l = ocnf[0]->z_l
-       z_i = ocnf[0]->z_i
-     nlevs = dimsizes(z_l)
-
-  ; get a 2 and 3 dimensional fields for creating the interpolation masks
-  ; the mask2d,mask3d contain 1's on land and 0's at valid points.
-  mask2d = where(ismissing(ocnf[0]->SST),  1.0, 0.0)
-  mask3d = where(ismissing(ocnf[0]->temp), 1.0, 0.0)
-  ;printVarSummary(mask2d)
-  ;printVarSummary(mask3d)
-
-  ; create conformed rotation arrays to make vector rotations cleaner
-  sinrot2d=conform_dims(dimsizes(mask2d),sinrot,(/1,2/)) 
-  cosrot2d=conform_dims(dimsizes(mask2d),cosrot,(/1,2/)) 
-
-  sinrot3d=conform_dims(dimsizes(mask3d),sinrot,(/2,3/)) 
-  cosrot3d=conform_dims(dimsizes(mask3d),cosrot,(/2,3/)) 
-
-  ; check for variables in file. this is only required because
-  ; of the missing/misnamed MLD variables in the first BM
-  ; only the varlist is checked, since it is assumed there are
-  ; no other variables missing after the first benchmark
-  valid = new((/nvars/),"logical")
-  valid = False
-  do nv = 0,nvars-1
-   varname = varlist(nv,0)
-   if(isfilevar(ocnf[0],varname))then
-    valid(nv) = True
-   end if
-  print(varlist(nv,0)+"   "+valid(nv))
-  end do
-
-;----------------------------------------------------------------------
-; loop over the output resolutions 
-;----------------------------------------------------------------------
-
-      jj = 1
-      ii = 0
-
-   do jj = 0,dimsizes(dstgrds)-1
-   ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
-   outres = dstgrds(jj)+"x"+dstgrds(jj)
-   outgrid = dstgrds(jj)
-
-   ; regrid a field to obtain the output xy dimensions
-    wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
-          tt = ESMF_regrid_with_weights(sinrot,wgtsfile,False)
-        tt!0 = "lat"
-        tt!1 = "lon"
-         lat = tt&lat
-         lon = tt&lon
-        dims = dimsizes(tt)
-        nlat = dims(0)
-        nlon = dims(1)
-
-     print("fields will be remapped to destination grid size "\
-           +nlon+"  "+nlat)
-  
-     delete(tt)
-     delete(dims)
-
-    ; regrid the masks to obtain the interpolation masks. 
-    ; the mask2d,mask3d contain 1's on land and 0's at valid points.
-    ; when remapped, any mask value > 0 identifies land values that 
-    ; have crept into the field. remapped model fields are then
-    ; masked with this interpolation mask
-
-    wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
-    rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
-    rgmask3d = ESMF_regrid_with_weights(mask3d, wgtsfile,False)
-
-    if(output_masks)then
-     testfile = "masks_"+dstgrds(jj)+".nc"
-     system("/bin/rm -f "+testfile)
-     ; create
-     testcdf = addfile(testfile,"c")
-     testcdf->rgmask2d = rgmask2d
-     testcdf->rgmask3d = rgmask3d
-     ; close
-     delete(testcdf)
-    end if
-
-    ; create the interpolation mask
-    rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
-    rgmask3d = where(rgmask3d .gt. 0.0, rgmask3d@_FillValue, 1.0)
-
-    ; conformed depth array 
-    depth = conform_dims(dimsizes(mask3d), z_l, (/1/))
-    ;print(dimsizes(depth))
-
-;----------------------------------------------------------------------
-; loop over each file in the ocnfilelist
-;----------------------------------------------------------------------
-;<BL.  removing file loop
-;   do ii = 0,2
-;do ii = 0,nfiles-1
-;     infile = ocnfilelist(ii)
-;    print("working on "+infile)
-; create the output file name
-;    outfile = infile
-; find the index where the actual filename begins
-;  indstr = str_index_of_substr(outfile, "/", -1)
-; insert a sub-directory name
-;    outfile = str_insert(outfile,outgrid+"/",indstr+1)
-; insert a string indicating regridding resolution
-;    outfile = str_insert(outfile,outres,-4)
-; find the index where the actual filename begins
-;     indstr = str_index_of_substr(outfile, "ocn", -1)
-; insert an 'r' after the leading occurence of 'ocn' 
-; to indicate regular grid
-;    outfile = str_insert(outfile,"r",indstr+3)
-; check if the outgrid directory exists and create
-; it if it does not
-;   ret = systemfunc("test -d "+dirsrc+"gfs."+idate+"/00/"+outgrid+"; echo $?")
-;    if(ret .eq. 1)then
-;     system("mkdir "+dirsrc+"gfs."+idate+"/00/"+outgrid)
-;    end if
-; BL.>
-
-    ; retrieve the time stamp
-       time = ocnf[0]->time
-    delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-;    system("/bin/rm -f " + outfile)    ; remove if exists
-;    outcdf  = addfile (outfile, "c")  ; open output file
-; specify output file information and open file for output
-  FILENAME_REGRID = DATA_TMP+"/ocnr"+VDATE+"."+ENSMEM+"."+IDATE+"_"+outres+"_MOM6.nc"
-  if (isfilepresent(FILENAME_REGRID)) then
-    system("rm -f "+FILENAME_REGRID)
-  end if
-  outcdf = addfile(FILENAME_REGRID,"c")
-  outfile=FILENAME_REGRID
-
-    ; explicitly declare file definition mode. Improve efficiency.
-    setfileoption(outcdf,"DefineMode",True)
-
-    ; create global attributes of the file
-    fAtt               = True            ; assign file attributes
-    fAtt@creation_date = systemfunc ("date")
-    fAtt@source_file   = infile        
-    fileattdef( outcdf, fAtt )           ; copy file attributes    
-
-    ; predefine the coordinate variables and their dimensionality
-   ; dimNames = (/"time", "z_l",   "z_i",  "z_T",  "lat", "lon"/)  
-    dimNames = (/"time", "z_l",   "z_i",   "lat", "lon"/)  
-    ;dimSizes = (/ -1   , nlevs, nlevs+1,    nTd,   nlat,  nlon/) 
-    dimSizes = (/ -1   , nlevs, nlevs+1,    nlat,  nlon/) 
-    ;dimUnlim = (/ True , False,   False,  False,  False, False/)   
-    dimUnlim = (/ True , False,   False,  False, False/)   
-    filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
-    ; predefine the the dimensionality of the variables to be written out
-    filevardef(outcdf,  "time", typeof(time), getvardims(time)) 
-    filevardef(outcdf,   "z_l",  typeof(z_l),  getvardims(z_l))                           
-    filevardef(outcdf,   "z_i",  typeof(z_i),  getvardims(z_i))
-    ;filevardef(outcdf,   "z_T",  typeof(z_T),  getvardims(z_T))
-    filevardef(outcdf,   "lat",  typeof(lat),  getvardims(lat))                          
-    filevardef(outcdf,   "lon",  typeof(lon),  getvardims(lon))                          
-
-    ; Copy attributes associated with each variable to the file
-    filevarattdef(outcdf, "time", time)                 
-    filevarattdef(outcdf,  "z_l",  z_l)               
-    filevarattdef(outcdf,  "z_i",  z_i)               
-    ;filevarattdef(outcdf,  "z_T",  z_T)               
-    filevarattdef(outcdf,  "lat",  lat)             
-    filevarattdef(outcdf,  "lon",  lon)            
-
-    ; predefine variables
-    do nv = 0,nvars-1
-     varname = varlist(nv,0)
-     vardims = varlist(nv,3)
-     if(valid(nv))then
-      if(vardims .eq. "2")then
-       odims = (/"time", "lat", "lon"/)
-      else
-       odims = (/"time", "z_l", "lat", "lon"/)
-      end if
-      ;print("creating variable "+varname+" in file")
-      filevardef(outcdf, varname, "float", odims)
-      delete(odims)
-     end if
-    end do
-
-    do nv = 0,nvpairs-1
-     do nn = 0,1
-     vecname = veclist(nv,0,nn)
-     vecdims = veclist(nv,3,nn)
-     if(vecdims .eq. "2")then
-      odims = (/"time", "lat", "lon"/)
-     else
-      odims = (/"time", "z_l", "lat", "lon"/)
-     end if
-     ;print("creating variable "+vecname+" in file")
-     filevardef(outcdf, vecname, "float", odims) 
-     delete(odims)
-     delete(vecdims)
-     end do
-    end do
- 
-    ; explicitly exit file definition mode.
-    setfileoption(outcdf,"DefineMode",False)
-
-    ; write the dimensions to the file
-    outcdf->time   = (/time/)     
-    outcdf->z_l    = (/z_l/)     
-    outcdf->z_i    = (/z_i/)     
-;    outcdf->z_T    = (/z_T/)     
-; <BL
-    lat=lat(::-1)
-; BL>
-    outcdf->lat    = (/lat/)
-    outcdf->lon    = (/lon/) 
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
-    do nv = 0,nvars-1
-     varname = varlist(nv,0)
-     vargrid = varlist(nv,1)
-     varmeth = varlist(nv,2)
-     vardims = varlist(nv,3)
-  
-     if(valid(nv))then
-      ;print(nv+"   "+varname+"  "+vargrid+"  "+varmeth)
-      ocnvar = ocnf[ii]->$varname$
-     ndims = dimsizes(dimsizes(ocnvar))
-     ;print(ndims+"   "+dimsizes(ocnvar))
-
-     if(vargrid .ne. "Ct")then
-      ; print error if the variable is not on the Ct grid
-      print("Variable is not on Ct grid")
-      exit
-     end if
-
-     ; regrid to dsttype+dstgrd with method
-     ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
-     wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
-     rgtt = ESMF_regrid_with_weights(ocnvar,wgtsfile,False)
-    if(vardims .eq. "2")then
-     rgtt = where(ismissing(rgmask2d),ocnvar@_FillValue,rgtt)
-     rgtt=rgtt(:,::-1,:)
-    else
-     rgtt = where(ismissing(rgmask3d),ocnvar@_FillValue,rgtt)
-     rgtt=rgtt(:,:,::-1,:)
-    end if
-
-     ; enter file definition mode to add variable attributes
-     setfileoption(outcdf,"DefineMode",True)
-     filevarattdef(outcdf, varname, rgtt)                 
-     setfileoption(outcdf,"DefineMode",False)
-
-     outcdf->$varname$   = (/rgtt/)
-
-     delete(ocnvar)
-     delete(rgtt)
- 
-     ; variable exists    
-     end if
-    ; nv, loop over number of variables
-    end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-   
-   ;nv = 2
-   do nv = 0,nvpairs-1
-     vecnames = veclist(nv,0,:)
-     vecgrids = veclist(nv,1,:)
-     vecmeth  = veclist(nv,2,:)
-     vecdims  = veclist(nv,3,:)
-     ;print(nv+"   "+vecnames+"  "+vecgrids+"  "+vecmeth)
-
-     ; create a vector pair list
-     vecpairs = NewList("fifo")
-            n = 0
-         uvel = ocnf[ii]->$vecnames(n)$
-       vecfld = where(ismissing(uvel),0.0,uvel)
-        copy_VarAtts(uvel,vecfld)
-     ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
-     wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
-           ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-     delete(ut@remap)
-
-            n = 1
-         vvel = ocnf[ii]->$vecnames(n)$
-       vecfld = where(ismissing(vvel),0.0,vvel)
-        copy_VarAtts(vvel,vecfld)
-     ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
-     wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
-           vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-     delete(vt@remap)
-
-     ListAppend(vecpairs,ut)
-     ListAppend(vecpairs,vt)
-     ;print(vecpairs)
-
-     ; rotate
-     ; first copy Metadata
-     urot = vecpairs[0]
-     vrot = vecpairs[1]
-     if(vecdims(0) .eq. "2")then
-       urot =   ut*cosrot2d +   vt*sinrot2d
-       vrot =   vt*cosrot2d -   ut*sinrot2d
-     else
-       urot =   ut*cosrot3d +   vt*sinrot3d
-       vrot =   vt*cosrot3d -   ut*sinrot3d
-     end if
-     ; change attribute to indicate these are now rotated velocities
-     urot@long_name=str_sub_str(urot@long_name,"X","Zonal")
-     vrot@long_name=str_sub_str(vrot@long_name,"Y","Meridional")
-     ; copy back
-     vecpairs[0] = urot
-     vecpairs[1] = vrot
-     delete([/urot, vrot/])
-
-     ; remap
-     do n = 0,1
-      vecfld = vecpairs[n]
-      ; regrid to dsttype+dstgrd with method
-      ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
-      wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
-       rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
-      if(vecdims(n) .eq. "2")then
-       rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
-     rgtt=rgtt(:,::-1,:)
-      else
-       rgtt = where(ismissing(rgmask3d),vecfld@_FillValue,rgtt)
-     rgtt=rgtt(:,:,::-1,:)
-      end if
-
-      ; enter file definition mode to add variable attributes
-      setfileoption(outcdf,"DefineMode",True)
-      filevarattdef(outcdf, vecnames(n), rgtt)                 
-      setfileoption(outcdf,"DefineMode",False)
-    
-      outcdf->$vecnames(n)$   = (/rgtt/)
-      delete(rgtt)
-     end do
-      delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
-      delete([/vecnames,vecgrids,vecmeth,vecdims/])
-    ; nv, loop over number of vector pairs
-    end do
-  
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-   
-   delete(outcdf)
-   ; rename mld to ePBL if required
-  do nv = 0,nvars-1
-   varname = varlist(nv,0)
-  ;  if(varname .eq. "mld" .and. valid(nv))then
-    if(varname .eq. "MLD_003" .and. valid(nv))then
-     print("Renaming MLD_003 to mld")
-     ;print(ncocmd+"  "+outfile)
-     system(ncocmd+"  "+outfile)
-    end if
-   end do
-
-   ; ii, loop over files
-;<BL.
-   ;end do
-;BL.>
-   ;jj, loop over destination grids
-   delete([/lat,lon,nlon,nlat/])
-   delete([/rgmask2d,rgmask3d/])
-  end do
-  print("One complete ocn file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh
index 57ff87be5f..0c623bf03c 100755
--- a/ush/ozn_xtrct.sh
+++ b/ush/ozn_xtrct.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #------------------------------------------------------------------
 #  ozn_xtrct.sh
@@ -132,12 +132,12 @@ else
    #--------------------------------------------------------------------
    #   Copy extraction programs to working directory
    #
-   ${NCP} "${HOMEgfs}/exec/oznmon_time.x" ./oznmon_time.x
+   ${NCP} "${EXECgfs}/oznmon_time.x" ./oznmon_time.x
    if [[ ! -e oznmon_time.x ]]; then
       iret=2
       exit ${iret}
    fi
-   ${NCP} "${HOMEgfs}/exec/oznmon_horiz.x" ./oznmon_horiz.x
+   ${NCP} "${EXECgfs}/oznmon_horiz.x" ./oznmon_horiz.x
    if [[ ! -e oznmon_horiz.x ]]; then
       iret=3
       exit ${iret}
diff --git a/ush/parsing_model_configure_DATM.sh b/ush/parsing_model_configure_DATM.sh
deleted file mode 100755
index ecd3fa6dd6..0000000000
--- a/ush/parsing_model_configure_DATM.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#! /usr/bin/env bash
-
-#####
-## "parsing_model_configure_DATM.sh"
-## This script writes model configure file
-## for DATM model
-##
-## This is the child script of ex-global forecast,
-## writing model configure file for DATM
-## This script is a direct execution.
-#####
-
-DATM_model_configure(){
-
-rm -f model_configure
-cat > model_configure <<EOF
-print_esmf:                ${print_esmf:-.true.}
-start_year:                $SYEAR
-start_month:               $SMONTH
-start_day:                 $SDAY
-start_hour:                $SHOUR
-start_minute:              0
-start_second:              0
-nhours_fcst:               $FHMAX
-RUN_CONTINUE:              ${RUN_CONTINUE:-".false."}
-ENS_SPS:                   ${ENS_SPS:-".false."}
-
-dt_atmos:                  ${DT_ATMOS}
-atm_coupling_interval_sec: ${coupling_interval_fast_sec}
-
-iatm:                      ${IATM}
-jatm:                      ${JATM}
-cdate0:                    ${CDATE}
-nfhout:                    ${NFHOUT}
-filename_base:             ${DATM_FILENAME_BASE}
-EOF
-echo "$(cat model_configure)"
-}
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index e08a4a2b17..7d64ab38f9 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -1,67 +1,71 @@
 #! /usr/bin/env bash
 
-#####
-## "parsing_model_configure_FV3.sh"
-## This script writes model configure file
-## for FV3 model
-##
-## This is the child script of ex-global forecast,
-## writing model configure file for FV3
-## This script is a direct execution.
-#####
+# parsing model_configure for UFSWM FV3
 
+# shellcheck disable=SC2034
 FV3_model_configure(){
 
-local restile=$(echo "${CASE}" |cut -c2-)
-local ichunk2d=$((4*restile))
-local jchunk2d=$((2*restile))
-local ichunk3d=$((4*restile))
-local jchunk3d=$((2*restile))
-local kchunk3d=1
+local restile=${CASE:1}
 
-rm -f model_configure
-cat >> model_configure <<EOF
-start_year:              ${tPDY:0:4}
-start_month:             ${tPDY:4:2}
-start_day:               ${tPDY:6:2}
-start_hour:              ${tcyc}
-start_minute:            0
-start_second:            0
-nhours_fcst:             ${FHMAX}
-fhrot:                   ${IAU_FHROT:-0}
+# Prepare local variables for use in model_configure.IN from UFSWM
+# The ones already defined are left commented as a reminder
 
-dt_atmos:                ${DELTIM}
-calendar:                ${calendar:-'julian'}
-restart_interval:        ${restart_interval} -1
-output_1st_tstep_rst:    .false.
+local model_start_date
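+# When IAU is enabled the model clock starts at the previous cycle; the FHROT
+# setting below (from IAU_FHROT) supplies the corresponding forecast-hour offset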
+if [[ "${DOIAU}" == "YES" ]]; then
+  model_start_date="${previous_cycle}"
+else
+  model_start_date="${current_cycle}"
+fi
 
-quilting:                ${QUILTING}
-quilting_restart:        .true.
-write_groups:            ${WRITE_GROUP:-1}
-write_tasks_per_group:   ${WRTTASK_PER_GROUP:-24}
-itasks:                  1
-output_history:          ${OUTPUT_HISTORY:-".true."}
-history_file_on_native_grid: .false.
-write_dopost:            ${WRITE_DOPOST:-".false."}
-write_nsflip:            ${WRITE_NSFLIP:-".false."}
-num_files:               ${NUM_FILES:-2}
-filename_base:           'atm' 'sfc'
-output_grid:             ${OUTPUT_GRID}
-output_file:             '${OUTPUT_FILETYPE_ATM}' '${OUTPUT_FILETYPE_SFC}'
-zstandard_level:         0
-ichunk2d:                ${ichunk2d:-0}
-jchunk2d:                ${jchunk2d:-0}
-ichunk3d:                ${ichunk3d:-0}
-jchunk3d:                ${jchunk3d:-0}
-kchunk3d:                ${kchunk3d:-0}
-ideflate:                ${ideflate:-1}
-quantize_mode:           'quantize_bitround'
-quantize_nsd:            ${QUANTIZE_NSD:-0}
-imo:                     ${LONB_IMO}
-jmo:                     ${LATB_JMO}
-output_fh:               ${FV3_OUTPUT_FH}
-iau_offset:              ${IAU_OFFSET:-0}
-EOF
+local SYEAR=${model_start_date:0:4}
+local SMONTH=${model_start_date:4:2}
+local SDAY=${model_start_date:6:2}
+local SHOUR=${model_start_date:8:2}
+# FHMAX
+local FHROT=${IAU_FHROT:-0}
+local DT_ATMOS=${DELTIM}
+local RESTART_INTERVAL="${FV3_RESTART_FH[*]}"
+# QUILTING
+local QUILTING_RESTART=".true."
+local WRITE_GROUP=${WRITE_GROUP:-1}
+local WRTTASK_PER_GROUP=${WRTTASK_PER_GROUP:-24}
+local ITASKS=1
+local OUTPUT_HISTORY=${OUTPUT_HISTORY:-".true."}
+local HISTORY_FILE_ON_NATIVE_GRID=".false."
+local WRITE_DOPOST=${WRITE_DOPOST:-".false."}
+local WRITE_NSFLIP=${WRITE_NSFLIP:-".false."}
+local NUM_FILES=${NUM_FILES:-2}
+local FILENAME_BASE="'atm' 'sfc'"
+# OUTPUT_GRID
+local OUTPUT_FILE="'${OUTPUT_FILETYPE_ATM}' '${OUTPUT_FILETYPE_SFC}'"
+local ZSTANDARD_LEVEL=0
+local IDEFLATE=1  # netCDF zlib lossless compression (0-9); 0: no compression
+local QUANTIZE_NSD=${QUANTIZE_NSD:-0}  # netCDF compression
+local ICHUNK2D=$((4*restile))
+local JCHUNK2D=$((2*restile))
+local ICHUNK3D=$((4*restile))
+local JCHUNK3D=$((2*restile))
+local KCHUNK3D=1
+local IMO=${LONB_IMO}
+local JMO=${LATB_JMO}
+local OUTPUT_FH=${FV3_OUTPUT_FH}
+local IAU_OFFSET=${IAU_OFFSET:-0}
+
+# Ensure the template exists
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  local NEST_IMO=${npx_nest}
+  local NEST_JMO=${npy_nest}
+  template="${PARMgfs}/ufs/model_configure_nest.IN"
+else
+  template="${PARMgfs}/ufs/model_configure.IN"
+fi
+if [[ ! -f ${template} ]]; then
+  echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+  exit 1
+fi
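+
+# Render the template: atparse substitutes the local variables defined above
+# into the template's placeholders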
+rm -f "${DATA}/model_configure"
+atparse < "${template}" >> "${DATA}/model_configure"
+echo "Rendered model_configure"
+cat "${DATA}/model_configure"
 
-echo "$(cat model_configure)"
 }
diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh
index 6ef743ebc9..3822094c97 100755
--- a/ush/parsing_namelists_CICE.sh
+++ b/ush/parsing_namelists_CICE.sh
@@ -2,6 +2,8 @@
 
 # parsing namelist of CICE
 
+# Disable variable not used warnings
+# shellcheck disable=SC2034
 CICE_namelists(){
 
 # "warm_start" here refers to whether CICE model is warm starting or not.
@@ -37,374 +39,108 @@ if (( $(( NY_GLB % NPY )) == 0 )); then
 else
   local block_size_y=$(( (NY_GLB / NPY) + 1 ))
 fi
-local max_blocks=-1
 
 local sec stepsperhr npt
 sec=$(to_seconds "${current_cycle:8:2}0000")
 stepsperhr=$((3600/ICETIM))
 npt=$((FHMAX*stepsperhr)) # Need this in order for dump_last to work
 
-cat > ice_in <<eof
-&setup_nml
-   days_per_year  = 365
-   use_leap_years = .true.
-   year_init      = ${current_cycle:0:4}
-   month_init     = ${current_cycle:4:2}
-   day_init       = ${current_cycle:6:2}
-   sec_init       = ${sec}
-   dt             = ${ICETIM}
-   npt            = ${npt}
-   ndtd           = 1
-   runtype        = '${runtype}'
-   runid          = 'unknown'
-   ice_ic         = 'cice_model.res.nc'
-   restart        = .true.
-   restart_ext    = .false.
-   use_restart_time = ${use_restart_time}
-   restart_format = 'nc'
-   lcdf64         = .false.
-   numin          = 21
-   numax          = 89
-   restart_dir    = './CICE_RESTART/'
-   restart_file   = 'cice_model.res'
-   pointer_file   = './ice.restart_file'
-   dumpfreq       = '${dumpfreq}'
-   dumpfreq_n     =  ${dumpfreq_n}
-   dump_last      = .false.
-   bfbflag        = 'off'
-   diagfreq       = 6
-   diag_type      = 'file'
-   diag_file      = 'ice_diag.d'
-   print_global   = .true.
-   print_points   = .true.
-   latpnt(1)      =  90.
-   lonpnt(1)      =   0.
-   latpnt(2)      = -65.
-   lonpnt(2)      = -45.
-   histfreq       = 'm','d','h','x','x'
-   histfreq_n     =  0 , 0 , ${FHOUT} , 1 , 1
-   hist_avg       = ${cice_hist_avg}
-   history_dir    = './CICE_OUTPUT/'
-   history_file   = 'iceh'
-   write_ic       = .true.
-   incond_dir     = './CICE_OUTPUT/'
-   incond_file    = 'iceh_ic'
-   version_name   = 'CICE_6.0.2'
-/
-
-&grid_nml
-   grid_format  = 'nc'
-   grid_type    = 'tripole'
-   grid_file    = '${ice_grid_file}'
-   kmt_file     = '${ice_kmt_file}'
-   kcatbound    = 0
-   ncat         = 5
-   nfsd         = 1
-   nilyr        = 7
-   nslyr        = 1
-   nblyr        = 1
-   nfsd         = 1
-/
-
-&tracer_nml
-   tr_iage      = .true.
-   restart_age  = .false.
-   tr_FY        = .false.
-   restart_FY   = .false.
-   tr_lvl       = .true.
-   restart_lvl  = .false.
-   tr_pond_topo = .false.
-   restart_pond_topo = .false.
-   tr_pond_lvl  = ${tr_pond_lvl}
-   restart_pond_lvl  = ${restart_pond_lvl}
-   tr_aero      = .false.
-   restart_aero = .false.
-   tr_fsd       = .false.
-   restart_fsd  = .false.
-/
-
-&thermo_nml
-   kitd              = 1
-   ktherm            = ${ktherm}
-   conduct           = 'MU71'
-   a_rapid_mode      =  0.5e-3
-   Rac_rapid_mode    =    10.0
-   aspect_rapid_mode =     1.0
-   dSdt_slow_mode    = -5.0e-8
-   phi_c_slow_mode   =    0.05
-   phi_i_mushy       =    0.85
-/
-
-&dynamics_nml
-   kdyn            = 1
-   ndte            = 120
-   revised_evp     = .false.
-   evp_algorithm   = 'standard_2d'
-   brlx            = 300.0
-   arlx            = 300.0
-   ssh_stress      = 'coupled'
-   advection       = 'remap'
-   kstrength       = 1
-   krdg_partic     = 1
-   krdg_redist     = 1
-   mu_rdg          = 3
-   Cf              = 17.
-   Ktens           = 0.
-   e_yieldcurve    = 2.
-   e_plasticpot    = 2.
-   coriolis        = 'latitude'
-   kridge          = 1
-   ktransport      = 1
-/
-
-&shortwave_nml
-   shortwave       = 'dEdd'
-   albedo_type     = 'default'
-   albicev         = 0.78
-   albicei         = 0.36
-   albsnowv        = 0.98
-   albsnowi        = 0.70
-   ahmax           = 0.3
-   R_ice           = 0.
-   R_pnd           = 0.
-   R_snw           = 1.5
-   dT_mlt          = 1.5
-   rsnw_mlt        = 1500.
-   kalg            = 0.0
-   sw_redist       = .true.
-/
-
-&ponds_nml
-   hp1             = 0.01
-   hs0             = 0.
-   hs1             = 0.03
-   dpscale         = 1.e-3
-   frzpnd          = 'hlid'
-   rfracmin        = 0.15
-   rfracmax        = 1.
-   pndaspect       = 0.8
-/
-
-&snow_nml
-   snwredist       = 'none'
-/
-
-&forcing_nml
-   formdrag        = .false.
-   atmbndy         = 'default'
-   calc_strair     = .true.
-   calc_Tsfc       = .true.
-   highfreq        = .false.
-   natmiter        = 5
-   ustar_min       = 0.0005
-   emissivity      = 0.95
-   fbot_xfer_type  = 'constant'
-   update_ocn_f    = ${FRAZIL_FWSALT}
-   l_mpond_fresh   = .false.
-   tfrz_option     = ${tfrz_option}
-   restart_coszen  = .true.
-/
-
-&domain_nml
-   nprocs            = ${ntasks_cice6}
-   nx_global         = ${NX_GLB}
-   ny_global         = ${NY_GLB}
-   block_size_x      = ${block_size_x}
-   block_size_y      = ${block_size_y}
-   max_blocks        = ${max_blocks}
-   processor_shape   = '${processor_shape}'
-   distribution_type = 'cartesian'
-   distribution_wght = 'latitude'
-   ew_boundary_type  = 'cyclic'
-   ns_boundary_type  = 'tripole'
-   maskhalo_dyn      = .false.
-   maskhalo_remap    = .false.
-   maskhalo_bound    = .false.
-/
-
-&zbgc_nml
-/
-
-&icefields_nml
-   f_tmask         = .true.
-   f_blkmask       = .true.
-   f_tarea         = .true.
-   f_uarea         = .true.
-   f_dxt           = .false.
-   f_dyt           = .false.
-   f_dxu           = .false.
-   f_dyu           = .false.
-   f_HTN           = .false.
-   f_HTE           = .false.
-   f_ANGLE         = .true.
-   f_ANGLET        = .true.
-   f_NCAT          = .true.
-   f_VGRDi         = .false.
-   f_VGRDs         = .false.
-   f_VGRDb         = .false.
-   f_VGRDa         = .true.
-   f_bounds        = .false.
-   f_aice          = 'mdhxx'
-   f_hi            = 'mdhxx'
-   f_hs            = 'mdhxx'
-   f_Tsfc          = 'mdhxx'
-   f_sice          = 'mdhxx'
-   f_uvel          = 'mdhxx'
-   f_vvel          = 'mdhxx'
-   f_uatm          = 'mdhxx'
-   f_vatm          = 'mdhxx'
-   f_fswdn         = 'mdhxx'
-   f_flwdn         = 'mdhxx'
-   f_snowfrac      = 'x'
-   f_snow          = 'mdhxx'
-   f_snow_ai       = 'x'
-   f_rain          = 'mdhxx'
-   f_rain_ai       = 'x'
-   f_sst           = 'mdhxx'
-   f_sss           = 'mdhxx'
-   f_uocn          = 'mdhxx'
-   f_vocn          = 'mdhxx'
-   f_frzmlt        = 'mdhxx'
-   f_fswfac        = 'mdhxx'
-   f_fswint_ai     = 'x'
-   f_fswabs        = 'mdhxx'
-   f_fswabs_ai     = 'x'
-   f_albsni        = 'mdhxx'
-   f_alvdr         = 'mdhxx'
-   f_alidr         = 'mdhxx'
-   f_alvdf         = 'mdhxx'
-   f_alidf         = 'mdhxx'
-   f_alvdr_ai      = 'x'
-   f_alidr_ai      = 'x'
-   f_alvdf_ai      = 'x'
-   f_alidf_ai      = 'x'
-   f_albice        = 'x'
-   f_albsno        = 'x'
-   f_albpnd        = 'x'
-   f_coszen        = 'x'
-   f_flat          = 'mdhxx'
-   f_flat_ai       = 'x'
-   f_fsens         = 'mdhxx'
-   f_fsens_ai      = 'x'
-   f_fswup         = 'x'
-   f_flwup         = 'mdhxx'
-   f_flwup_ai      = 'x'
-   f_evap          = 'mdhxx'
-   f_evap_ai       = 'x'
-   f_Tair          = 'mdhxx'
-   f_Tref          = 'mdhxx'
-   f_Qref          = 'mdhxx'
-   f_congel        = 'mdhxx'
-   f_frazil        = 'mdhxx'
-   f_snoice        = 'mdhxx'
-   f_dsnow         = 'mdhxx'
-   f_melts         = 'mdhxx'
-   f_meltt         = 'mdhxx'
-   f_meltb         = 'mdhxx'
-   f_meltl         = 'mdhxx'
-   f_fresh         = 'mdhxx'
-   f_fresh_ai      = 'x'
-   f_fsalt         = 'mdhxx'
-   f_fsalt_ai      = 'x'
-   f_fbot          = 'mdhxx'
-   f_fhocn         = 'mdhxx'
-   f_fhocn_ai      = 'x'
-   f_fswthru       = 'x'
-   f_fswthru_ai    = 'x'
-   f_fsurf_ai      = 'x'
-   f_fcondtop_ai   = 'x'
-   f_fmeltt_ai     = 'x'
-   f_strairx       = 'mdhxx'
-   f_strairy       = 'mdhxx'
-   f_strtltx       = 'x'
-   f_strtlty       = 'x'
-   f_strcorx       = 'x'
-   f_strcory       = 'x'
-   f_strocnx       = 'mdhxx'
-   f_strocny       = 'mdhxx'
-   f_strintx       = 'x'
-   f_strinty       = 'x'
-   f_taubx         = 'x'
-   f_tauby         = 'x'
-   f_strength      = 'x'
-   f_divu          = 'mdhxx'
-   f_shear         = 'mdhxx'
-   f_sig1          = 'x'
-   f_sig2          = 'x'
-   f_sigP          = 'x'
-   f_dvidtt        = 'mdhxx'
-   f_dvidtd        = 'mdhxx'
-   f_daidtt        = 'mdhxx'
-   f_daidtd        = 'mdhxx'
-   f_dagedtt       = 'x'
-   f_dagedtd       = 'x'
-   f_mlt_onset     = 'mdhxx'
-   f_frz_onset     = 'mdhxx'
-   f_hisnap        = 'x'
-   f_aisnap        = 'x'
-   f_trsig         = 'x'
-   f_icepresent    = 'x'
-   f_iage          = 'x'
-   f_FY            = 'x'
-   f_aicen         = 'x'
-   f_vicen         = 'x'
-   f_vsnon         = 'x'
-   f_snowfracn     = 'x'
-   f_keffn_top     = 'x'
-   f_Tinz          = 'x'
-   f_Sinz          = 'x'
-   f_Tsnz          = 'x'
-   f_fsurfn_ai     = 'x'
-   f_fcondtopn_ai  = 'x'
-   f_fmelttn_ai    = 'x'
-   f_flatn_ai      = 'x'
-   f_fsensn_ai     = 'x'
-/
-
-&icefields_mechred_nml
-   f_alvl         = 'x'
-   f_vlvl         = 'x'
-   f_ardg         = 'x'
-   f_vrdg         = 'x'
-   f_dardg1dt     = 'x'
-   f_dardg2dt     = 'x'
-   f_dvirdgdt     = 'x'
-   f_opening      = 'x'
-   f_ardgn        = 'x'
-   f_vrdgn        = 'x'
-   f_dardg1ndt    = 'x'
-   f_dardg2ndt    = 'x'
-   f_dvirdgndt    = 'x'
-   f_krdgn        = 'x'
-   f_aparticn     = 'x'
-   f_aredistn     = 'x'
-   f_vredistn     = 'x'
-   f_araftn       = 'x'
-   f_vraftn       = 'x'
-/
-
-&icefields_pond_nml
-   f_apondn       = 'x'
-   f_apeffn       = 'x'
-   f_hpondn       = 'x'
-   f_apond        = 'mdhxx'
-   f_hpond        = 'mdhxx'
-   f_ipond        = 'mdhxx'
-   f_apeff        = 'mdhxx'
-   f_apond_ai     = 'x'
-   f_hpond_ai     = 'x'
-   f_ipond_ai     = 'x'
-   f_apeff_ai     = 'x'
-/
-
-&icefields_drag_nml
-   f_drag         = 'x'
-   f_Cdn_atm      = 'x'
-   f_Cdn_ocn      = 'x'
-/
-
-&icefields_bgc_nml
-/
-eof
+# Prepare local variables for use in ice_in.IN from UFSWM
+# The ones already defined are left commented as a reminder
+# setup_nml section
+local SYEAR=${current_cycle:0:4}
+local SMONTH=${current_cycle:4:2}
+local SDAY=${current_cycle:6:2}
+local SECS=${sec}
+local DT_CICE=${ICETIM}
+local CICE_NPT=${npt}
+local CICE_RUNTYPE=${runtype}
+local CICE_RUNID="unknown"
+local CICE_USE_RESTART_TIME=${use_restart_time}
+local CICE_RESTART_DIR="./CICE_RESTART/"
+local CICE_RESTART_FILE="cice_model.res"
+local CICE_ICE_IC='cice_model.res.nc'
+local CICE_RESTART_DEFLATE=0
+local CICE_RESTART_CHUNK=0,0
+local CICE_RESTART_STRIDE=-99
+local CICE_RESTART_ROOT=-99
+local CICE_RESTART_REARR="box"
+local CICE_RESTART_IOTASKS=-99
+local CICE_RESTART_FORMAT="pnetcdf2"
+local CICE_DUMPFREQ="y"  # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years"
+local CICE_DUMPFREQ_N=10000  # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure
+local CICE_DIAGFREQ=$(( 86400 / DT_CICE ))  # frequency of diagnostic output in timesteps, recommended for 1x per day
+local CICE_HISTFREQ_N="0, 0, ${FHOUT_ICE}, 1, 1"
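+# The five entries correspond to the histfreq streams ('m','d','h','x','x' in
+# the previous inline namelist); only the hourly stream produces output, at an
+# interval of FHOUT_ICE hours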
+if [[ "${RUN}" =~ "gdas" ]]; then
+  local CICE_HIST_AVG=".false., .false., .false., .false., .false."   # DA needs instantaneous
+else
+  local CICE_HIST_AVG=".true., .true., .true., .true., .true."    # GFS long forecast wants output averaged over CICE_HISTFREQ_N
+fi
+local CICE_HISTORY_FORMAT="pnetcdf2"
+local CICE_HISTORY_DIR="./CICE_OUTPUT/"
+local CICE_INCOND_DIR="./CICE_OUTPUT/"
+local CICE_HISTORY_IOTASKS=-99
+local CICE_HISTORY_REARR="box"
+local CICE_HISTORY_ROOT=-99
+local CICE_HISTORY_STRIDE=-99
+local CICE_HISTORY_CHUNK=0,0
+local CICE_HISTORY_DEFLATE=0
+local CICE_HISTORY_PREC=4
+# grid_nml section
+# CICE_GRID
+# CICE_MASK
+local CICE_GRIDATM="A"  # A-grid for atmosphere (FV3)
+local CICE_GRIDOCN="A"  # A-grid for ocean (MOM6)
+local CICE_GRIDICE="B"  # B-grid for seaice (CICE6)
+# tracer_nml section
+local CICE_TR_POND_LVL=".true."  # Use level melt ponds
+# CICE_RESTART_POND_LVL (applies when CICE_TR_POND_LVL=true):
+#   -- if true, initialize the level ponds from the restart file (if runtype=continue)
+#   -- if false, re-initialize level ponds to zero (if runtype=initial or continue)
+local CICE_RESTART_POND_LVL=".false."  # Restart level ponds from restart file (if runtype=continue)
+# thermo_nml section
+local CICE_KTHERM=2  # 0=zero-layer thermodynamics, 1=fixed-salinity profile, 2=mushy thermodynamics
+# dynamics_nml section
+# NONE
+# shortwave_nml section
+# NONE
+# ponds_nml section
+# NONE
+# snow_nml section
+# NONE
+# forcing_nml section
+local CICE_FRAZIL_FWSALT=${FRAZIL_FWSALT:-".true."}
+local CICE_TFREEZE_OPTION=${tfrz_option:-"mushy"}
+# domain_nml section
+local CICE_NPROC=${ntasks_cice6}
+# NX_GLB
+# NY_GLB
+local CICE_BLCKX=${block_size_x}
+local CICE_BLCKY=${block_size_y}
+local CICE_DECOMP=${processor_shape}
+# ice_prescribed_nml section
+local CICE_PRESCRIBED="false"
+local MESH_DICE="none"
+local stream_files_dice="none"
+
+# Ensure the template exists
+local template=${CICE_TEMPLATE:-"${PARMgfs}/ufs/ice_in.IN"}
+if [[ ! -f "${template}" ]]; then
+  echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+  exit 1
+fi
+rm -f "${DATA}/ice_in"
+atparse < "${template}" >> "${DATA}/ice_in"
+echo "Rendered ice_in:"
+cat "${DATA}/ice_in"
+
+# Create a ice.restart_file when runtype is "continue"
+# This file is not needed when runtype is "initial"
+rm -f "${DATA}/ice.restart_file"
+if [[ "${runtype}" == "continue" ]]; then
+  echo "${DATA}/cice_model.res.nc" > "${DATA}/ice.restart_file"
+fi
 
 }
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index d21ceab82b..e479e7e965 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -1,21 +1,21 @@
 #! /usr/bin/env bash
 
-#####
-## "parsing_namelist_FV3.sh"
-## This script writes namelist for FV3 model
-##
-## This is the child script of ex-global forecast,
-## writing namelist for FV3
-## This script is a direct execution.
-#####
+# parsing namelist of FV3, diag_table, etc.
 
+# Disable variable not used warnings
+# shellcheck disable=SC2034
 FV3_namelists(){
 
 # setup the tables
-DIAG_TABLE=${DIAG_TABLE:-${HOMEgfs}/parm/ufs/fv3/diag_table}
-DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${HOMEgfs}/parm/ufs/fv3/diag_table_aod}
-DATA_TABLE=${DATA_TABLE:-${HOMEgfs}/parm/ufs/fv3/data_table}
-FIELD_TABLE=${FIELD_TABLE:-${HOMEgfs}/parm/ufs/fv3/field_table}
+DIAG_TABLE=${DIAG_TABLE:-${PARMgfs}/ufs/fv3/diag_table}
+DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMgfs}/ufs/fv3/diag_table_aod}
+DATA_TABLE=${DATA_TABLE:-${PARMgfs}/ufs/MOM6_data_table.IN}
+FIELD_TABLE=${FIELD_TABLE:-${PARMgfs}/ufs/fv3/field_table}
+
+# set cdmbgwd
+if (( gwd_opt == 2 )) && [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then
+  cdmbgwd=${cdmbgwd_gsl}
+fi
 
 # ensure non-prognostic tracers are set
 dnats=${dnats:-0}
@@ -33,7 +33,16 @@ if [[ -n "${AERO_DIAG_TABLE:-}" ]]; then
   cat "${AERO_DIAG_TABLE}"
 fi
 cat "${DIAG_TABLE_APPEND}"
-} >> diag_table
+} >> diag_table_template
+
+local template=diag_table_template
+local SYEAR=${current_cycle:0:4}
+local SMONTH=${current_cycle:4:2}
+local SDAY=${current_cycle:6:2}
+local CHOUR=${current_cycle:8:2}
+local MOM6_OUTPUT_DIR="./MOM6_OUTPUT"
+
+atparse < "${template}" >> "diag_table"
 
 
 # copy data table
@@ -109,7 +118,7 @@ cat >> input.nml << EOF
   range_warn = ${range_warn:-".true."}
   reset_eta = .false.
   n_sponge = ${n_sponge:-"10"}
-  nudge_qv = ${nudge_qv:-".true."}
+  nudge_qv = ${nudge_qv:-".false."}
   nudge_dz = ${nudge_dz:-".false."}
   tau = ${tau:-10.}
   rf_cutoff = ${rf_cutoff:-"7.5e2"}
@@ -349,6 +358,43 @@ EOF
   do_gsl_drag_ls_bl    = ${do_gsl_drag_ls_bl:-".true."}
   do_gsl_drag_ss       = ${do_gsl_drag_ss:-".true."}
   do_gsl_drag_tofd     = ${do_gsl_drag_tofd:-".true."}
+  do_gwd_opt_psl       = ${do_gwd_opt_psl:-".false."}
+  do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+  use_cice_alb = ${use_cice_alb:-".false."}
+EOF
+  ;;
+  FV3_global_nest*)
+  local default_dt_inner=$(( DELTIM/2 ))
+  cat >> input.nml << EOF
+  iovr         = ${iovr:-"3"}
+  lcnorm       = ${lcnorm:-".false."}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".true."}
+  ttendlim     = ${ttendlim:-"-999"}
+  dt_inner     = ${dt_inner:-"${default_dt_inner}"}
+  sedi_semi    = ${sedi_semi:-".true."}
+  decfl        = ${decfl:-"10"}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  lsoil_lsm    = ${lsoil_lsm:-"4"}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"}
+  do_ugwp      = ${do_ugwp:-".false."}
+  do_tofd      = ${do_tofd:-".false."}
+  gwd_opt      = ${gwd_opt:-"2"}
+  do_ugwp_v0   = ${do_ugwp_v0:-".false."}
+  do_ugwp_v1   = ${do_ugwp_v1:-".true."}
+  do_ugwp_v0_orog_only = ${do_ugwp_v0_orog_only:-".false."}
+  do_ugwp_v0_nst_only  = ${do_ugwp_v0_nst_only:-".false."}
+  do_gsl_drag_ls_bl    = ${do_gsl_drag_ls_bl:-".true."}
+  do_gsl_drag_ss       = ${do_gsl_drag_ss:-".true."}
+  do_gsl_drag_tofd     = ${do_gsl_drag_tofd:-".true."}
   do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."}
   alpha_fd     = ${alpha_fd:-"12.0"}
   min_lakeice  = ${min_lakeice:-"0.15"}
@@ -390,6 +436,7 @@ cat >> input.nml <<EOF
   random_clds  = ${random_clds:-".true."}
   trans_trac   = ${trans_trac:-".true."}
   cnvcld       = ${cnvcld:-".true."}
+  xr_cnvcld    = ${xr_cnvcld:-".true."}
   imfshalcnv   = ${imfshalcnv:-"2"}
   imfdeepcnv   = ${imfdeepcnv:-"2"}
   progsigma    = ${progsigma:-".true."}
@@ -398,6 +445,7 @@ cat >> input.nml <<EOF
   betadcu      = ${betadcu:-"2.0"}
   ras          = ${ras:-".false."}
   cdmbgwd      = ${cdmbgwd:-"3.5,0.25"}
+  psl_gwd_dx_factor  = ${psl_gwd_dx_factor:-"6.0"}
   prslrd0      = ${prslrd0:-"0."}
   ivegsrc      = ${ivegsrc:-"1"}
   isot         = ${isot:-"1"}
@@ -416,7 +464,7 @@ cat >> input.nml <<EOF
   iopt_tbot    = ${iopt_tbot:-"2"}
   iopt_stc     = ${iopt_stc:-"1"}
   iopt_trs     = ${iopt_trs:-"2"}
-  iopt_diag    = ${iopt_diag:-"1"}
+  iopt_diag    = ${iopt_diag:-"2"}
   debug        = ${gfs_phys_debug:-".false."}
   nstf_name    = ${nstf_name}
   nst_anl      = ${nst_anl}
@@ -458,6 +506,14 @@ cat >> input.nml <<EOF
   cplwav2atm   = ${cplwav2atm:-".false."}
 EOF
 
+if [[ ${DO_SPPT} = "YES" ]]; then
+cat >> input.nml <<EOF
+  pert_mp = .false.
+  pert_radtend = .false.
+  pert_clds = .true.
+EOF
+fi
+
 # Add namelist for IAU
 if [[ ${DOIAU} = "YES" ]]; then
   cat >> input.nml << EOF
@@ -486,7 +542,7 @@ if [[ ${DO_CA:-"NO"} = "YES" ]]; then
 EOF
 fi
 
-if [[ ${DO_LAND_PERT:-"NO"} = "YES" ]]; then
+if [[ "${DO_LAND_PERT:-NO}" == "YES" ]]; then
   cat >> input.nml << EOF
   lndp_type = ${lndp_type:-2}
   n_var_lndp = ${n_var_lndp:-0}
@@ -671,7 +727,7 @@ EOF
   skeb_tau = ${SKEB_TAU:-"-999."}
   skeb_lscale = ${SKEB_LSCALE:-"-999."}
   skebnorm = ${SKEBNORM:-"1"}
-  skeb_npass = ${SKEB_nPASS:-"30"}
+  skeb_npass = ${SKEB_NPASS:-"30"}
   skeb_vdof = ${SKEB_VDOF:-"5"}
 EOF
   fi
@@ -694,11 +750,47 @@ EOF
   sppt_logit = ${SPPT_LOGIT:-".true."}
   sppt_sfclimit = ${SPPT_SFCLIMIT:-".true."}
   use_zmtnblck = ${use_zmtnblck:-".true."}
+  pbl_taper = ${pbl_taper:-"0,0,0,0.125,0.25,0.5,0.75"}
+EOF
+  fi
+
+  if [[ "${DO_OCN_SPPT:-NO}" == "YES" ]]; then
+    cat >> input.nml <<EOF
+  OCNSPPT=${OCNSPPT}
+  OCNSPPT_LSCALE=${OCNSPPT_LSCALE}
+  OCNSPPT_TAU=${OCNSPPT_TAU}
+  ISEED_OCNSPPT=${ISEED_OCNSPPT:-${ISEED}}
+EOF
+  fi
+
+  if [[ "${DO_OCN_PERT_EPBL:-NO}" == "YES" ]]; then
+    cat >> input.nml <<EOF
+  EPBL=${EPBL}
+  EPBL_LSCALE=${EPBL_LSCALE}
+  EPBL_TAU=${EPBL_TAU}
+  ISEED_EPBL=${ISEED_EPBL:-${ISEED}}
+EOF
+  fi
+
+  if [[ "${DO_OCN_SPPT:-NO}" == "YES" ]]; then
+    cat >> input.nml <<EOF
+  OCNSPPT=${OCNSPPT}
+  OCNSPPT_LSCALE=${OCNSPPT_LSCALE}
+  OCNSPPT_TAU=${OCNSPPT_TAU}
+  ISEED_OCNSPPT=${ISEED_OCNSPPT:-${ISEED}}
+EOF
+  fi
+
+  if [[ "${DO_OCN_PERT_EPBL:-NO}" == "YES" ]]; then
+    cat >> input.nml <<EOF
+  EPBL=${EPBL}
+  EPBL_LSCALE=${EPBL_LSCALE}
+  EPBL_TAU=${EPBL_TAU}
+  ISEED_EPBL=${ISEED_EPBL:-${ISEED}}
 EOF
   fi
 
   cat >> input.nml << EOF
-  ${nam_stochy_nml:-}
 /
 EOF
 
@@ -711,13 +803,11 @@ EOF
   ISEED_LNDP = ${ISEED_LNDP:-${ISEED}}
   lndp_var_list = ${lndp_var_list}
   lndp_prt_list = ${lndp_prt_list}
-  ${nam_sfcperts_nml:-}
 /
 EOF
   else
     cat >> input.nml << EOF
 &nam_sfcperts
-  ${nam_sfcperts_nml:-}
 /
 EOF
   fi
diff --git a/ush/parsing_namelists_FV3_nest.sh b/ush/parsing_namelists_FV3_nest.sh
new file mode 100755
index 0000000000..55fea07f57
--- /dev/null
+++ b/ush/parsing_namelists_FV3_nest.sh
@@ -0,0 +1,834 @@
+#! /usr/bin/env bash
+
+# Parse the FV3 namelist, diag_table, etc. for the global-nest configuration
+
+# Disable variable not used warnings and 'masking return value' warnings
+# shellcheck disable=SC2034
+# shellcheck disable=SC2312
+FV3_namelists_nest(){
+
+# First argument tells us which namelist we're writing:
+#   global = writing input.nml for running global with a nest
+#   nest   = writing input_nest02.nml for running the nest
+namelist_mode="${1:-global}"
+
+if [[ "${namelist_mode}" == "nest" ]] ; then
+  nml_file=input_nest02.nml
+  only_input_nml="YES"
+else
+  nml_file=input.nml
+  only_input_nml="NO"
+fi
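+
+# Usage sketch (hypothetical caller, shown for illustration only): the forecast driver
+# is expected to source this file and invoke the function once per namelist, e.g.
+#   FV3_namelists_nest global   # writes input.nml for the global domain with a nest
+#   FV3_namelists_nest nest     # writes input_nest02.nml for the nest itself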
+
+# setup the tables
+DIAG_TABLE=${DIAG_TABLE:-${PARMgfs}/ufs/fv3/diag_table}
+DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMgfs}/ufs/fv3/diag_table_aod}
+DATA_TABLE=${DATA_TABLE:-${PARMgfs}/ufs/MOM6_data_table.IN}
+FIELD_TABLE=${FIELD_TABLE:-${PARMgfs}/ufs/fv3/field_table}
+
+# set cdmbgwd
+if (( gwd_opt == 2 )) && [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then
+  cdmbgwd=${cdmbgwd_gsl}
+fi
+
+# ensure non-prognostic tracers are set
+dnats=${dnats:-0}
+
+if [[ "${only_input_nml:-NO}" == "NO" ]] ; then
+# build the diag_table
+{
+echo "UFS_Weather_Model_Forecast"
+if [[ "${DOIAU}" = "YES" ]]; then
+  echo "${previous_cycle:0:4} ${previous_cycle:4:2} ${previous_cycle:6:2} ${previous_cycle:8:2} 0 0"
+else
+  echo "${current_cycle:0:4} ${current_cycle:4:2} ${current_cycle:6:2} ${current_cycle:8:2} 0 0"
+fi
+cat "${DIAG_TABLE}"
+if [[ -n "${AERO_DIAG_TABLE:-}" ]]; then
+  cat "${AERO_DIAG_TABLE}"
+fi
+cat "${DIAG_TABLE_APPEND}"
+} >> diag_table_template
+
+local template=diag_table_template
+local SYEAR=${current_cycle:0:4}
+local SMONTH=${current_cycle:4:2}
+local SDAY=${current_cycle:6:2}
+local CHOUR=${current_cycle:8:2}
+local MOM6_OUTPUT_DIR="./MOM6_OUTPUT"
+
+atparse < "${template}" >> "diag_table"
+
+
+# copy data table
+${NCP} "${DATA_TABLE}" data_table
+
+# build field_table
+if [[ -n "${AERO_FIELD_TABLE:-}" ]]; then
+  nrec=$(wc -l < "${FIELD_TABLE}")
+  prec=${nrec}
+  if (( dnats > 0 )); then
+    prec=$( grep -F -n TRACER "${FIELD_TABLE}" 2> /dev/null | tail -n "${dnats}" | head -1 | cut -d: -f1 )
+    prec=${prec:-0}
+    prec=$(( prec > 0 ? prec - 1 : prec ))
+  fi
+  { \
+    head -n "${prec}" "${FIELD_TABLE}" ; \
+    cat "${AERO_FIELD_TABLE}" ; \
+    tail -n $(( nrec - prec )) "${FIELD_TABLE}" ; \
+  } > field_table
+  # add non-prognostic tracers from additional table
+  dnats=$(( dnats + dnats_aero ))
+else
+  ${NCP} "${FIELD_TABLE}" field_table
+fi
+fi # only_input_nml
+
+if [[ "${namelist_mode}" == "global" ]] ; then
+    layout_x_here=${layout_x}
+    layout_y_here=${layout_y}
+    ntiles_here=6
+    npx_here=${npx}
+    npy_here=${npy}
+    k_split_here=${k_split}
+else
+    layout_x_here=${layout_x_nest}
+    layout_y_here=${layout_y_nest}
+    ntiles_here=1
+    nested_here=.true.
+    twowaynest_here=${twowaynest:-.true.}
+    nestupdate_here=${nestupdate:-7}
+    npx_here=${npx_nest}
+    npy_here=${npy_nest}
+    k_split_here=${k_split_nest}
+fi
+
+cat > "${nml_file}" <<EOF
+&atmos_model_nml
+  blocksize = ${blocksize}
+  chksum_debug = ${chksum_debug}
+  dycore_only = ${dycore_only}
+  ccpp_suite = ${CCPP_SUITE}
+  ${atmos_model_nml:-}
+/
+
+&diag_manager_nml
+  prepend_date = .false.
+  max_output_fields = 300
+  ${diag_manager_nml:-}
+/
+
+&fms_nml
+  clock_grain = 'ROUTINE'
+  domains_stack_size = ${domains_stack_size:-3000000}
+  print_memory_usage = ${print_memory_usage:-".false."}
+  ${fms_nml:-}
+/
+
+&fms2_io_nml
+  netcdf_default_format = "netcdf4"
+/
+
+&fv_core_nml
+  layout = ${layout_x_here},${layout_y_here}
+  io_layout = ${io_layout}
+  npx = ${npx_here}
+  npy = ${npy_here}
+  ntiles = ${ntiles_here}
+  ${nested_here:+nested = ${nested_here}}
+  ${twowaynest_here:+twowaynest = ${twowaynest_here}}
+  ${nestupdate_here:+nestupdate = ${nestupdate_here}}
+  npz = ${npz}
+  dz_min =  ${dz_min:-"6"}
+  psm_bc = ${psm_bc:-"0"}
+  grid_type = ${grid_type:-"-1"}
+  make_nh = ${make_nh}
+  fv_debug = ${fv_debug:-".false."}
+  range_warn = ${range_warn:-".true."}
+  reset_eta = .false.
+  n_sponge = ${n_sponge:-"10"}
+  nudge_qv = ${nudge_qv:-".false."}
+  nudge_dz = ${nudge_dz:-".false."}
+  tau = ${tau:-10.}
+  rf_cutoff = ${rf_cutoff:-"7.5e2"}
+  d2_bg_k1 = ${d2_bg_k1:-"0.15"}
+  d2_bg_k2 = ${d2_bg_k2:-"0.02"}
+  kord_tm = ${kord_tm:-"-9"}
+  kord_mt = ${kord_mt:-"9"}
+  kord_wz = ${kord_wz:-"9"}
+  kord_tr = ${kord_tr:-"9"}
+  hydrostatic = ${hydrostatic}
+  phys_hydrostatic = ${phys_hydrostatic}
+  use_hydro_pressure = ${use_hydro_pressure}
+  beta = 0.
+  a_imp = 1.
+  p_fac = 0.1
+  k_split = ${k_split_here}
+  n_split = ${n_split}
+  nwat = ${nwat:-2}
+  na_init = ${na_init}
+  d_ext = 0.
+  dnats = ${dnats}
+  fv_sg_adj = ${fv_sg_adj:-"450"}
+  d2_bg = 0.
+  nord = ${nord:-3}
+  dddmp = ${dddmp:-0.1}
+  d4_bg = ${d4_bg:-0.15}
+  vtdm4 = ${vtdm4}
+  delt_max = ${delt_max:-"0.002"}
+  ke_bg = 0.
+  do_vort_damp = ${do_vort_damp}
+  external_ic = ${external_ic}
+  external_eta = ${external_eta:-.true.}
+  gfs_phil = ${gfs_phil:-".false."}
+  nggps_ic = ${nggps_ic}
+  mountain = ${mountain}
+  ncep_ic = ${ncep_ic}
+  d_con = ${d_con}
+  hord_mt = ${hord_mt}
+  hord_vt = ${hord_xx}
+  hord_tm = ${hord_xx}
+  hord_dp = -${hord_xx}
+  hord_tr = ${hord_tr:-"8"}
+  adjust_dry_mass = ${adjust_dry_mass:-".true."}
+  dry_mass=${dry_mass:-98320.0}
+  consv_te = ${consv_te}
+  do_sat_adj = ${do_sat_adj:-".false."}
+  fast_tau_w_sec = ${fast_tau_w_sec:-"0.2"}
+  consv_am = .false.
+  fill = .true.
+  dwind_2d = .false.
+  print_freq = ${print_freq}
+  warm_start = ${warm_start}
+  no_dycore = ${no_dycore}
+  z_tracer = .true.
+  agrid_vel_rst = ${agrid_vel_rst:-".true."}
+  read_increment = ${read_increment}
+  res_latlon_dynamics = ${res_latlon_dynamics}
+  ${fv_core_nml-}
+EOF
+
+if [[ "${DO_NEST:-NO}" == "YES" && "${namelist_mode}" == "global" ]] ; then
+  cat >> "${nml_file}" <<EOF
+  do_schmidt = .true.
+  target_lat = ${TARGET_LAT}
+  target_lon = ${TARGET_LON}
+  stretch_fac = ${stretch_fac}
+EOF
+fi
+
+if [[ "${DO_NEST:-NO}" == "YES" && "${namelist_mode}" == "nest" ]] ; then
+  cat >> "${nml_file}" <<EOF
+  nested = .true.
+  twowaynest = ${twowaynest:-.true.}
+  nestupdate = 7
+EOF
+fi
+
+cat >> "${nml_file}" <<EOF
+/
+
+&external_ic_nml
+  filtered_terrain = ${filtered_terrain}
+  levp = ${LEVS}
+  gfs_dwinds = ${gfs_dwinds}
+  checker_tr = .false.
+  nt_checker = 0
+  ${external_ic_nml-}
+/
+
+&gfs_physics_nml
+  fhzero       = ${FHZER}
+  h2o_phys     = ${h2o_phys:-".true."}
+  ldiag3d      = ${ldiag3d:-".false."}
+  qdiag3d      = ${qdiag3d:-".false."}
+  print_diff_pgr = ${print_diff_pgr:-".false."}
+  fhcyc        = ${FHCYC}
+  use_ufo      = ${use_ufo:-".true."}
+  pre_rad      = ${pre_rad:-".false."}
+  imp_physics  = ${imp_physics:-"99"}
+EOF
+
+case "${CCPP_SUITE:-}" in
+  "FV3_GFS_v15p2_coupled")
+  cat >> "${nml_file}" << EOF
+  oz_phys      = .false.
+  oz_phys_2015 = .true.
+EOF
+  ;;
+  "FV3_GSD_v0")
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".false."}
+  ttendlim     = ${ttendlim:-0.005}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  lsoil_lsm    = ${lsoil_lsm:-"4"}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect=${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom=${bl_mynn_edmf_mom:-"1"}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+  use_cice_alb = ${use_cice_alb:-".false."}
+EOF
+  ;;
+  FV3_GFS_v16_coupled*)
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".false."}
+  ttendlim     = ${ttendlim:-"0.005"}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+EOF
+  ;;
+  FV3_GFS_v16*)
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".false."}
+  ttendlim     = ${ttendlim:-"0.005"}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  lsoil_lsm    = ${lsoil_lsm:-"4"}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+EOF
+  ;;
+  FV3_GFS_v17*)
+  local default_dt_inner=$(( DELTIM/2 ))
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".true."}
+  ttendlim     = ${ttendlim:-"-999"}
+  dt_inner     = ${dt_inner:-"${default_dt_inner}"}
+  sedi_semi    = ${sedi_semi:-".true."}
+  decfl        = ${decfl:-"10"}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  lsoil_lsm    = ${lsoil_lsm:-"4"}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"}
+  do_ugwp      = ${do_ugwp:-".false."}
+  do_tofd      = ${do_tofd:-".false."}
+  gwd_opt      = ${gwd_opt:-"2"}
+  do_ugwp_v0   = ${do_ugwp_v0:-".false."}
+  do_ugwp_v1   = ${do_ugwp_v1:-".true."}
+  do_ugwp_v0_orog_only = ${do_ugwp_v0_orog_only:-".false."}
+  do_ugwp_v0_nst_only  = ${do_ugwp_v0_nst_only:-".false."}
+  do_gsl_drag_ls_bl    = ${do_gsl_drag_ls_bl:-".true."}
+  do_gsl_drag_ss       = ${do_gsl_drag_ss:-".true."}
+  do_gsl_drag_tofd     = ${do_gsl_drag_tofd:-".true."}
+  do_gwd_opt_psl       = ${do_gwd_opt_psl:-".false."}
+  do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+  use_cice_alb = ${use_cice_alb:-".false."}
+EOF
+  ;;
+  FV3_global_nest*)
+  local default_dt_inner=$(( DELTIM/2 ))
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+  lcnorm       = ${lcnorm:-".false."}
+  ltaerosol    = ${ltaerosol:-".false."}
+  lradar       = ${lradar:-".true."}
+  ttendlim     = ${ttendlim:-"-999"}
+  dt_inner     = ${dt_inner:-"${default_dt_inner}"}
+  sedi_semi    = ${sedi_semi:-".true."}
+  decfl        = ${decfl:-"10"}
+  oz_phys      = ${oz_phys:-".false."}
+  oz_phys_2015 = ${oz_phys_2015:-".true."}
+  lsoil_lsm    = ${lsoil_lsm:-"4"}
+  do_mynnedmf  = ${do_mynnedmf:-".false."}
+  do_mynnsfclay = ${do_mynnsfclay:-".false."}
+  icloud_bl    = ${icloud_bl:-"1"}
+  bl_mynn_edmf = ${bl_mynn_edmf:-"1"}
+  bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."}
+  bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"}
+  do_ugwp      = ${do_ugwp:-".false."}
+  do_tofd      = ${do_tofd:-".false."}
+  gwd_opt      = ${gwd_opt:-"2"}
+  do_ugwp_v0   = ${do_ugwp_v0:-".false."}
+  do_ugwp_v1   = ${do_ugwp_v1:-".true."}
+  do_ugwp_v0_orog_only = ${do_ugwp_v0_orog_only:-".false."}
+  do_ugwp_v0_nst_only  = ${do_ugwp_v0_nst_only:-".false."}
+  do_gsl_drag_ls_bl    = ${do_gsl_drag_ls_bl:-".true."}
+  do_gsl_drag_ss       = ${do_gsl_drag_ss:-".true."}
+  do_gsl_drag_tofd     = ${do_gsl_drag_tofd:-".true."}
+  do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."}
+  min_lakeice  = ${min_lakeice:-"0.15"}
+  min_seaice   = ${min_seaice:-"0.15"}
+  use_cice_alb = ${use_cice_alb:-".false."}
+EOF
+  ;;
+  *)
+  cat >> "${nml_file}" << EOF
+  iovr         = ${iovr:-"3"}
+EOF
+  ;;
+esac
+
+cat >> "${nml_file}" <<EOF
+  pdfcld       = ${pdfcld:-".false."}
+  fhswr        = ${FHSWR:-"3600."}
+  fhlwr        = ${FHLWR:-"3600."}
+  ialb         = ${IALB:-"1"}
+  iems         = ${IEMS:-"1"}
+  iaer         = ${IAER}
+  icliq_sw     = ${icliq_sw:-"2"}
+  ico2         = ${ICO2}
+  isubc_sw     = ${isubc_sw:-"2"}
+  isubc_lw     = ${isubc_lw:-"2"}
+  isol         = ${ISOL:-"2"}
+  lwhtr        = ${lwhtr:-".true."}
+  swhtr        = ${swhtr:-".true."}
+  cnvgwd       = ${cnvgwd:-".true."}
+  shal_cnv     = ${shal_cnv:-".true."}
+  cal_pre      = ${cal_pre:-".true."}
+  redrag       = ${redrag:-".true."}
+  dspheat      = ${dspheat:-".true."}
+  hybedmf      = ${hybedmf:-".false."}
+  satmedmf     = ${satmedmf-".true."}
+  isatmedmf    = ${isatmedmf-"1"}
+  lheatstrg    = ${lheatstrg-".false."}
+  lseaspray    = ${lseaspray:-".true."}
+  random_clds  = ${random_clds:-".true."}
+  trans_trac   = ${trans_trac:-".true."}
+  cnvcld       = ${cnvcld:-".true."}
+  imfshalcnv   = ${imfshalcnv:-"2"}
+  imfdeepcnv   = ${imfdeepcnv:-"2"}
+  progsigma    = ${progsigma:-".true."}
+  betascu      = ${betascu:-"8.0"}
+  betamcu      = ${betamcu:-"1.0"}
+  betadcu      = ${betadcu:-"2.0"}
+  ras          = ${ras:-".false."}
+  cdmbgwd      = ${cdmbgwd:-"3.5,0.25"}
+  psl_gwd_dx_factor  = ${psl_gwd_dx_factor:-"6.0"}
+  prslrd0      = ${prslrd0:-"0."}
+  ivegsrc      = ${ivegsrc:-"1"}
+  isot         = ${isot:-"1"}
+  lsoil        = ${lsoil:-"4"}
+  lsm          = ${lsm:-"2"}
+  iopt_dveg    = ${iopt_dveg:-"1"}
+  iopt_crs     = ${iopt_crs:-"1"}
+  iopt_btr     = ${iopt_btr:-"1"}
+  iopt_run     = ${iopt_run:-"1"}
+  iopt_sfc     = ${iopt_sfc:-"1"}
+  iopt_frz     = ${iopt_frz:-"1"}
+  iopt_inf     = ${iopt_inf:-"1"}
+  iopt_rad     = ${iopt_rad:-"1"}
+  iopt_alb     = ${iopt_alb:-"2"}
+  iopt_snf     = ${iopt_snf:-"4"}
+  iopt_tbot    = ${iopt_tbot:-"2"}
+  iopt_stc     = ${iopt_stc:-"1"}
+  iopt_trs     = ${iopt_trs:-"2"}
+  iopt_diag    = ${iopt_diag:-"2"}
+  debug        = ${gfs_phys_debug:-".false."}
+  nstf_name    = ${nstf_name}
+  nst_anl      = ${nst_anl}
+  psautco      = ${psautco:-"0.0008,0.0005"}
+  prautco      = ${prautco:-"0.00015,0.00015"}
+  lgfdlmprad   = ${lgfdlmprad:-".false."}
+  effr_in      = ${effr_in:-".false."}
+  ldiag_ugwp   = ${ldiag_ugwp:-".false."}
+  do_RRTMGP          = ${do_RRTMGP:-".false."}
+  active_gases       = ${active_gases:-'h2o_co2_o3_n2o_ch4_o2'}
+  ngases             = ${ngases:-"6"}
+  lw_file_gas        = ${lw_file_gas:-'rrtmgp-data-lw-g128-210809.nc'}
+  lw_file_clouds     = ${lw_file_clouds:-'rrtmgp-cloud-optics-coeffs-lw.nc'}
+  sw_file_gas        = ${sw_file_gas:-'rrtmgp-data-sw-g112-210809.nc'}
+  sw_file_clouds     = ${sw_file_clouds:-'rrtmgp-cloud-optics-coeffs-sw.nc'}
+  rrtmgp_nGptsSW     = ${rrtmgp_nGptsSW:-"112"}
+  rrtmgp_nGptsLW     = ${rrtmgp_nGptsLW:-"128"}
+  rrtmgp_nBandsLW    = ${rrtmgp_nBandsLW:-"16"}
+  rrtmgp_nBandsSW    = ${rrtmgp_nBandsSW:-"14"}
+  doGP_cldoptics_LUT = ${doGP_cldoptics_LUT:-".false."}
+  doGP_lwscat        = ${doGP_lwscat:-".false."}
+EOF
+
+if [[ ${cplchm} = ".true." ]]; then
+  cat >> "${nml_file}" << EOF
+  fscav_aero = ${fscav_aero:-'*:0.0'}
+EOF
+fi
+
+cat >> "${nml_file}" <<EOF
+  do_sppt      = ${do_sppt:-".false."}
+  do_shum      = ${do_shum:-".false."}
+  do_skeb      = ${do_skeb:-".false."}
+  frac_grid    = ${FRAC_GRID:-".true."}
+  cplchm       = ${cplchm:-".false."}
+  cplflx       = ${cplflx:-".false."}
+  cplice       = ${cplice:-".false."}
+  cplwav       = ${cplwav:-".false."}
+  cplwav2atm   = ${cplwav2atm:-".false."}
+EOF
+
+if [[ ${DO_SPPT} = "YES" ]]; then
+cat >> "${nml_file}" <<EOF
+  pert_mp = .false.
+  pert_radtend = .false.
+  pert_clds = .true.
+EOF
+fi
+
+# Add namelist for IAU
+if [[ ${DOIAU} = "YES" ]]; then
+  cat >> "${nml_file}" << EOF
+  iaufhrs      = ${IAUFHRS}
+  iau_delthrs  = ${IAU_DELTHRS}
+  iau_inc_files= ${IAU_INC_FILES}
+  iau_drymassfixer = .false.
+  iau_filter_increments = ${IAU_FILTER_INCREMENTS:-".false."}
+EOF
+fi
+
+if [[ ${DO_CA:-"NO"} = "YES" ]]; then
+  cat >> "${nml_file}" << EOF
+  do_ca      = .true.
+  ca_global  = ${ca_global:-".false."}
+  ca_sgs     = ${ca_sgs:-".true."}
+  nca        = ${nca:-"1"}
+  ncells     = ${ncells:-"5"}
+  nlives     = ${nlives:-"12"}
+  nseed      = ${nseed:-"1"}
+  nfracseed  = ${nfracseed:-"0.5"}
+  nthresh    = ${nthresh:-"18"}
+  ca_trigger = ${ca_trigger:-".true."}
+  nspinup    = ${nspinup:-"1"}
+  iseed_ca   = ${ISEED_CA:-"12345"}
+EOF
+fi
+
+if [[ "${DO_LAND_PERT:-NO}" == "YES" ]]; then
+  cat >> "${nml_file}" << EOF
+  lndp_type = ${lndp_type:-2}
+  n_var_lndp = ${n_var_lndp:-0}
+EOF
+fi
+
+# Close &gfs_physics_nml section
+cat >> "${nml_file}" << EOF
+/
+EOF
+
+if [[ "${namelist_mode}" == "global" ]] ; then
+  cat >> "${nml_file}" << EOF
+&fv_nest_nml
+  grid_pes = $(( layout_x * layout_y * 6 )),$(( layout_x_nest * layout_y_nest ))
+  tile_coarse = 0,6
+  num_tile_top = 6
+  p_split = 1
+  nest_refine = 0,${nest_refine}
+  nest_ioffsets = 0,${nest_ioffset}
+  nest_joffsets = 0,${nest_joffset}
+/
+EOF
+fi
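+
+# Worked example (hypothetical layouts, for illustration only): with layout_x=8,
+# layout_y=8, layout_x_nest=12 and layout_y_nest=12, the &fv_nest_nml above renders
+#   grid_pes = 384,144
+# i.e. 8*8*6 PEs across the six global tiles plus 12*12 PEs for the single nest tile.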
+
+if [[ ${knob_ugwp_version} -eq 0 ]]; then
+  cat >> "${nml_file}" << EOF
+&cires_ugwp_nml
+  knob_ugwp_solver  = ${knob_ugwp_solver:-2}
+  knob_ugwp_source  = ${knob_ugwp_source:-1,1,0,0}
+  knob_ugwp_wvspec  = ${knob_ugwp_wvspec:-1,25,25,25}
+  knob_ugwp_azdir   = ${knob_ugwp_azdir:-2,4,4,4}
+  knob_ugwp_stoch   = ${knob_ugwp_stoch:-0,0,0,0}
+  knob_ugwp_effac   = ${knob_ugwp_effac:-1,1,1,1}
+  knob_ugwp_doaxyz  = ${knob_ugwp_doaxyz:-1}
+  knob_ugwp_doheat  = ${knob_ugwp_doheat:-1}
+  knob_ugwp_dokdis  = ${knob_ugwp_dokdis:-1}
+  knob_ugwp_ndx4lh  = ${knob_ugwp_ndx4lh:-1}
+  knob_ugwp_version = ${knob_ugwp_version:-0}
+  launch_level      = ${launch_level:-54}
+/
+EOF
+fi
+
+if [[ ${knob_ugwp_version} -eq 1 ]]; then
+  cat >> "${nml_file}" << EOF
+&cires_ugwp_nml
+  knob_ugwp_solver  = ${knob_ugwp_solver:-2}
+  knob_ugwp_source  = ${knob_ugwp_source:-1,1,0,0}
+  knob_ugwp_wvspec  = ${knob_ugwp_wvspec:-1,25,25,25}
+  knob_ugwp_azdir   = ${knob_ugwp_azdir:-2,4,4,4}
+  knob_ugwp_stoch   = ${knob_ugwp_stoch:-0,0,0,0}
+  knob_ugwp_effac   = ${knob_ugwp_effac:-1,1,1,1}
+  knob_ugwp_doaxyz  = ${knob_ugwp_doaxyz:-1}
+  knob_ugwp_doheat  = ${knob_ugwp_doheat:-1}
+  knob_ugwp_dokdis  = ${knob_ugwp_dokdis:-2}
+  knob_ugwp_ndx4lh  = ${knob_ugwp_ndx4lh:-4}
+  knob_ugwp_version = ${knob_ugwp_version:-1}
+  knob_ugwp_palaunch = ${knob_ugwp_palaunch:-275.0e2}
+  knob_ugwp_nslope   = ${knob_ugwp_nslope:-1}
+  knob_ugwp_lzmax    = ${knob_ugwp_lzmax:-15.750e3}
+  knob_ugwp_lzmin    = ${knob_ugwp_lzmin:-0.75e3}
+  knob_ugwp_lzstar   = ${knob_ugwp_lzstar:-2.0e3}
+  knob_ugwp_taumin   = ${knob_ugwp_taumin:-0.25e-3}
+  knob_ugwp_tauamp   = ${knob_ugwp_tauamp:-3.0e-3}
+  knob_ugwp_lhmet    = ${knob_ugwp_lhmet:-200.0e3}
+  knob_ugwp_orosolv  = ${knob_ugwp_orosolv:-'pss-1986'}
+/
+EOF
+fi
+
+echo "" >> "${nml_file}"
+
+cat >> "${nml_file}" <<EOF
+&gfdl_cloud_microphysics_nml
+  sedi_transport = .true.
+  do_sedi_heat = .false.
+  rad_snow = .true.
+  rad_graupel = .true.
+  rad_rain = .true.
+  const_vi = .false.
+  const_vs = .false.
+  const_vg = .false.
+  const_vr = .false.
+  vi_max = 1.
+  vs_max = 2.
+  vg_max = 12.
+  vr_max = 12.
+  qi_lim = 1.
+  prog_ccn = .false.
+  do_qa = .true.
+  fast_sat_adj = .true.
+  tau_l2v = 225.
+  tau_v2l = 150.
+  tau_g2v = 900.
+  rthresh = 10.e-6  ! This is a key parameter for cloud water
+  dw_land  = 0.16
+  dw_ocean = 0.10
+  ql_gen = 1.0e-3
+  ql_mlt = 1.0e-3
+  qi0_crt = 8.0E-5
+  qs0_crt = 1.0e-3
+  tau_i2s = 1000.
+  c_psaci = 0.05
+  c_pgacs = 0.01
+  rh_inc = 0.30
+  rh_inr = 0.30
+  rh_ins = 0.30
+  ccn_l = 300.
+  ccn_o = 100.
+  c_paut = 0.5
+  c_cracw = 0.8
+  use_ppm = .false.
+  use_ccn = .true.
+  mono_prof = .true.
+  z_slope_liq  = .true.
+  z_slope_ice  = .true.
+  de_ice = .false.
+  fix_negative = .true.
+  icloud_f = 1
+  mp_time = 150.
+  reiflag = ${reiflag:-"2"}
+
+  ${gfdl_cloud_microphysics_nml:-}
+/
+
+&interpolator_nml
+  interp_method = 'conserve_great_circle'
+  ${interpolator_nml:-}
+/
+
+&namsfc
+  FNGLAC   = '${FNGLAC}'
+  FNMXIC   = '${FNMXIC}'
+  FNTSFC   = '${FNTSFC}'
+  FNSNOC   = '${FNSNOC}'
+  FNZORC   = '${FNZORC}'
+  FNALBC   = '${FNALBC}'
+  FNALBC2  = '${FNALBC2}'
+  FNAISC   = '${FNAISC}'
+  FNTG3C   = '${FNTG3C}'
+  FNVEGC   = '${FNVEGC}'
+  FNVETC   = '${FNVETC}'
+  FNSOTC   = '${FNSOTC}'
+  FNSOCC   = '${FNSOCC}'
+  FNSMCC   = '${FNSMCC}'
+  FNMSKH   = '${FNMSKH}'
+  FNTSFA   = '${FNTSFA}'
+  FNACNA   = '${FNACNA:-}'
+  FNSNOA   = '${FNSNOA:-}'
+  FNVMNC   = '${FNVMNC:-}'
+  FNVMXC   = '${FNVMXC:-}'
+  FNSLPC   = '${FNSLPC:-}'
+  FNABSC   = '${FNABSC:-}'
+  LDEBUG = ${LDEBUG:-".false."}
+  FSMCL(2) = ${FSMCL2:-99999}
+  FSMCL(3) = ${FSMCL3:-99999}
+  FSMCL(4) = ${FSMCL4:-99999}
+  LANDICE  = ${landice:-".true."}
+  FTSFS = ${FTSFS:-90}
+  FAISL = ${FAISL:-99999}
+  FAISS = ${FAISS:-99999}
+  FSNOL = ${FSNOL:-99999}
+  FSNOS = ${FSNOS:-99999}
+  FSICL = ${FSICL:-99999}
+  FSICS = ${FSICS:-99999}
+  FTSFL = ${FTSFL:-99999}
+  FVETL = ${FVETL:-99999}
+  FSOTL = ${FSOTL:-99999}
+  FvmnL = ${FvmnL:-99999}
+  FvmxL = ${FvmxL:-99999}
+  FSLPL = ${FSLPL:-99999}
+  FABSL = ${FABSL:-99999}
+  ${namsfc_nml:-}
+/
+
+&fv_grid_nml
+EOF
+if [[ "${DO_NEST:-NO}" == "NO" ]] ; then
+    cat >> "${nml_file}" <<EOF
+  grid_file = 'INPUT/grid_spec.nc'
+EOF
+fi
+
+cat >> "${nml_file}" <<EOF
+  ${fv_grid_nml:-}
+/
+EOF
+
+# Add namelist for stochastic physics options
+echo "" >> "${nml_file}"
+#if [ $MEMBER -gt 0 ]; then
+if [[ "${DO_SPPT}" = "YES" || "${DO_SHUM}" = "YES" || "${DO_SKEB}" = "YES" || "${DO_LAND_PERT}" = "YES" ]]; then
+
+    cat >> "${nml_file}" << EOF
+&nam_stochy
+EOF
+
+  if [[ ${DO_SKEB} = "YES" ]]; then
+    cat >> "${nml_file}" << EOF
+  skeb = ${SKEB}
+  iseed_skeb = ${ISEED_SKEB:-${ISEED}}
+  skeb_tau = ${SKEB_TAU:-"-999."}
+  skeb_lscale = ${SKEB_LSCALE:-"-999."}
+  skebnorm = ${SKEBNORM:-"1"}
+  skeb_npass = ${SKEB_NPASS:-"30"}
+  skeb_vdof = ${SKEB_VDOF:-"5"}
+EOF
+  fi
+
+  if [[ ${DO_SHUM} = "YES" ]]; then
+    cat >> "${nml_file}" << EOF
+  shum = ${SHUM}
+  iseed_shum = ${ISEED_SHUM:-${ISEED}}
+  shum_tau = ${SHUM_TAU:-"-999."}
+  shum_lscale = ${SHUM_LSCALE:-"-999."}
+EOF
+  fi
+
+  if [[ ${DO_SPPT} = "YES" ]]; then
+    cat >> "${nml_file}" << EOF
+  sppt = ${SPPT}
+  iseed_sppt = ${ISEED_SPPT:-${ISEED}}
+  sppt_tau = ${SPPT_TAU:-"-999."}
+  sppt_lscale = ${SPPT_LSCALE:-"-999."}
+  sppt_logit = ${SPPT_LOGIT:-".true."}
+  sppt_sfclimit = ${SPPT_SFCLIMIT:-".true."}
+  use_zmtnblck = ${use_zmtnblck:-".true."}
+  pbl_taper = ${pbl_taper:-"0,0,0,0.125,0.25,0.5,0.75"}
+EOF
+  fi
+
+  if [[ "${DO_OCN_SPPT:-NO}" == "YES" ]]; then
+    cat >> "${nml_file}" <<EOF
+  OCNSPPT=${OCNSPPT}
+  OCNSPPT_LSCALE=${OCNSPPT_LSCALE}
+  OCNSPPT_TAU=${OCNSPPT_TAU}
+  ISEED_OCNSPPT=${ISEED_OCNSPPT:-${ISEED}}
+EOF
+  fi
+
+  if [[ "${DO_OCN_PERT_EPBL:-NO}" == "YES" ]]; then
+    cat >> "${nml_file}" <<EOF
+  EPBL=${EPBL}
+  EPBL_LSCALE=${EPBL_LSCALE}
+  EPBL_TAU=${EPBL_TAU}
+  ISEED_EPBL=${ISEED_EPBL:-${ISEED}}
+EOF
+  fi
+
+  cat >> "${nml_file}" << EOF
+/
+EOF
+
+  if [[ ${DO_LAND_PERT} = "YES" ]]; then
+    cat >> "${nml_file}" << EOF
+&nam_sfcperts
+  lndp_type = ${lndp_type}
+  LNDP_TAU = ${LNDP_TAU}
+  LNDP_SCALE = ${LNDP_SCALE}
+  ISEED_LNDP = ${ISEED_LNDP:-${ISEED}}
+  lndp_var_list = ${lndp_var_list}
+  lndp_prt_list = ${lndp_prt_list}
+/
+EOF
+  else
+    cat >> "${nml_file}" << EOF
+&nam_sfcperts
+/
+EOF
+  fi
+
+else
+
+  cat >> "${nml_file}" << EOF
+&nam_stochy
+/
+&nam_sfcperts
+/
+EOF
+
+fi
+
+# Echo out the formatted "${nml_file}"
+echo "===================================="
+echo "FV3_namelists_nest(): '${nml_file}'"
+cat "${nml_file}"
+echo "===================================="
+}
diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh
index 8059096363..9010851806 100755
--- a/ush/parsing_namelists_MOM6.sh
+++ b/ush/parsing_namelists_MOM6.sh
@@ -1,15 +1,14 @@
 #! /usr/bin/env bash
 
+# Disable variable not used warnings
+# shellcheck disable=SC2034
 MOM6_namelists(){
 
 # MOM6 namelists generation
 
-if [[ "${cplwav}" == ".true." ]] ; then
-  local MOM6_USE_WAVES='True'
-else
-  local MOM6_USE_WAVES='False'
-fi
-
+# ================================================================
+# input.nml
+# ---------
 cat >> input.nml <<EOF
 
 &MOM_input_nml
@@ -26,30 +25,6 @@ EOF
 #  new_lscale=.true.
 #EOF
 
-if [[ ${DO_OCN_SPPT} = "YES" ]]; then
-  local OCN_SPPT="True"
-  cat >> input.nml <<EOF
-  OCNSPPT=${OCNSPPT:-1.0}
-  OCNSPPT_LSCALE=${OCNSPPT_LSCALE:-500e3}
-  OCNSPPT_TAU=${OCNSPPT_TAU:-21600}
-  ISEED_OCNSPPT=${ISEED_OCNSPPT:-${ISEED}}
-EOF
-else
-  local OCN_SPPT="False"
-fi
-
-if [[ ${DO_OCN_PERT_EPBL} = "YES" ]]; then
-  local PERT_EPBL="True"
-  cat >> input.nml <<EOF
-  EPBL=${EPBL:-1.0}
-  EPBL_LSCALE=${EPBL_LSCALE:-500e3}
-  EPBL_TAU=${EPBL_TAU:-21600}
-  ISEED_EPBL=${ISEED_EPBL:-${ISEED}}
-EOF
-  else
-    local PERT_EPBL="False"
-fi
-
 #cat >> input.nml <<EOF
 #/
 #
@@ -58,34 +33,84 @@ fi
 #
 #EOF
 
-echo "$(cat input.nml)"
+echo "Rendered input.nml:"
+cat input.nml
 
+# ================================================================
+# MOM_input
+# ---------
+# Prepare local variables for use in MOM_input.IN from UFSWM
+# The ones already defined are left commented as a reminder
+# == MOM_domains section ==
+# NX_GLB
+# NY_GLB
+# == MOM section ==
+# DT_DYNAM_MOM6
+# DT_THERM_MOM6
+# MOM6_THERMO_SPAN
+# == MOM_grid_init section ==
+local MOM6_TOPOEDITS=${TOPOEDITS}
+# MOM6_ALLOW_LANDMASK_CHANGES
+# == MOM_diag_mediator section ==
+# MOM6_DIAG_COORD_DEF_Z_FILE
+# MOM6_DIAG_MISVAL
+# == MOM_diabatic_aux section ==
+local MOM6_CHLCLIM=${CHLCLIM}
+# == MOM_energetic_PBL section ==
+# MOM6_USE_LI2016
+if [[ "${cplwav}" == ".true." ]] ; then
+  local MOM6_USE_WAVES="True"
+else
+  local MOM6_USE_WAVES="False"
+fi
+# == MOM_oda_incupd section ==
+local ODA_TEMPINC_VAR=${ODA_TEMPINC_VAR:-"Temp"}
+local ODA_SALTINC_VAR=${ODA_SALTINC_VAR:-"Salt"}
+local ODA_THK_VAR=${ODA_THK_VAR:-"h"}
+local ODA_INCUPD_UV="True"
+local ODA_UINC_VAR=${ODA_UINC_VAR:-"u"}
+local ODA_VINC_VAR=${ODA_VINC_VAR:-"v"}
+# ODA_INCUPD
+# ODA_INCUPD_NHOURS
+# == MOM_surface_forcing section ==
+# MOM6_RIVER_RUNOFF
+# == ocean_stochastics section ==
+if [[ "${DO_OCN_SPPT}" == "YES" ]]; then
+  local DO_OCN_SPPT="True"  # TODO: This is problematic if DO_OCN_SPPT is going to be used elsewhere
+else
+  local DO_OCN_SPPT="False"
+fi
+if [[ "${DO_OCN_PERT_EPBL}" == "YES" ]]; then
+  local PERT_EPBL="True"
+else
+  local PERT_EPBL="False"
+fi
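+# Illustrative note (the template contents are an assumption): MOM_input_${OCNRES}.IN
+# is expected to carry @[VAR] tokens for the variables prepared above; for example a line
+#   USE_WAVES = @[MOM6_USE_WAVES]
+# would render as "USE_WAVES = True" when cplwav is ".true." and "False" otherwise.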
+# Ensure the template exists
+local template=${MOM6_INPUT_TEMPLATE:-"${PARMgfs}/ufs/MOM_input_${OCNRES}.IN"}
+if [[ ! -f "${template}" ]]; then
+  echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+  exit 1
+fi
+rm -f "${DATA}/INPUT/MOM_input"
+atparse < "${template}" >> "${DATA}/INPUT/MOM_input"
+echo "Rendered MOM_input:"
+cat "${DATA}/INPUT/MOM_input"
 
-#Copy MOM_input and edit:
-${NCP} -pf "${HOMEgfs}/parm/ufs/mom6/MOM_input_template_${OCNRES}" "${DATA}/INPUT/"
-sed -e "s/@\[DT_THERM_MOM6\]/${DT_THERM_MOM6}/g" \
-    -e "s/@\[DT_DYNAM_MOM6\]/${DT_DYNAM_MOM6}/g" \
-    -e "s/@\[MOM6_RIVER_RUNOFF\]/${MOM6_RIVER_RUNOFF}/g" \
-    -e "s/@\[MOM6_THERMO_SPAN\]/${MOM6_THERMO_SPAN}/g" \
-    -e "s/@\[MOM6_USE_LI2016\]/${MOM6_USE_LI2016}/g" \
-    -e "s/@\[MOM6_USE_WAVES\]/${MOM6_USE_WAVES}/g" \
-    -e "s/@\[MOM6_ALLOW_LANDMASK_CHANGES\]/${MOM6_ALLOW_LANDMASK_CHANGES}/g" \
-    -e "s/@\[NX_GLB\]/${NX_GLB}/g" \
-    -e "s/@\[NY_GLB\]/${NY_GLB}/g" \
-    -e "s/@\[CHLCLIM\]/${CHLCLIM}/g" \
-    -e "s/@\[DO_OCN_SPPT\]/${OCN_SPPT}/g" \
-    -e "s/@\[PERT_EPBL\]/${PERT_EPBL}/g" \
-    -e "s/@\[MOM6_DIAG_COORD_DEF_Z_FILE\]/${MOM6_DIAG_COORD_DEF_Z_FILE}/g" \
-    -e "s/@\[TOPOEDITS\]/${TOPOEDITS}/g" \
-    -e "s/@\[MOM6_DIAG_MISVAL\]/${MOM6_DIAG_MISVAL}/g" \
-    -e "s/@\[ODA_INCUPD_NHOURS\]/${ODA_INCUPD_NHOURS}/g" \
-    -e "s/@\[ODA_INCUPD\]/${ODA_INCUPD}/g" "${DATA}/INPUT/MOM_input_template_${OCNRES}" > "${DATA}/INPUT/MOM_input"
-rm "${DATA}/INPUT/MOM_input_template_${OCNRES}"
+# ================================================================
+# data_table
+# ----------
+# Prepare local variables for use in MOM6_data_table.IN from UFSWM
+local MOM6_FRUNOFF=${FRUNOFF}
 
-#data table for runoff:
-DATA_TABLE=${DATA_TABLE:-${HOMEgfs}/parm/ufs/fv3/data_table}
-${NCP} "${DATA_TABLE}" "${DATA}/data_table_template"
-sed -e "s/@\[FRUNOFF\]/${FRUNOFF}/g" "${DATA}/data_table_template" > "${DATA}/data_table"
-rm "${DATA}/data_table_template"
+# Ensure the template exists
+local template=${MOM6_DATA_TABLE_TEMPLATE:-"${PARMgfs}/ufs/MOM6_data_table.IN"}
+if [[ ! -f "${template}" ]]; then
+  echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+  exit 1
+fi
+rm -f "${DATA}/data_table"
+atparse < "${template}" >> "${DATA}/data_table"
+echo "Rendered data_table:"
+cat "${DATA}/data_table"
 
 }
diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh
index 9b0a94695c..c57a90e50a 100755
--- a/ush/parsing_namelists_WW3.sh
+++ b/ush/parsing_namelists_WW3.sh
@@ -18,7 +18,7 @@ WW3_namelists(){
   fi
   # Set time stamps for model start and output
   # For special case when IAU is on but this is an initial half cycle 
-  if [ $IAU_OFFSET = 0 ]; then
+  if [ ${IAU_OFFSET:-0} = 0 ]; then
     ymdh_beg=$YMDH
   else
     ymdh_beg=$($NDATE -$WAVHINDH $YMDH)
@@ -79,8 +79,8 @@ WW3_namelists(){
 
   if [ $waveMULTIGRID = ".true." ]; then
     # ww3_multi template
-    if [ -f $PARMwave/ww3_multi.inp.tmpl ]; then
-      cp $PARMwave/ww3_multi.inp.tmpl ww3_multi.inp.tmpl
+    if [ -f ${PARMgfs}/wave/ww3_multi.inp.tmpl ]; then
+      cp ${PARMgfs}/wave/ww3_multi.inp.tmpl ww3_multi.inp.tmpl
     fi
     if [ ! -f ww3_multi.inp.tmpl ]; then
       echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 MULTI INPUT FILE" 
@@ -88,8 +88,8 @@ WW3_namelists(){
     fi
   else 
     # ww3_multi template
-    if [ -f $PARMwave/ww3_shel.inp.tmpl ]; then
-      cp $PARMwave/ww3_shel.inp.tmpl ww3_shel.inp.tmpl
+    if [ -f ${PARMgfs}/wave/ww3_shel.inp.tmpl ]; then
+      cp ${PARMgfs}/wave/ww3_shel.inp.tmpl ww3_shel.inp.tmpl
     fi
     if [ ! -f ww3_shel.inp.tmpl ]; then
       echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE" 
@@ -99,18 +99,18 @@ WW3_namelists(){
 
 # Buoy location file
 
-  if [ -f $PARMwave/wave_${NET}.buoys ]
+  if [ -f ${PARMgfs}/wave/wave_${NET}.buoys ]
   then
-    cp $PARMwave/wave_${NET}.buoys buoy.loc
+    cp ${PARMgfs}/wave/wave_${NET}.buoys buoy.loc
   fi
 
   if [ -f buoy.loc ]
   then
     set +x
-    echo "   buoy.loc copied ($PARMwave/wave_${NET}.buoys)."
+    echo "   buoy.loc copied (${PARMgfs}/wave/wave_${NET}.buoys)."
     set_trace
   else
-    echo " FATAL ERROR : buoy.loc ($PARMwave/wave_${NET}.buoys) NOT FOUND"
+    echo " FATAL ERROR : buoy.loc (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND"
     exit 12 
   fi
 
diff --git a/ush/ufs_configure.sh b/ush/parsing_ufs_configure.sh
similarity index 79%
rename from ush/ufs_configure.sh
rename to ush/parsing_ufs_configure.sh
index 8898d11162..062b40591e 100755
--- a/ush/ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -1,36 +1,31 @@
 #! /usr/bin/env bash
 
 #####
-## This script writes ufs.configure file
-## first, select a "*.IN" templates based on
-## $confignamevarforufs and parse values based on
-## $cpl** switches.
-##
-## This is a child script of modular
-## forecast script. This script is definition only (Is it? There is nothing defined here being used outside this script.)
+## This script writes ufs.configure file based on a template defined in
+## ${ufs_configure_template}
 #####
 
 # Disable variable not used warnings
 # shellcheck disable=SC2034
-writing_ufs_configure() {
+UFS_configure() {
 
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh begins"
+echo "SUB ${FUNCNAME[0]}: ufs.configure begins"
 
 # Setup ufs.configure
-local DumpFields=${NEMSDumpFields:-false}
+local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE
+local DumpFields=${DumpFields:-false}
 local cap_dbug_flag=${cap_dbug_flag:-0}
+
 # Determine "cmeps_run_type" based on the availability of the mediator restart file
 # If it is a warm_start, we already copied the mediator restart to DATA, if it was present
 # If the mediator restart was not present, despite being a "warm_start", we put out a WARNING
-# in forecast_postdet.sh
+# in forecast_postdet.sh function CMEPS_postdet
 if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then
   local cmeps_run_type='continue'
 else
   local cmeps_run_type='startup'
 fi
 
-local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE
-
 # Atm-related
 local atm_model="fv3"
 local atm_petlist_bounds="0 $(( ATMPETS-1 ))"
@@ -44,6 +39,8 @@ if [[ "${cpl}" = ".true." ]]; then
   local coupling_interval_slow_sec="${CPL_SLOW}"
 fi
 
+local WRITE_ENDOFRUN_RESTART=.false.
+
 if [[ "${cplflx}" = ".true." ]]; then
 
   local use_coldstart=${use_coldstart:-".false."}
@@ -53,12 +50,15 @@ if [[ "${cplflx}" = ".true." ]]; then
   local ocn_petlist_bounds="${ATMPETS} $(( ATMPETS+OCNPETS-1 ))"
   local ocn_omp_num_threads="${OCNTHREADS}"
   local RUNTYPE="${cmeps_run_type}"
+  local CMEPS_RESTART_DIR="CMEPS_RESTART/"
   local CPLMODE="${cplmode}"
   local coupling_interval_fast_sec="${CPL_FAST}"
   local RESTART_N="${restart_interval}"
   local ocean_albedo_limit=0.06
   local ATMTILESIZE="${CASE:1}"
   local ocean_albedo_limit=0.06
+  local pio_rearranger=${pio_rearranger:-"box"}
+  local MED_history_n=1000000 
 fi
 
 if [[ "${cplice}" = ".true." ]]; then
@@ -66,7 +66,6 @@ if [[ "${cplice}" = ".true." ]]; then
   local ice_model="cice6"
   local ice_petlist_bounds="$(( ATMPETS+OCNPETS )) $(( ATMPETS+OCNPETS+ICEPETS-1 ))"
   local ice_omp_num_threads="${ICETHREADS}"
-  local MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"}
   local FHMAX="${FHMAX_GFS}"  # TODO:  How did this get in here hard-wired to FHMAX_GFS?
 fi
 
@@ -76,6 +75,7 @@ if [[ "${cplwav}" = ".true." ]]; then
   local wav_petlist_bounds="$(( ATMPETS+OCNPETS+ICEPETS )) $(( ATMPETS+OCNPETS+ICEPETS+WAVPETS-1 ))"
   local wav_omp_num_threads="${WAVTHREADS}"
   local MULTIGRID="${waveMULTIGRID}"
+  local WW3_user_sets_restname="false"
 
 fi
 
@@ -84,7 +84,7 @@ if [[ "${cplchm}" = ".true." ]]; then
   local chm_model="gocart"
   local chm_petlist_bounds="0 $(( CHMPETS-1 ))"
   local chm_omp_num_threads="${CHMTHREADS}"
-  local coupling_interval_fast_sec="${CPL_FAST}"
+  local coupling_interval_sec="${CPL_FAST}"
 
 fi
 
@@ -92,9 +92,10 @@ fi
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!"
   exit 1
+else
+  echo "INFO: using ufs.configure template: '${ufs_configure_template}'"
 fi
 
-source "${HOMEgfs}/ush/atparse.bash"
 rm -f "${DATA}/ufs.configure"
 atparse < "${ufs_configure_template}" >> "${DATA}/ufs.configure"
 echo "Rendered ufs.configure:"
@@ -102,6 +103,6 @@ cat ufs.configure
 
 ${NCP} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml
 
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh ends for ${ufs_configure_template}"
+echo "SUB ${FUNCNAME[0]}: ufs.configure ends"
 
 }
diff --git a/ush/preamble.sh b/ush/preamble.sh
old mode 100644
new mode 100755
index be64684aa8..08d7659ad1
--- a/ush/preamble.sh
+++ b/ush/preamble.sh
@@ -16,6 +16,8 @@
 #   TRACE (YES/NO): Whether to echo every command (set -x) [default: "YES"]
 #   STRICT (YES/NO): Whether to exit immediately on error or undefined variable
 #     (set -eu) [default: "YES"]
+#   POSTAMBLE_CMD (empty/set): A command to run at the end of the job
+#     [default: empty]
 #
 #######
 set +x
@@ -70,6 +72,24 @@ postamble() {
     start_time="${2}"
     rc="${3}"
 
+    # Execute postamble command
+    #
+    # Commands can be added to the postamble by appending them to $POSTAMBLE_CMD:
+    #    POSTAMBLE_CMD="new_thing; ${POSTAMBLE_CMD:-}" # (before existing commands)
+    #    POSTAMBLE_CMD="${POSTAMBLE_CMD:-}; new_thing" # (after existing commands)
+    #
+    # Always use this form so previously added POSTAMBLE_CMD commands are not overwritten.
+    #   This should only be used for commands that execute conditionally (e.g. on certain machines
+    #   or jobs). Global changes should just be added to this function.
+    # These commands will be called when EACH SCRIPT terminates, so be mindful. Please
+    #   consult with global-workflow CMs about permanent changes to $POSTAMBLE_CMD or
+    #   this postamble function.
+    #
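+    # A minimal, hypothetical example (the machine check is illustrative only):
+    #   if [[ "${machine:-}" == "HERA" ]]; then
+    #     POSTAMBLE_CMD="${POSTAMBLE_CMD:-}; echo 'postamble: finished on Hera'"
+    #   fi
+    #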
+
+    if [[ -v 'POSTAMBLE_CMD' ]]; then
+      ${POSTAMBLE_CMD}
+    fi
+
     # Calculate the elapsed time
     end_time=$(date +%s)
     end_time_human=$(date -d@"${end_time}" -u +%H:%M:%S)
@@ -87,70 +107,7 @@ postamble() {
 trap "postamble ${_calling_script} ${start_time} \$?" EXIT
 # shellcheck disable=
 
-function generate_com() {
-    #
-    # Generate a list COM variables from a template by substituting in env variables.
-    #
-    # Each argument must have a corresponding template with the name ${ARG}_TMPL. Any 
-    #   variables in the template are replaced with their values. Undefined variables
-    #   are just removed without raising an error.
-    #
-    # Accepts as options `-r` and `-x`, which do the same thing as the same options in
-    #   `declare`. Variables are automatically marked as `-g` so the variable is visible
-    #   in the calling script.
-    #
-    # Syntax:
-    #   generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]]
-    #
-    #   options:
-    #       -r: Make variable read-only (same as `decalre -r`)
-    #       -x: Mark variable for export (same as `declare -x`)
-    #   var1, var2, etc: Variable names whose values will be generated from a template
-    #                    and declared
-    #   tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL")
-    #
-    #   Examples:
-    #       # Current cycle and RUN, implicitly using template COM_ATMOS_ANALYSIS_TMPL
-    #       YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
-    #
-    #       # Previous cycle and gdas using an explicit template
-    #       RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
-    #           COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
-    #
-    #       # Current cycle and COM for first member
-    #       MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
-    #
-    if [[ ${DEBUG_WORKFLOW:-"NO"} == "NO" ]]; then set +x; fi
-    local opts="-g"
-    local OPTIND=1
-    while getopts "rx" option; do
-        opts="${opts}${option}"
-    done
-    shift $((OPTIND-1))
-
-    for input in "$@"; do
-        IFS=':' read -ra args <<< "${input}"
-        local com_var="${args[0]}"
-        local template
-        local value
-        if (( ${#args[@]} > 1 )); then
-            template="${args[1]}"
-        else
-            template="${com_var}_TMPL"
-        fi
-        if [[ ! -v "${template}" ]]; then
-            echo "FATAL ERROR in generate_com: Requested template ${template} not defined!"
-            exit 2
-        fi
-        value=$(echo "${!template}" | envsubst)
-        # shellcheck disable=SC2086
-        declare ${opts} "${com_var}"="${value}"
-        echo "generate_com :: ${com_var}=${value}"
-    done
-    set_trace
-}
-# shellcheck disable=
-declare -xf generate_com
+source "${HOMEgfs}/ush/bash_utils.sh"
 
 # Turn on our settings
 set_strict
diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py
index e69de29bb2..c0b72bbc35 100644
--- a/ush/python/pygfs/__init__.py
+++ b/ush/python/pygfs/__init__.py
@@ -0,0 +1,18 @@
+
+import os
+
+from .task.analysis import Analysis
+from .task.aero_emissions import AerosolEmissions
+from .task.aero_analysis import AerosolAnalysis
+from .task.atm_analysis import AtmAnalysis
+from .task.atmens_analysis import AtmEnsAnalysis
+from .task.marine_bmat import MarineBMat
+from .task.snow_analysis import SnowAnalysis
+from .task.upp import UPP
+from .task.oceanice_products import OceanIceProducts
+from .task.gfs_forecast import GFSForecast
+from .utils import marine_da_utils
+
+__docformat__ = "restructuredtext"
+__version__ = "0.1.0"
+pygfs_directory = os.path.dirname(__file__)
diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py
index 0e515a0df4..69a992d7d4 100644
--- a/ush/python/pygfs/task/aero_analysis.py
+++ b/ush/python/pygfs/task/aero_analysis.py
@@ -12,7 +12,7 @@
                     add_to_datetime, to_fv3time, to_timedelta,
                     chdir,
                     to_fv3time,
-                    YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    YAMLFile, parse_j2yaml, save_as_yaml,
                     logit,
                     Executable,
                     WorkflowException)
@@ -29,33 +29,33 @@ class AerosolAnalysis(Analysis):
     def __init__(self, config):
         super().__init__(config)
 
-        _res = int(self.config['CASE'][1:])
-        _res_anl = int(self.config['CASE_ANL'][1:])
-        _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2)
-        _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
+        _res = int(self.task_config['CASE'][1:])
+        _res_anl = int(self.task_config['CASE_ANL'][1:])
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+        _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
 
         # Create a local dictionary that is repeatedly used across this class
         local_dict = AttrDict(
             {
                 'npx_ges': _res + 1,
                 'npy_ges': _res + 1,
-                'npz_ges': self.config.LEVS - 1,
-                'npz': self.config.LEVS - 1,
+                'npz_ges': self.task_config.LEVS - 1,
+                'npz': self.task_config.LEVS - 1,
                 'npx_anl': _res_anl + 1,
                 'npy_anl': _res_anl + 1,
-                'npz_anl': self.config['LEVS'] - 1,
+                'npz_anl': self.task_config['LEVS'] - 1,
                 'AERO_WINDOW_BEGIN': _window_begin,
-                'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
-                'aero_bkg_fhr': map(int, self.config['aero_bkg_times'].split(',')),
-                'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
-                'fv3jedi_yaml': _fv3jedi_yaml,
+                'AERO_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H",
+                'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')),
+                'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
+                'jedi_yaml': _jedi_yaml,
             }
         )
 
-        # task_config is everything that this task should need
-        self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+        # Extend task_config with local_dict
+        self.task_config = AttrDict(**self.task_config, **local_dict)
 
     @logit(logger)
     def initialize(self: Analysis) -> None:
@@ -73,15 +73,13 @@ def initialize(self: Analysis) -> None:
         super().initialize()
 
         # stage CRTM fix files
-        crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_crtm_coeff.yaml')
-        logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}")
-        crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config)
+        logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
+        crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config)
         FileHandler(crtm_fix_list).sync()
 
         # stage fix files
-        jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_jedi_fix.yaml')
-        logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}")
-        jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config)
+        logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}")
+        jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config)
         FileHandler(jedi_fix_list).sync()
 
         # stage berror files
@@ -93,10 +91,9 @@ def initialize(self: Analysis) -> None:
         FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync()
 
         # generate variational YAML file
-        logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}")
-        varda_yaml = parse_j2yaml(self.task_config['AEROVARYAML'], self.task_config)
-        save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml)
-        logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}")
+        logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}")
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
+        logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}")
 
         # need output dir for diags and anl
         logger.debug("Create empty output [anl, diags] directories to receive output from executable")
@@ -112,9 +109,11 @@ def execute(self: Analysis) -> None:
         chdir(self.task_config.DATA)
 
         exec_cmd = Executable(self.task_config.APRUN_AEROANL)
-        exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x')
+        exec_name = os.path.join(self.task_config.DATA, 'gdas.x')
         exec_cmd.add_default_arg(exec_name)
-        exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml)
+        exec_cmd.add_default_arg('fv3jedi')
+        exec_cmd.add_default_arg('variational')
+        exec_cmd.add_default_arg(self.task_config.jedi_yaml)
 
         try:
             logger.debug(f"Executing {exec_cmd}")
@@ -158,8 +157,8 @@ def finalize(self: Analysis) -> None:
                 archive.add(diaggzip, arcname=os.path.basename(diaggzip))
 
         # copy full YAML from executable to ROTDIR
-        src = os.path.join(self.task_config['DATA'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
-        dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
+        src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
+        dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
         yaml_copy = {
             'mkdir': [self.task_config.COM_CHEM_ANALYSIS],
             'copy': [[src, dest]]
@@ -212,7 +211,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None:
         inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' + increment_template)
         bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template)
         # get list of increment vars
-        incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aeroanl_inc_vars.yaml')
+        incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml')
         incvars = YAMLFile(path=incvars_list_path)['incvars']
         super().add_fv3_increments(inc_template, bkg_template, incvars)
 
diff --git a/ush/python/pygfs/task/aero_emissions.py b/ush/python/pygfs/task/aero_emissions.py
new file mode 100644
index 0000000000..5f2d4c6840
--- /dev/null
+++ b/ush/python/pygfs/task/aero_emissions.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+
+import os
+from logging import getLogger
+from typing import Dict, Any, Union
+from pprint import pformat
+
+from wxflow import (AttrDict,
+                    parse_j2yaml,
+                    FileHandler,
+                    Jinja,
+                    logit,
+                    Task,
+                    add_to_datetime, to_timedelta,
+                    WorkflowException,
+                    Executable, which)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class AerosolEmissions(Task):
+    """Aerosol Emissions pre-processing Task
+    """
+
+    @logit(logger, name="AerosolEmissions")
+    def __init__(self, config: Dict[str, Any]) -> None:
+        """Constructor for the Aerosol Emissions task
+
+        Parameters
+        ----------
+        config : Dict[str, Any]
+            Incoming configuration for the task from the environment
+
+        Returns
+        -------
+        None
+        """
+        super().__init__(config)
+
+        local_variable = "something"
+
+        localdict = AttrDict(
+            {'variable_used_repeatedly': local_variable}
+        )
+
+        # Extend task_config with localdict
+        self.task_config = AttrDict(**self.task_config, **localdict)
+
+    @staticmethod
+    @logit(logger)
+    def initialize() -> None:
+        """Initialize the work directory
+        """
+
+    @staticmethod
+    @logit(logger)
+    def configure() -> None:
+        """Configure the artifacts in the work directory.
+        Copy run specific data to run directory
+        """
+
+    @staticmethod
+    @logit(logger)
+    def execute(workdir: Union[str, os.PathLike], aprun_cmd: str) -> None:
+        """Run the executable (if any)
+
+        Parameters
+        ----------
+        workdir : str | os.PathLike
+            work directory with the staged data, parm files, namelists, etc.
+        aprun_cmd : str
+            launcher command for executable.x
+
+        Returns
+        -------
+        None
+        """
+
+    @staticmethod
+    @logit(logger)
+    def finalize() -> None:
+        """Perform closing actions of the task.
+        Copy data back from the DATA/ directory to COM/
+        """
diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py
new file mode 100644
index 0000000000..d8396fe3ca
--- /dev/null
+++ b/ush/python/pygfs/task/aero_prepobs.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+
+import os
+import glob
+import gzip
+import tarfile
+import re
+from logging import getLogger
+from typing import List, Dict, Any, Union
+
+from wxflow import (AttrDict, FileHandler, rm_p, rmdir,
+                    Task, add_to_datetime, to_timedelta, to_datetime,
+                    datetime_to_YMD,
+                    chdir, Executable, WorkflowException,
+                    parse_j2yaml, save_as_yaml, logit)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class AerosolObsPrep(Task):
+    """
+    Class for preparing and managing aerosol observations
+    """
+    def __init__(self, config: Dict[str, Any]) -> None:
+        super().__init__(config)
+
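+        # Define the observation time window, centered on the current cycle with a width of assim_freq hours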
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+        _window_end = add_to_datetime(self.task_config.current_cycle, +to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+
+        local_dict = AttrDict(
+            {
+                'window_begin': _window_begin,
+                'window_end': _window_end,
+                'sensors': str(self.task_config['SENSORS']).split(','),
+                'data_dir': self.task_config['VIIRS_DATA_DIR'],
+                'input_files': '',
+                'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z."
+            }
+        )
+
+        # task_config is everything that this task should need
+        self.task_config = AttrDict(**self.task_config, **local_dict)
+
+    @logit(logger)
+    def initialize(self) -> None:
+        """
+        List needed raw obs files.
+        Copy the raw obs files to $DATA/obs.
+        Link over the needed executable.
+        Generate corresponding YAML file.
+        Run IODA converter.
+        """
+        self.task_config.DATA_OBS = os.path.join(self.task_config.DATA, 'obs')
+        if os.path.exists(self.task_config.DATA_OBS):
+            rmdir(self.task_config.DATA_OBS)
+        FileHandler({'mkdir': [self.task_config.DATA_OBS]}).sync()
+
+        self.task_config.prepaero_yaml = []
+        for sensor in self.task_config.sensors:
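+            # For each sensor: stage the matching raw files, link the converter executable,
+            # and render a converter YAML configuration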
+            raw_files = self.list_raw_files(sensor)
+            self.task_config.input_files = self.copy_obs(raw_files)
+            self.link_obsconvexe()
+            self.task_config.prepaero_config = self.get_obsproc_config(sensor)
+
+            # generate converter YAML file
+            template = f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml"
+            _prepaero_yaml = os.path.join(self.task_config.DATA, template)
+            self.task_config.prepaero_yaml.append(_prepaero_yaml)
+            logger.debug(f"Generate PrepAeroObs YAML file: {_prepaero_yaml}")
+            save_as_yaml(self.task_config.prepaero_config, _prepaero_yaml)
+            logger.info(f"Wrote PrepAeroObs YAML to: {_prepaero_yaml}")
+
+    @logit(logger)
+    def list_raw_files(self, sensor) -> List[str]:
+        """
+        List all files in the data directory that match the given sensor and fall within the assimilation window.
+        """
+        if sensor == 'n20':
+            sensor = 'j01'
+        dir1 = os.path.join(self.task_config.data_dir, datetime_to_YMD(self.task_config.window_begin))
+        dir2 = os.path.join(self.task_config.data_dir, datetime_to_YMD(self.task_config.window_end))
+
+        if dir1 == dir2:
+            files = os.listdir(dir1)
+            allfiles = [os.path.join(dir1, file) for file in files]
+            allfiles.sort()
+        else:
+            files_1 = os.listdir(dir1)
+            allfiles_1 = [os.path.join(dir1, file) for file in files_1]
+            files_2 = os.listdir(dir2)
+            allfiles_2 = [os.path.join(dir2, file) for file in files_2]
+            allfiles = sorted(allfiles_1 + allfiles_2)
+        matching_files = []
+        try:
+            for file in allfiles:
+                basename = os.path.basename(file)
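+                # The start-time token is assumed to follow the VIIRS JRR-AOD naming convention,
+                # e.g. JRR-AOD_<ver>_<sat>_sYYYYMMDDHHMMSSS_..., so field 3 carries the observation start time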
+                pattern = r"s(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{3})"
+                match = re.match(pattern, basename.split('_')[3])
+                yyyy, mm, dd, HH, MM = match.group(1), match.group(2), match.group(3), match.group(4), match.group(5)
+                fstart = to_datetime(f'{yyyy}-{mm}-{dd}T{HH}:{MM}Z')
+                if sensor == basename.split('_')[2]:
+                    # temporally select obs files based on time stamp in the filename.
+                    if (fstart > self.task_config.window_begin) and (fstart < self.task_config.window_end):
+                        matching_files.append(file)
+            logger.info("Found %d matching files.", len(matching_files))
+        except FileNotFoundError:
+            logger.error("The specified file/directory does not exist.")
+            raise
+        return matching_files
+
+    @logit(logger)
+    def copy_obs(self, inputfiles) -> Dict[str, Any]:
+        """
+        Copy the raw obs files to $DATA/obs.
+        """
+        copylist = []
+        destlist = []
+        for filename in inputfiles:
+            basename = os.path.basename(filename)
+            dest = os.path.join(self.task_config.DATA_OBS, basename)
+            copylist.append([filename, dest])
+            destlist.append(dest)
+        FileHandler({'copy': copylist}).sync()
+
+        return destlist
+
+    @logit(logger)
+    def get_obsproc_config(self, sensor) -> Dict[str, Any]:
+        """
+        Compile a dictionary of obs proc configuration from OBSPROCYAML template file
+        Parameters
+        ----------
+        Returns
+        ----------
+        obsproc_config : Dict
+            a dictionary containing the fully rendered obs proc yaml configuration
+        """
+        self.task_config.sensor = sensor
+        # generate JEDI YAML file
+        logger.info(f"Generate gdas_obsprovider2ioda YAML config: {self.task_config.OBSPROCYAML}")
+        prepaero_config = parse_j2yaml(self.task_config.OBSPROCYAML, self.task_config)
+
+        return prepaero_config
+
+    @logit(logger)
+    def link_obsconvexe(self) -> None:
+        """
+        This method links the gdas executable to the run directory
+        Parameters
+        ----------
+        Task: GDAS task
+        Returns
+        ----------
+        None
+        """
+        exe_src = self.task_config.OBSPROCEXE
+
+        logger.info(f"Link executable {exe_src} to DATA/")
+        exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src))
+        if os.path.exists(exe_dest):
+            rm_p(exe_dest)
+        os.symlink(exe_src, exe_dest)
+
+        return
+
+    @logit(logger)
+    def runConverter(self) -> None:
+        """
+        Run the IODA converter gdas_obsprovider2ioda.x
+        """
+        chdir(self.task_config.DATA)
+        exec_cmd = Executable(self.task_config.APRUN_PREPOBSAERO)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_obsprovider2ioda.x')
+        exec_cmd.add_default_arg(exec_name)
+
+        for prepaero_yaml in self.task_config.prepaero_yaml:
+            try:
+                logger.debug(f"Executing {exec_cmd} on {prepaero_yaml}")
+                exec_cmd(f"{prepaero_yaml}")
+            except OSError:
+                raise OSError(f"Failed to execute {exec_cmd} on {prepaero_yaml}")
+            except Exception:
+                raise WorkflowException(f"An error occured during execution of {exec_cmd} on {prepaero_yaml}")
+
+        pass
+
+    @logit(logger)
+    def finalize(self) -> None:
+        """
+        Copy the output viirs files to COMIN_OBS.
+        Tar and archive the output files.
+        Tar and archive the raw obs files.
+        """
+        # get list of viirs files
+        obsfiles = glob.glob(os.path.join(self.task_config['DATA'], '*viirs*nc4'))
+        copylist = []
+        for obsfile in obsfiles:
+            basename = os.path.basename(obsfile)
+            src = os.path.join(self.task_config['DATA'], basename)
+            dest = os.path.join(self.task_config.COMOUT_OBS, basename)
+            copylist.append([src, dest])
+        FileHandler({'copy': copylist}).sync()
+
+        # gzip the converted obs files before adding them to the tarball
+        for obsfile in obsfiles:
+            with open(obsfile, 'rb') as f_in, gzip.open(f"{obsfile}.gz", 'wb') as f_out:
+                f_out.writelines(f_in)
+
+        aeroobs = os.path.join(self.task_config.COMOUT_OBS, f"{self.task_config['APREFIX']}aeroobs")
+        # open tar file for writing
+        with tarfile.open(aeroobs, "w") as archive:
+            for obsfile in obsfiles:
+                aeroobsgzip = f"{obsfile}.gz"
+                archive.add(aeroobsgzip, arcname=os.path.basename(aeroobsgzip))
+        # get list of raw viirs L2 files
+        rawfiles = glob.glob(os.path.join(self.task_config.DATA_OBS, 'JRR-AOD*'))
+        # gzip the raw L2 files before adding them to the tarball
+        for rawfile in rawfiles:
+            with open(rawfile, 'rb') as f_in, gzip.open(f"{rawfile}.gz", 'wb') as f_out:
+                f_out.writelines(f_in)
+
+        aerorawobs = os.path.join(self.task_config.COMOUT_OBS, f"{self.task_config['APREFIX']}aerorawobs")
+        # open tar file for writing
+        with tarfile.open(aerorawobs, "w") as archive:
+            for rawfile in rawfiles:
+                aerorawobsgzip = f"{rawfile}.gz"
+                archive.add(aerorawobsgzip, arcname=os.path.basename(aerorawobsgzip))
+        copylist = []
+        for prepaero_yaml in self.task_config.prepaero_yaml:
+            basename = os.path.basename(prepaero_yaml)
+            dest = os.path.join(self.task_config.COMOUT_OBS, basename)
+            copylist.append([prepaero_yaml, dest])
+        FileHandler({'copy': copylist}).sync()
+
+        pass
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index cfd1fb2206..e407cf1765 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -4,9 +4,11 @@
 import glob
 import tarfile
 from logging import getLogger
+from pprint import pformat
 from netCDF4 import Dataset
-from typing import List, Dict, Any, Union
+from typing import List, Dict, Any, Union, Optional
 
+from jcb import render
 from wxflow import (parse_j2yaml, FileHandler, rm_p, logit,
                     Task, Executable, WorkflowException, to_fv3time, to_YMD,
                     Template, TemplateConstants)
@@ -24,10 +26,15 @@ class Analysis(Task):
 
     def __init__(self, config: Dict[str, Any]) -> None:
         super().__init__(config)
-        self.config.ntiles = 6
+        # Store location of GDASApp jinja2 templates
+        self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas')
 
     def initialize(self) -> None:
         super().initialize()
+
+        # all JEDI analyses need a JEDI config
+        self.task_config.jedi_config = self.get_jedi_config()
+
         # all analyses need to stage observations
         obs_dict = self.get_obs_dict()
         FileHandler(obs_dict).sync()
@@ -39,13 +46,60 @@ def initialize(self) -> None:
         # link jedi executable to run directory
         self.link_jediexe()
 
+    @logit(logger)
+    def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]:
+        """Compile a dictionary of JEDI configuration from JEDIYAML template file
+
+        Parameters
+        ----------
+        algorithm (optional) : str
+            Name of the algorithm to use in the JEDI configuration. Will override the algorithm
+            set in the self.task_config.JCB_<>_YAML file
+
+        Returns
+        ----------
+        jedi_config : Dict
+            a dictionary containing the fully rendered JEDI yaml configuration
+        """
+
+        # generate JEDI YAML file
+        logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}")
+
+        if 'JCB_BASE_YAML' in self.task_config.keys():
+            # Step 1: fill templates of the jcb base YAML file
+            jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config)
+
+            # Step 2: (optional) fill templates of algorithm override YAML and merge
+            if 'JCB_ALGO_YAML' in self.task_config.keys():
+                jcb_algo_config = parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config)
+                jcb_config = {**jcb_config, **jcb_algo_config}
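+                # NOTE: this is a shallow merge; top-level keys in JCB_ALGO_YAML override those in JCB_BASE_YAML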
+
+            # If algorithm is present override the algorithm in the JEDI config
+            if algorithm:
+                jcb_config['algorithm'] = algorithm
+
+            # Step 3: generate the JEDI Yaml using JCB driving YAML
+            jedi_config = render(jcb_config)
+        elif 'JEDIYAML' in self.task_config.keys():
+            # Generate JEDI YAML file (without using JCB)
+            jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config,
+                                       searchpath=self.gdasapp_j2tmpl_dir)
+        else:
+            raise KeyError("Task config must contain JCB_BASE_YAML or JEDIYAML")
+
+        logger.debug(f"JEDI config:\n{pformat(jedi_config)}")
+
+        return jedi_config
+
     @logit(logger)
     def get_obs_dict(self) -> Dict[str, Any]:
         """Compile a dictionary of observation files to copy
 
-        This method uses the OBS_LIST configuration variable to generate a dictionary
-        from a list of YAML files that specify what observation files are to be
-        copied to the run directory from the observation input directory
+        This method extracts the 'observers' list from the JEDI YAML configuration and, from it,
+        builds the list of observation files to be copied to the run directory
+        from the observation input directory
 
         Parameters
         ----------
@@ -55,18 +109,18 @@ def get_obs_dict(self) -> Dict[str, Any]:
         obs_dict: Dict
             a dictionary containing the list of observation files to copy for FileHandler
         """
-        logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}")
-        obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config)
-        logger.debug(f"obs_list_config: {obs_list_config}")
-        # get observers from master dictionary
-        observers = obs_list_config['observers']
+
+        logger.info(f"Extracting a list of observation files from Jedi config file")
+        observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations')
+        logger.debug(f"observations:\n{pformat(observations)}")
+
         copylist = []
-        for ob in observers:
+        for ob in observations['observers']:
             obfile = ob['obs space']['obsdatain']['engine']['obsfile']
             basename = os.path.basename(obfile)
             copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile])
         obs_dict = {
-            'mkdir': [os.path.join(self.runtime_config['DATA'], 'obs')],
+            'mkdir': [os.path.join(self.task_config['DATA'], 'obs')],
             'copy': copylist
         }
         return obs_dict
@@ -75,9 +129,11 @@ def get_obs_dict(self) -> Dict[str, Any]:
     def get_bias_dict(self) -> Dict[str, Any]:
         """Compile a dictionary of observation files to copy
 
-        This method uses the OBS_LIST configuration variable to generate a dictionary
-        from a list of YAML files that specify what observation bias correction files
-        are to be copied to the run directory from the observation input directory
+        This method extracts the 'observers' list from the JEDI YAML configuration and, from it,
+        builds the list of observation bias correction files to be copied to the run directory
+        from the component directory.
+        TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in
+        `analysis.py` and should be implemented in the component where this is applicable.
 
         Parameters
         ----------
@@ -87,24 +143,25 @@ def get_bias_dict(self) -> Dict[str, Any]:
         bias_dict: Dict
             a dictionary containing the list of observation bias files to copy for FileHandler
         """
-        logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}")
-        obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config)
-        logger.debug(f"obs_list_config: {obs_list_config}")
-        # get observers from master dictionary
-        observers = obs_list_config['observers']
+
+        logger.info(f"Extracting a list of bias correction files from Jedi config file")
+        observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations')
+        logger.debug(f"observations:\n{pformat(observations)}")
+
         copylist = []
-        for ob in observers:
+        for ob in observations['observers']:
             if 'obs bias' in ob.keys():
                 obfile = ob['obs bias']['input file']
                 obdir = os.path.dirname(obfile)
                 basename = os.path.basename(obfile)
                 prefix = '.'.join(basename.split('.')[:-2])
-                for file in ['satbias.nc4', 'satbias_cov.nc4', 'tlapse.txt']:
+                for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']:
                     bfile = f"{prefix}.{file}"
                     copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)])
+                    # TODO: Why is this specific to ATMOS?
 
         bias_dict = {
-            'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')],
+            'mkdir': [os.path.join(self.task_config.DATA, 'bc')],
             'copy': copylist
         }
         return bias_dict
@@ -123,7 +180,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li
            List of increment variables to add to the background
         """
 
-        for itile in range(1, self.config.ntiles + 1):
+        for itile in range(1, self.task_config.ntiles + 1):
             inc_path = inc_file_tmpl.format(tilenum=itile)
             bkg_path = bkg_file_tmpl.format(tilenum=itile)
             with Dataset(inc_path, mode='r') as incfile, Dataset(bkg_path, mode='a') as rstfile:
@@ -137,44 +194,6 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li
                     except (AttributeError, RuntimeError):
                         pass  # checksum is missing, move on
 
-    @logit(logger)
-    def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of model background files to copy
-
-        This method is a placeholder for now... will be possibly made generic at a later date
-
-        Parameters
-        ----------
-        task_config: Dict
-            a dictionary containing all of the configuration needed for the task
-
-        Returns
-        ----------
-        bkg_dict: Dict
-            a dictionary containing the list of model background files to copy for FileHandler
-        """
-        bkg_dict = {'foo': 'bar'}
-        return bkg_dict
-
-    @logit(logger)
-    def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of background error files to copy
-
-        This method is a placeholder for now... will be possibly made generic at a later date
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-
-        Returns
-        ----------
-        berror_dict: Dict
-            a dictionary containing the list of background error files to copy for FileHandler
-        """
-        berror_dict = {'foo': 'bar'}
-        return berror_dict
-
     @logit(logger)
     def link_jediexe(self) -> None:
         """Compile a dictionary of background error files to copy
@@ -199,109 +218,7 @@ def link_jediexe(self) -> None:
             rm_p(exe_dest)
         os.symlink(exe_src, exe_dest)
 
-        return
-
-    @staticmethod
-    @logit(logger)
-    def get_fv3ens_dict(config: Dict[str, Any]) -> Dict[str, Any]:
-        """Compile a dictionary of ensemble member restarts to copy
-
-        This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer)
-        that are needed for global atmens DA and returns said dictionary for use by the FileHandler class.
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-
-        Returns
-        ----------
-        ens_dict: Dict
-            a dictionary containing the list of ensemble member restart files to copy for FileHandler
-        """
-        # NOTE for now this is FV3 restart files and just assumed to be fh006
-
-        # define template
-        template_res = config.COM_ATMOS_RESTART_TMPL
-        prev_cycle = config.previous_cycle
-        tmpl_res_dict = {
-            'ROTDIR': config.ROTDIR,
-            'RUN': config.RUN,
-            'YMD': to_YMD(prev_cycle),
-            'HH': prev_cycle.strftime('%H'),
-            'MEMDIR': None
-        }
-
-        # construct ensemble member file list
-        dirlist = []
-        enslist = []
-        for imem in range(1, config.NMEM_ENS + 1):
-            memchar = f"mem{imem:03d}"
-
-            # create directory path for ensemble member restart
-            dirlist.append(os.path.join(config.DATA, config.dirname, f'mem{imem:03d}'))
-
-            # get FV3 restart files, this will be a lot simpler when using history files
-            tmpl_res_dict['MEMDIR'] = memchar
-            rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get)
-            run_dir = os.path.join(config.DATA, config.dirname, memchar)
-
-            # atmens DA needs coupler
-            basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
-            enslist.append([os.path.join(rst_dir, basename), os.path.join(config.DATA, config.dirname, memchar, basename)])
-
-            # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data
-            for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']:
-                template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
-                for itile in range(1, config.ntiles + 1):
-                    basename = template.format(tilenum=itile)
-                    enslist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-        ens_dict = {
-            'mkdir': dirlist,
-            'copy': enslist,
-        }
-        return ens_dict
-
-    @staticmethod
-    @logit(logger)
-    def execute_jediexe(workdir: Union[str, os.PathLike], aprun_cmd: str, jedi_exec: str, jedi_yaml: str) -> None:
-        """
-        Run a JEDI executable
-
-        Parameters
-        ----------
-        workdir : str | os.PathLike
-            Working directory where to run containing the necessary files and executable
-        aprun_cmd : str
-            Launcher command e.g. mpirun -np <ntasks> or srun, etc.
-        jedi_exec : str
-            Name of the JEDI executable e.g. fv3jedi_var.x
-        jedi_yaml : str | os.PathLike
-            Name of the yaml file to feed the JEDI executable e.g. fv3jedi_var.yaml
-
-        Raises
-        ------
-        OSError
-            Failure due to OS issues
-        WorkflowException
-            All other exceptions
-        """
-
-        os.chdir(workdir)
-
-        exec_cmd = Executable(aprun_cmd)
-        exec_cmd.add_default_arg([os.path.join(workdir, jedi_exec), jedi_yaml])
-
-        logger.info(f"Executing {exec_cmd}")
-        try:
-            exec_cmd()
-        except OSError:
-            logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
-            raise OSError(f"{exec_cmd}")
-        except Exception:
-            logger.exception(f"FATAL ERROR: Error occured during execution of {exec_cmd}")
-            raise WorkflowException(f"{exec_cmd}")
+        return exe_dest
 
     @staticmethod
     @logit(logger)
@@ -311,13 +228,14 @@ def tgz_diags(statfile: str, diagdir: str) -> None:
         Parameters
         ----------
         statfile : str | os.PathLike
-            Path to the output .tar.gz .tgz file that will contain the diag*.nc4 files e.g. atmstat.tgz
+            Path to the output .tar.gz .tgz file that will contain the diag*.nc files e.g. atmstat.tgz
         diagdir : str | os.PathLike
             Directory containing JEDI diag files
         """
 
         # get list of diag files to put in tarball
-        diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc4'))
+        diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc'))
+        diags.extend(glob.glob(os.path.join(diagdir, 'diags', 'diag*nc4')))
 
         logger.info(f"Compressing {len(diags)} diag files to {statfile}")
 
@@ -326,3 +244,74 @@ def tgz_diags(statfile: str, diagdir: str) -> None:
             # Add diag files to tarball
             for diagfile in diags:
                 tgz.add(diagfile, arcname=os.path.basename(diagfile))
+
+
+@logit(logger)
+def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any:
+    """
+    Recursively search through a nested dictionary and return the value for the target key.
+    Only the first occurrence of the key is returned; if the same key also exists in a
+    subsequent nested dictionary, that occurrence will not be found.
+
+    Parameters
+    ----------
+    nested_dict : Dict
+        Dictionary to search
+    target_key : str
+        Key to search for
+
+    Returns
+    -------
+    Any
+        Value of the target key
+
+    Raises
+    ------
+    KeyError
+        If key is not found in dictionary
+
+    TODO: if this gives issues due to landing on an incorrect key in the nested
+    dictionary, we will have to implement a more concrete method to search for a key
+    given a more complete address.  See resolved conversations in PR 2387
+
+    # Example usage:
+    nested_dict = {
+        'a': {
+            'b': {
+                'c': 1,
+                'd': {
+                    'e': 2,
+                    'f': 3
+                }
+            },
+            'g': 4
+        },
+        'h': {
+            'i': 5
+        },
+        'j': {
+            'k': 6
+        }
+    }
+
+    user_key = input("Enter the key to search for: ")
+    result = find_value_in_nested_dict(nested_dict, user_key)
+    """
+
+    if not isinstance(nested_dict, dict):
+        raise TypeError(f"Input is not of type(dict)")
+
+    result = nested_dict.get(target_key)
+    if result is not None:
+        return result
+
+    for value in nested_dict.values():
+        if isinstance(value, dict):
+            try:
+                result = find_value_in_nested_dict(value, target_key)
+                if result is not None:
+                    return result
+            except KeyError:
+                pass
+
+    raise KeyError(f"Key '{target_key}' not found in the nested dictionary")
diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py
new file mode 100644
index 0000000000..953a856192
--- /dev/null
+++ b/ush/python/pygfs/task/archive.py
@@ -0,0 +1,427 @@
+#!/usr/bin/env python3
+
+import glob
+import os
+import shutil
+import tarfile
+from logging import getLogger
+from typing import Any, Dict, List, Tuple
+
+from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task,
+                    chgrp, get_gid, logit, mkdir_p, parse_j2yaml, rm_p, strftime,
+                    to_YMDH)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class Archive(Task):
+    """Task to archive ROTDIR data to HPSS (or locally)
+    """
+
+    @logit(logger, name="Archive")
+    def __init__(self, config: Dict[str, Any]) -> None:
+        """Constructor for the Archive task
+        The constructor is responsible for collecting necessary yamls based on
+        the runtime options and RUN.
+
+        Parameters
+        ----------
+        config : Dict[str, Any]
+            Incoming configuration for the task from the environment
+
+        Returns
+        -------
+        None
+        """
+        super().__init__(config)
+
+        rotdir = self.task_config.ROTDIR + os.sep
+
+        # Find all absolute paths in the environment and get their relative paths from ${ROTDIR}
+        path_dict = self._gen_relative_paths(rotdir)
+
+        # Extend task_config with path_dict
+        self.task_config = AttrDict(**self.task_config, **path_dict)
+
+    @logit(logger)
+    def configure(self, arch_dict: Dict[str, Any]) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
+        """Determine which tarballs will need to be created.
+
+        Parameters
+        ----------
+        arch_dict : Dict[str, Any]
+            Task specific keys, e.g. runtime options (DO_AERO, DO_ICE, etc)
+
+        Return
+        ------
+        arcdir_set : Dict[str, Any]
+            Set of FileHandler instructions to copy files to the ARCDIR
+        atardir_sets : List[Dict[str, Any]]
+            List of tarballs and instructions for creating them via tar or htar
+        """
+
+        if not os.path.isdir(arch_dict.ROTDIR):
+            raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!")
+
+        if arch_dict.RUN == "gefs":
+            raise NotImplementedError("FATAL ERROR: Archiving is not yet set up for GEFS runs")
+
+        if arch_dict.RUN in ["gdas", "gfs"]:
+
+            # Copy the cyclone track files and rename the experiments
+            # TODO This really doesn't belong in archiving and should be moved elsewhere
+            Archive._rename_cyclone_expt(arch_dict)
+
+        archive_parm = os.path.join(arch_dict.PARMgfs, "archive")
+
+        # Collect the dataset to archive locally
+        arcdir_j2yaml = os.path.join(archive_parm, "arcdir.yaml.j2")
+
+        # Add the glob.glob function for capturing log filenames
+        # TODO remove this kludge once log filenames are explicit
+        arch_dict['glob'] = glob.glob
+
+        # Add the os.path.exists function to the dict for yaml parsing
+        arch_dict['path_exists'] = os.path.exists
+
+        # Parse the input jinja yaml template
+        arcdir_set = Archive._construct_arcdir_set(arcdir_j2yaml,
+                                                   arch_dict)
+
+        # Collect datasets that need to be archived
+        # Each dataset represents one tarball
+
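+        # Select the archiving commands: htar/hsi when writing to HPSS, plain tar and local
+        # file operations when archiving to local disk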
+        if arch_dict.HPSSARCH:
+            self.tar_cmd = "htar"
+            self.hsi = Hsi()
+            self.htar = Htar()
+            self.cvf = self.htar.cvf
+            self.rm_cmd = self.hsi.rm
+            self.chgrp_cmd = self.hsi.chgrp
+            self.chmod_cmd = self.hsi.chmod
+        elif arch_dict.LOCALARCH:
+            self.tar_cmd = "tar"
+            self.cvf = Archive._create_tarball
+            self.chgrp_cmd = chgrp
+            self.chmod_cmd = os.chmod
+            self.rm_cmd = rm_p
+        else:  # Neither HPSS nor local tarball archiving; only store products in ARCDIR.
+            self.tar_cmd = ""
+            return arcdir_set, []
+
+        master_yaml = "master_" + arch_dict.RUN + ".yaml.j2"
+
+        parsed_sets = parse_j2yaml(os.path.join(archive_parm, master_yaml),
+                                   arch_dict,
+                                   allow_missing=False)
+
+        atardir_sets = []
+
+        for dataset in parsed_sets.datasets.values():
+
+            dataset["fileset"] = Archive._create_fileset(dataset)
+            dataset["has_rstprod"] = Archive._has_rstprod(dataset.fileset)
+
+            atardir_sets.append(dataset)
+
+        return arcdir_set, atardir_sets
+
+    @logit(logger)
+    def execute_store_products(self, arcdir_set: Dict[str, Any]) -> None:
+        """Perform local archiving of data products to ARCDIR.
+
+        Parameters
+        ----------
+        arcdir_set : Dict[str, Any]
+            FileHandler instructions to populate ARCDIR with
+
+        Return
+        ------
+        None
+        """
+
+        # Copy files to the local ARCDIR
+        FileHandler(arcdir_set).sync()
+
+    @logit(logger)
+    def execute_backup_dataset(self, atardir_set: Dict[str, Any]) -> None:
+        """Create a backup tarball from a yaml dict.
+
+        Parameters
+        ----------
+        atardir_set: Dict[str, Any]
+            Dict defining set of files to backup and the target tarball.
+
+        Return
+        ------
+        None
+        """
+
+        # Generate tarball
+        if len(atardir_set.fileset) == 0:
+            logger.warning(f"WARNING: skipping would-be empty archive {atardir_set.target}.")
+            return
+
+        if atardir_set.has_rstprod:
+
+            try:
+                self.cvf(atardir_set.target, atardir_set.fileset)
+            # Regardless of exception type, attempt to remove the target
+            except Exception:
+                self.rm_cmd(atardir_set.target)
+                raise RuntimeError(f"FATAL ERROR: Failed to create restricted archive {atardir_set.target}, deleting!")
+
+            self._protect_rstprod(atardir_set)
+
+        else:
+            self.cvf(atardir_set.target, atardir_set.fileset)
+
+    @staticmethod
+    @logit(logger)
+    def _create_fileset(atardir_set: Dict[str, Any]) -> List:
+        """
+        Collect the list of all available files from the parsed yaml dict.
+        Globs are expanded and if required files are missing, an error is
+        raised.
+
+        TODO: expand all globs in the jinja yaml files instead of expanding
+              them here and issue errors here if globbing patterns (*, ?, [])
+              are found.
+
+        Parameters
+        ----------
+        atardir_set: Dict
+            Contains full paths for required and optional files to be archived.
+        """
+
+        fileset = []
+        if "required" in atardir_set:
+            if atardir_set.required is not None:
+                for item in atardir_set.required:
+                    glob_set = glob.glob(item)
+                    if len(glob_set) == 0:
+                        raise FileNotFoundError(f"FATAL ERROR: Required file, directory, or glob {item} not found!")
+                    for entry in glob_set:
+                        fileset.append(entry)
+
+        if "optional" in atardir_set:
+            if atardir_set.optional is not None:
+                for item in atardir_set.optional:
+                    glob_set = glob.glob(item)
+                    if len(glob_set) == 0:
+                        logger.warning(f"WARNING: optional file/glob {item} not found!")
+                    else:
+                        for entry in glob_set:
+                            fileset.append(entry)
+
+        return fileset
+
+    @staticmethod
+    @logit(logger)
+    def _has_rstprod(fileset: List) -> bool:
+        """
+        Checks if any files in the input fileset belongs to rstprod.
+
+        Parameters
+        ----------
+        fileset : List
+            List of filenames to check.
+        """
+
+        try:
+            rstprod_gid = get_gid("rstprod")
+        except KeyError:
+            # rstprod does not exist on this machine
+            return False
+
+        # Expand globs and check each file for group ownership
+        for file_or_glob in fileset:
+            glob_set = glob.glob(file_or_glob)
+            for filename in glob_set:
+                if os.stat(filename).st_gid == rstprod_gid:
+                    return True
+
+        return False
+
+    @logit(logger)
+    def _protect_rstprod(self, atardir_set: Dict[str, Any]) -> None:
+        """
+        Changes the group of the target tarball to rstprod and the permissions to
+        640.  If this fails for any reason, attempt to delete the file before exiting.
+
+        """
+
+        try:
+            self.chgrp_cmd("rstprod", atardir_set.target)
+            if self.tar_cmd == "htar":
+                self.chmod_cmd("640", atardir_set.target)
+            else:
+                self.chmod_cmd(atardir_set.target, 0o640)
+        # Regardless of exception type, attempt to remove the target
+        except Exception:
+            try:
+                self.rm_cmd(atardir_set.target)
+            finally:
+                raise RuntimeError(f"FATAL ERROR: Failed to protect {atardir_set.target}!\n"
+                                   f"Please verify that it has been deleted!!")
+
+    @staticmethod
+    @logit(logger)
+    def _create_tarball(target: str, fileset: List) -> None:
+        """Method to create a local tarball.
+
+        Parameters
+        ----------
+        target : str
+            Tarball to create
+
+        fileset : List
+            List of files to add to the archive
+        """
+
+        # TODO create a set of tar helper functions in wxflow
+        # Attempt to create the parent directory if it does not exist
+        mkdir_p(os.path.dirname(os.path.realpath(target)))
+
+        # Create the archive
+        with tarfile.open(target, "w") as tarball:
+            for filename in fileset:
+                tarball.add(filename)
+
+    @logit(logger)
+    def _gen_relative_paths(self, root_path: str) -> Dict:
+        """Generate a dict of paths in self.task_config relative to root_path
+
+        Parameters
+        ----------
+        root_path : str
+            Path to base all relative paths off of
+
+        Return
+        ------
+        rel_path_dict : Dict
+            Dictionary of paths relative to root_path.  Members will be named
+            based on the dict names in self.task_config.  For COM paths, the names will
+            follow COMIN_<NAME> --> <name>_dir.  For all other directories, the
+            names will follow <NAME> --> <name>_dir.
+        """
+
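+        # For example (hypothetical key), a COMIN_ATMOS_ANALYSIS path under ROTDIR would be
+        # stored under a key like "atmos_analysis_dir", holding the path relative to ROTDIR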
+        rel_path_dict = {}
+        for key, value in self.task_config.items():
+            if isinstance(value, str):
+                if root_path in value:
+                    rel_path = value.replace(root_path, "")
+                    rel_key = (key[4:] if key.startswith("COMIN_") else key).lower() + "_dir"
+                    rel_path_dict[rel_key] = rel_path
+
+        return rel_path_dict
+
+    @staticmethod
+    @logit(logger)
+    def _construct_arcdir_set(arcdir_j2yaml, arch_dict) -> Dict:
+        """Construct the list of files to send to the ARCDIR and Fit2Obs
+           directories from a template.
+
+           TODO Copying Fit2Obs data doesn't belong in archiving should be
+                moved elsewhere.
+
+        Parameters
+        ----------
+        arcdir_j2yaml: str
+            The filename of the ARCDIR jinja template to parse.
+
+        arch_dict: Dict
+            The context dictionary to parse arcdir_j2yaml with.
+
+        Return
+        ------
+        arcdir_set : Dict
+            FileHandler dictionary (i.e. with top level "mkdir" and "copy" keys)
+            containing all directories that need to be created and what data
+            files need to be copied to the ARCDIR and the Fit2Obs directory.
+        """
+
+        # Get the FileHandler dictionary for creating directories and copying
+        # to the ARCDIR and VFYARC directories.
+        arcdir_set = parse_j2yaml(arcdir_j2yaml,
+                                  arch_dict,
+                                  allow_missing=True)
+
+        return arcdir_set
+
+    @staticmethod
+    @logit(logger)
+    def _rename_cyclone_expt(arch_dict) -> None:
+
+        # Rename the experiment in the tracker files from "AVNO" to the
+        # first 4 letters of PSLOT.
+        pslot4 = arch_dict.PSLOT.upper()
+        if len(arch_dict.PSLOT) > 4:
+            pslot4 = arch_dict.PSLOT[0:4].upper()
+
+        track_dir_in = arch_dict.COMIN_ATMOS_TRACK
+        track_dir_out = arch_dict.COMOUT_ATMOS_TRACK
+        run = arch_dict.RUN
+        cycle_HH = strftime(arch_dict.current_cycle, "%H")
+
+        if run == "gfs":
+            in_track_file = (track_dir_in + "/avno.t" +
+                             cycle_HH + "z.cycle.trackatcfunix")
+            in_track_p_file = (track_dir_in + "/avnop.t" +
+                               cycle_HH + "z.cycle.trackatcfunixp")
+        elif run == "gdas":
+            in_track_file = (track_dir_in + "/gdas.t" +
+                             cycle_HH + "z.cycle.trackatcfunix")
+            in_track_p_file = (track_dir_in + "/gdasp.t" +
+                               cycle_HH + "z.cycle.trackatcfunixp")
+
+        if not os.path.isfile(in_track_file):
+            # Do not attempt to archive the outputs
+            return
+
+        out_track_file = track_dir_out + "/atcfunix." + run + "." + to_YMDH(arch_dict.current_cycle)
+        out_track_p_file = track_dir_out + "/atcfunixp." + run + "." + to_YMDH(arch_dict.current_cycle)
+
+        def replace_string_from_to_file(filename_in, filename_out, search_str, replace_str):
+
+            """Write a new file from the contents of an input file while searching
+            and replacing ASCII strings.  To prevent partial file creation, a
+            temporary file is created and moved to the final location only
+            after the search/replace is finished.
+
+            Parameters
+            ----------
+            filename_in : str
+                Input filename
+
+            filename_out : str
+                Output filename
+
+            search_str : str
+                ASCII string to search for
+
+            replace_str : str
+                ASCII string to replace the search_str with
+            """
+            with open(filename_in) as old_file:
+                lines = old_file.readlines()
+
+            out_lines = [line.replace(search_str, replace_str) for line in lines]
+
+            with open("/tmp/track_file", "w") as new_file:
+                new_file.writelines(out_lines)
+
+            shutil.move("tmp/track_file", filename_out)
+
+        replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4)
+        replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4)
+
+        return
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index da41574fc9..4e9d37335c 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -11,7 +11,7 @@
                     FileHandler,
                     add_to_datetime, to_fv3time, to_timedelta, to_YMDH,
                     chdir,
-                    parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    parse_j2yaml, save_as_yaml,
                     logit,
                     Executable,
                     WorkflowException)
@@ -28,32 +28,35 @@ class AtmAnalysis(Analysis):
     def __init__(self, config):
         super().__init__(config)
 
-        _res = int(self.config.CASE[1:])
-        _res_anl = int(self.config.CASE_ANL[1:])
-        _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2)
-        _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml")
+        _res = int(self.task_config.CASE[1:])
+        _res_anl = int(self.task_config.CASE_ANL[1:])
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+        _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
 
         # Create a local dictionary that is repeatedly used across this class
         local_dict = AttrDict(
             {
                 'npx_ges': _res + 1,
                 'npy_ges': _res + 1,
-                'npz_ges': self.config.LEVS - 1,
-                'npz': self.config.LEVS - 1,
+                'npz_ges': self.task_config.LEVS - 1,
+                'npz': self.task_config.LEVS - 1,
                 'npx_anl': _res_anl + 1,
                 'npy_anl': _res_anl + 1,
-                'npz_anl': self.config.LEVS - 1,
+                'npz_anl': self.task_config.LEVS - 1,
                 'ATM_WINDOW_BEGIN': _window_begin,
-                'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H",
-                'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
-                'fv3jedi_yaml': _fv3jedi_yaml,
+                'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H",
+                'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
+                'jedi_yaml': _jedi_yaml,
+                'atm_obsdatain_path': f"{self.task_config.DATA}/obs/",
+                'atm_obsdataout_path': f"{self.task_config.DATA}/diags/",
+                'BKG_TSTEP': "PT1H"  # Placeholder for 4D applications
             }
         )
 
-        # task_config is everything that this task should need
-        self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+        # Extend task_config with local_dict
+        self.task_config = AttrDict(**self.task_config, **local_dict)
 
     @logit(logger)
     def initialize(self: Analysis) -> None:
@@ -71,41 +74,38 @@ def initialize(self: Analysis) -> None:
         super().initialize()
 
         # stage CRTM fix files
-        crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml')
-        logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}")
-        crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config)
+        logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
+        crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config)
         FileHandler(crtm_fix_list).sync()
 
         # stage fix files
-        jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml')
-        logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}")
-        jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config)
+        logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}")
+        jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config)
         FileHandler(jedi_fix_list).sync()
 
         # stage static background error files, otherwise it will assume ID matrix
-        logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}")
-        FileHandler(self.get_berror_dict(self.task_config)).sync()
+        logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}")
+        if self.task_config.STATICB_TYPE != 'identity':
+            berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config)
+        else:
+            berror_staging_dict = {}
+        FileHandler(berror_staging_dict).sync()
 
         # stage ensemble files for use in hybrid background error
         if self.task_config.DOHYBVAR:
             logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}")
-            localconf = AttrDict()
-            keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN',
-                    'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles']
-            for key in keys:
-                localconf[key] = self.task_config[key]
-            localconf.RUN = 'enkf' + self.task_config.RUN
-            localconf.dirname = 'ens'
-            FileHandler(self.get_fv3ens_dict(localconf)).sync()
+            fv3ens_staging_dict = parse_j2yaml(self.task_config.FV3ENS_STAGING_YAML, self.task_config)
+            FileHandler(fv3ens_staging_dict).sync()
 
         # stage backgrounds
-        FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync()
+        logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}")
+        bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config)
+        FileHandler(bkg_staging_dict).sync()
 
         # generate variational YAML file
-        logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}")
-        varda_yaml = parse_j2yaml(self.task_config.ATMVARYAML, self.task_config)
-        save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml)
-        logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}")
+        logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}")
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
+        logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}")
 
         # need output dir for diags and anl
         logger.debug("Create empty output [anl, diags] directories to receive output from executable")
@@ -116,14 +116,16 @@ def initialize(self: Analysis) -> None:
         FileHandler({'mkdir': newdirs}).sync()
 
     @logit(logger)
-    def execute(self: Analysis) -> None:
+    def variational(self: Analysis) -> None:
 
         chdir(self.task_config.DATA)
 
-        exec_cmd = Executable(self.task_config.APRUN_ATMANL)
-        exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x')
+        exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas.x')
         exec_cmd.add_default_arg(exec_name)
-        exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml)
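+        # gdas.x is assumed to be the unified GDASApp executable; the first two arguments select
+        # the fv3jedi model interface and the variational application, followed by the rendered YAML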
+        exec_cmd.add_default_arg('fv3jedi')
+        exec_cmd.add_default_arg('variational')
+        exec_cmd.add_default_arg(self.task_config.jedi_yaml)
 
         try:
             logger.debug(f"Executing {exec_cmd}")
@@ -135,6 +137,31 @@ def execute(self: Analysis) -> None:
 
         pass
 
+    @logit(logger)
+    def init_fv3_increment(self: Analysis) -> None:
+        # Setup JEDI YAML file
+        self.task_config.jedi_yaml = os.path.join(self.task_config.DATA,
+                                                  f"{self.task_config.JCB_ALGO}.yaml")
+        save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml)
+
+        # Link JEDI executable to run directory
+        self.task_config.jedi_exe = self.link_jediexe()
+
+    @logit(logger)
+    def fv3_increment(self: Analysis) -> None:
+        # Run executable
+        exec_cmd = Executable(self.task_config.APRUN_ATMANLFV3INC)
+        exec_cmd.add_default_arg(self.task_config.jedi_exe)
+        exec_cmd.add_default_arg(self.task_config.jedi_yaml)
+
+        try:
+            logger.debug(f"Executing {exec_cmd}")
+            exec_cmd()
+        except OSError:
+            raise OSError(f"Failed to execute {exec_cmd}")
+        except Exception:
+            raise WorkflowException(f"An error occured during execution of {exec_cmd}")
+
     @logit(logger)
     def finalize(self: Analysis) -> None:
         """Finalize a global atm analysis
@@ -152,7 +179,7 @@ def finalize(self: Analysis) -> None:
         atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat")
 
         # get list of diag files to put in tarball
-        diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4'))
+        diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc'))
 
         logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz")
 
@@ -170,9 +197,9 @@ def finalize(self: Analysis) -> None:
                 archive.add(diaggzip, arcname=os.path.basename(diaggzip))
 
         # copy full YAML from executable to ROTDIR
-        logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}")
-        src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml")
-        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml")
+        logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}")
+        src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
+        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
         logger.debug(f"Copying {src} to {dest}")
         yaml_copy = {
             'mkdir': [self.task_config.COM_ATMOS_ANALYSIS],
@@ -212,235 +239,17 @@ def finalize(self: Analysis) -> None:
         }
         FileHandler(bias_copy).sync()
 
-        # Create UFS model readable atm increment file from UFS-DA atm increment
-        logger.info("Create UFS model readable atm increment file from UFS-DA atm increment")
-        self.jedi2fv3inc()
+        # Copy FV3 atm increment to comrot directory
+        logger.info("Copy UFS model readable atm increment file")
+        cdate = to_fv3time(self.task_config.current_cycle)
+        cdate_inc = cdate.replace('.', '_')
+        src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4")
+        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc')
+        logger.debug(f"Copying {src} to {dest}")
+        inc_copy = {
+            'copy': [[src, dest]]
+        }
+        FileHandler(inc_copy).sync()
 
     def clean(self):
         super().clean()
-
-    @logit(logger)
-    def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of model background files to copy
-
-        This method constructs a dictionary of FV3 restart files (coupler, core, tracer)
-        that are needed for global atm DA and returns said dictionary for use by the FileHandler class.
-
-        Parameters
-        ----------
-        task_config: Dict
-            a dictionary containing all of the configuration needed for the task
-
-        Returns
-        ----------
-        bkg_dict: Dict
-            a dictionary containing the list of model background files to copy for FileHandler
-        """
-        # NOTE for now this is FV3 restart files and just assumed to be fh006
-
-        # get FV3 restart files, this will be a lot simpler when using history files
-        rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV)  # for now, option later?
-        run_dir = os.path.join(task_config.DATA, 'bkg')
-
-        # Start accumulating list of background files to copy
-        bkglist = []
-
-        # atm DA needs coupler
-        basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res'
-        bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-        # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data
-        for ftype in ['core', 'srf_wnd', 'tracer']:
-            template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc'
-            for itile in range(1, task_config.ntiles + 1):
-                basename = template.format(tilenum=itile)
-                bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-        for ftype in ['phy_data', 'sfc_data']:
-            template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
-            for itile in range(1, task_config.ntiles + 1):
-                basename = template.format(tilenum=itile)
-                bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
-        bkg_dict = {
-            'mkdir': [run_dir],
-            'copy': bkglist,
-        }
-        return bkg_dict
-
-    @logit(logger)
-    def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of background error files to copy
-
-        This method will construct a dictionary of either bump of gsibec background
-        error files for global atm DA and return said dictionary for use by the
-        FileHandler class.
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-
-        Returns
-        ----------
-        berror_dict: Dict
-            a dictionary containing the list of atm background error files to copy for FileHandler
-        """
-        SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity,
-                                       'bump': self._get_berror_dict_bump,
-                                       'gsibec': self._get_berror_dict_gsibec}
-
-        try:
-            berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config)
-        except KeyError:
-            raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" +
-                           f"Currently supported background error types are:\n" +
-                           f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}')
-
-        return berror_dict
-
-    @staticmethod
-    @logit(logger)
-    def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Identity BE does not need any files for staging.
-
-        This is a private method and should not be accessed directly.
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-        Returns
-        ----------
-        berror_dict: Dict
-            Empty dictionary [identity BE needs not files to stage]
-        """
-        logger.info(f"Identity background error does not use staged files.  Return empty dictionary")
-        return {}
-
-    @staticmethod
-    @logit(logger)
-    def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of atm bump background error files to copy
-
-        This method will construct a dictionary of atm bump background error
-        files for global atm DA and return said dictionary to the parent
-
-        This is a private method and should not be accessed directly.
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-
-        Returns
-        ----------
-        berror_dict: Dict
-            a dictionary of atm bump background error files to copy for FileHandler
-        """
-        # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files.
-        b_dir = config.BERROR_DATA_DIR
-        b_datestr = to_fv3time(config.BERROR_DATE)
-        berror_list = []
-        for ftype in ['cor_rh', 'cor_rv', 'stddev']:
-            coupler = f'{b_datestr}.{ftype}.coupler.res'
-            berror_list.append([
-                os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler)
-            ])
-
-            template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc'
-            for itile in range(1, config.ntiles + 1):
-                tracer = template.format(tilenum=itile)
-                berror_list.append([
-                    os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer)
-                ])
-
-        nproc = config.ntiles * config.layout_x * config.layout_y
-        for nn in range(1, nproc + 1):
-            berror_list.append([
-                os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'),
-                os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc')
-            ])
-
-        # create dictionary of background error files to stage
-        berror_dict = {
-            'mkdir': [os.path.join(config.DATA, 'berror')],
-            'copy': berror_list,
-        }
-        return berror_dict
-
-    @staticmethod
-    @logit(logger)
-    def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]:
-        """Compile a dictionary of atm gsibec background error files to copy
-
-        This method will construct a dictionary of atm gsibec background error
-        files for global atm DA and return said dictionary to the parent
-
-        This is a private method and should not be accessed directly.
-
-        Parameters
-        ----------
-        config: Dict
-            a dictionary containing all of the configuration needed
-
-        Returns
-        ----------
-        berror_dict: Dict
-            a dictionary of atm gsibec background error files to copy for FileHandler
-        """
-        # GSI atm static-B needs namelist and coefficient files.
-        b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL)
-        berror_list = []
-        for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']:
-            berror_list.append([
-                os.path.join(b_dir, ftype),
-                os.path.join(config.DATA, 'berror', ftype)
-            ])
-
-        # create dictionary of background error files to stage
-        berror_dict = {
-            'mkdir': [os.path.join(config.DATA, 'berror')],
-            'copy': berror_list,
-        }
-        return berror_dict
-
-    @logit(logger)
-    def jedi2fv3inc(self: Analysis) -> None:
-        """Generate UFS model readable analysis increment
-
-        This method writes a UFS DA atm increment in UFS model readable format.
-        This includes:
-        - write UFS-DA atm increments using variable names expected by UFS model
-        - compute and write delp increment
-        - compute and write hydrostatic delz increment
-
-        Please note that some of these steps are temporary and will be modified
-        once the modle is able to directly read atm increments.
-
-        """
-        # Select the atm guess file based on the analysis and background resolutions
-        # Fields from the atm guess are used to compute the delp and delz increments
-        case_anl = int(self.task_config.CASE_ANL[1:])
-        case = int(self.task_config.CASE[1:])
-
-        file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc"
-        atmges_fv3 = os.path.join(self.task_config.COM_ATMOS_HISTORY_PREV, file)
-
-        # Set the path/name to the input UFS-DA atm increment file (atminc_jedi)
-        # and the output UFS model atm increment file (atminc_fv3)
-        cdate = to_fv3time(self.task_config.current_cycle)
-        cdate_inc = cdate.replace('.', '_')
-        atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4')
-        atminc_fv3 = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc")
-
-        # Reference the python script which does the actual work
-        incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py')
-
-        # Execute incpy to create the UFS model atm increment file
-        cmd = Executable(incpy)
-        cmd.add_default_arg(atmges_fv3)
-        cmd.add_default_arg(atminc_jedi)
-        cmd.add_default_arg(atminc_fv3)
-        logger.debug(f"Executing {cmd}")
-        cmd(output='stdout', error='stderr')
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 9cf84c07c7..bd5112050e 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -11,7 +11,7 @@
                     FileHandler,
                     add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD,
                     chdir,
-                    parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    parse_j2yaml, save_as_yaml,
                     logit,
                     Executable,
                     WorkflowException,
@@ -29,28 +29,31 @@ class AtmEnsAnalysis(Analysis):
     def __init__(self, config):
         super().__init__(config)
 
-        _res = int(self.config.CASE_ENS[1:])
-        _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2)
-        _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml")
+        _res = int(self.task_config.CASE_ENS[1:])
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+        _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
 
         # Create a local dictionary that is repeatedly used across this class
         local_dict = AttrDict(
             {
                 'npx_ges': _res + 1,
                 'npy_ges': _res + 1,
-                'npz_ges': self.config.LEVS - 1,
-                'npz': self.config.LEVS - 1,
+                'npz_ges': self.task_config.LEVS - 1,
+                'npz': self.task_config.LEVS - 1,
                 'ATM_WINDOW_BEGIN': _window_begin,
-                'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H",
-                'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
-                'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
-                'fv3jedi_yaml': _fv3jedi_yaml,
+                'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H",
+                'OPREFIX': f"{self.task_config.EUPD_CYC}.t{self.task_config.cyc:02d}z.",
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
+                'jedi_yaml': _jedi_yaml,
+                'atm_obsdatain_path': f"./obs/",
+                'atm_obsdataout_path': f"./diags/",
+                'BKG_TSTEP': "PT1H"  # Placeholder for 4D applications
             }
         )
 
-        # task_config is everything that this task should need
-        self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+        # Extend task_config with local_dict
+        self.task_config = AttrDict(**self.task_config, **local_dict)
 
     @logit(logger)
     def initialize(self: Analysis) -> None:
@@ -74,54 +77,25 @@ def initialize(self: Analysis) -> None:
         """
         super().initialize()
 
-        # Make member directories in DATA for background and in DATA and ROTDIR for analysis files
-        # create template dictionary for output member analysis directories
-        template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL
-        tmpl_inc_dict = {
-            'ROTDIR': self.task_config.ROTDIR,
-            'RUN': self.task_config.RUN,
-            'YMD': to_YMD(self.task_config.current_cycle),
-            'HH': self.task_config.current_cycle.strftime('%H')
-        }
-        dirlist = []
-        for imem in range(1, self.task_config.NMEM_ENS + 1):
-            dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}'))
-            dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}'))
-
-            # create output directory path for member analysis
-            tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}"
-            incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get)
-            dirlist.append(incdir)
-
-        FileHandler({'mkdir': dirlist}).sync()
-
         # stage CRTM fix files
-        crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml')
-        logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}")
-        crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config)
+        logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
+        crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config)
         FileHandler(crtm_fix_list).sync()
 
         # stage fix files
-        jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml')
-        logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}")
-        jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config)
+        logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}")
+        jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config)
         FileHandler(jedi_fix_list).sync()
 
         # stage backgrounds
-        logger.debug(f"Stage ensemble member background files")
-        localconf = AttrDict()
-        keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN',
-                'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles']
-        for key in keys:
-            localconf[key] = self.task_config[key]
-        localconf.dirname = 'bkg'
-        FileHandler(self.get_fv3ens_dict(localconf)).sync()
+        logger.info(f"Stage ensemble member background files")
+        bkg_staging_dict = parse_j2yaml(self.task_config.LGETKF_BKG_STAGING_YAML, self.task_config)
+        FileHandler(bkg_staging_dict).sync()
 
         # generate ensemble da YAML file
-        logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}")
-        ensda_yaml = parse_j2yaml(self.task_config.ATMENSYAML, self.task_config)
-        save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml)
-        logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}")
+        logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}")
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
+        logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}")
 
         # need output dir for diags and anl
         logger.debug("Create empty output [anl, diags] directories to receive output from executable")
@@ -132,7 +106,7 @@ def initialize(self: Analysis) -> None:
         FileHandler({'mkdir': newdirs}).sync()
 
     @logit(logger)
-    def execute(self: Analysis) -> None:
+    def letkf(self: Analysis) -> None:
         """Execute a global atmens analysis
 
         This method will execute a global atmens analysis using JEDI.
@@ -150,10 +124,13 @@ def execute(self: Analysis) -> None:
         """
         chdir(self.task_config.DATA)
 
-        exec_cmd = Executable(self.task_config.APRUN_ATMENSANL)
-        exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x')
+        exec_cmd = Executable(self.task_config.APRUN_ATMENSANLLETKF)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas.x')
+
         exec_cmd.add_default_arg(exec_name)
-        exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml)
+        exec_cmd.add_default_arg('fv3jedi')
+        exec_cmd.add_default_arg('localensembleda')
+        exec_cmd.add_default_arg(self.task_config.jedi_yaml)
 
         try:
             logger.debug(f"Executing {exec_cmd}")
@@ -165,6 +142,31 @@ def execute(self: Analysis) -> None:
 
         pass
 
+    @logit(logger)
+    def init_fv3_increment(self: Analysis) -> None:
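+        """Set up the FV3 increment step: write the JCB-derived JEDI YAML and link the JEDI executable"""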
+        # Setup JEDI YAML file
+        self.task_config.jedi_yaml = os.path.join(self.task_config.DATA,
+                                                  f"{self.task_config.JCB_ALGO}.yaml")
+        save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml)
+
+        # Link JEDI executable to run directory
+        self.task_config.jedi_exe = self.link_jediexe()
+
+    @logit(logger)
+    def fv3_increment(self: Analysis) -> None:
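+        """Run the JEDI executable prepared by init_fv3_increment to write the FV3-readable atm increment"""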
+        # Run executable
+        exec_cmd = Executable(self.task_config.APRUN_ATMENSANLFV3INC)
+        exec_cmd.add_default_arg(self.task_config.jedi_exe)
+        exec_cmd.add_default_arg(self.task_config.jedi_yaml)
+
+        try:
+            logger.debug(f"Executing {exec_cmd}")
+            exec_cmd()
+        except OSError:
+            raise OSError(f"Failed to execute {exec_cmd}")
+        except Exception:
+            raise WorkflowException(f"An error occured during execution of {exec_cmd}")
+
     @logit(logger)
     def finalize(self: Analysis) -> None:
         """Finalize a global atmens analysis
@@ -188,7 +190,7 @@ def finalize(self: Analysis) -> None:
         atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat")
 
         # get list of diag files to put in tarball
-        diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4'))
+        diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc'))
 
         logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz")
 
@@ -206,9 +208,9 @@ def finalize(self: Analysis) -> None:
                 archive.add(diaggzip, arcname=os.path.basename(diaggzip))
 
         # copy full YAML from executable to ROTDIR
-        logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}")
-        src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
-        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
+        logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}")
+        src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
+        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
         logger.debug(f"Copying {src} to {dest}")
         yaml_copy = {
             'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS],
@@ -216,42 +218,6 @@ def finalize(self: Analysis) -> None:
         }
         FileHandler(yaml_copy).sync()
 
-        # Create UFS model readable atm increment file from UFS-DA atm increment
-        logger.info("Create UFS model readable atm increment file from UFS-DA atm increment")
-        self.jedi2fv3inc()
-
-    def clean(self):
-        super().clean()
-
-    @logit(logger)
-    def jedi2fv3inc(self: Analysis) -> None:
-        """Generate UFS model readable analysis increment
-
-        This method writes a UFS DA atm increment in UFS model readable format.
-        This includes:
-        - write UFS-DA atm increments using variable names expected by UFS model
-        - compute and write delp increment
-        - compute and write hydrostatic delz increment
-
-        Please note that some of these steps are temporary and will be modified
-        once the modle is able to directly read atm increments.
-
-        Parameters
-        ----------
-        Analysis: parent class for GDAS task
-
-        Returns
-        ----------
-        None
-        """
-        # Select the atm guess file based on the analysis and background resolutions
-        # Fields from the atm guess are used to compute the delp and delz increments
-        cdate = to_fv3time(self.task_config.current_cycle)
-        cdate_inc = cdate.replace('.', '_')
-
-        # Reference the python script which does the actual work
-        incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py')
-
         # create template dictionaries
         template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL
         tmpl_inc_dict = {
@@ -261,14 +227,10 @@ def jedi2fv3inc(self: Analysis) -> None:
             'HH': self.task_config.current_cycle.strftime('%H')
         }
 
-        template_ges = self.task_config.COM_ATMOS_HISTORY_TMPL
-        tmpl_ges_dict = {
-            'ROTDIR': self.task_config.ROTDIR,
-            'RUN': self.task_config.RUN,
-            'YMD': to_YMD(self.task_config.previous_cycle),
-            'HH': self.task_config.previous_cycle.strftime('%H')
-        }
-
+        # copy FV3 atm increment to comrot directory
+        logger.info("Copy UFS model readable atm increment file")
+        cdate = to_fv3time(self.task_config.current_cycle)
+        cdate_inc = cdate.replace('.', '_')
         # loop over ensemble members
         for imem in range(1, self.task_config.NMEM_ENS + 1):
             memchar = f"mem{imem:03d}"
@@ -276,20 +238,15 @@ def jedi2fv3inc(self: Analysis) -> None:
             # create output path for member analysis increment
             tmpl_inc_dict['MEMDIR'] = memchar
             incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get)
+            src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4")
+            dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc")
 
-            # rewrite UFS-DA atmens increments
-            tmpl_ges_dict['MEMDIR'] = memchar
-            gesdir = Template.substitute_structure(template_ges, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_ges_dict.get)
-            atmges_fv3 = os.path.join(gesdir, f"{self.task_config.CDUMP}.t{self.task_config.previous_cycle.hour:02d}z.atmf006.nc")
-            atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4')
-            atminc_fv3 = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc")
-
-            # Execute incpy to create the UFS model atm increment file
-            # TODO: use MPMD or parallelize with mpi4py
-            # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656
-            cmd = Executable(incpy)
-            cmd.add_default_arg(atmges_fv3)
-            cmd.add_default_arg(atminc_jedi)
-            cmd.add_default_arg(atminc_fv3)
-            logger.debug(f"Executing {cmd}")
-            cmd(output='stdout', error='stderr')
+            # copy increment
+            logger.debug(f"Copying {src} to {dest}")
+            inc_copy = {
+                'copy': [[src, dest]]
+            }
+            FileHandler(inc_copy).sync()
+
+    def clean(self):
+        super().clean()
diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py
new file mode 100644
index 0000000000..9d64e621c9
--- /dev/null
+++ b/ush/python/pygfs/task/marine_bmat.py
@@ -0,0 +1,350 @@
+#!/usr/bin/env python3
+
+import os
+import glob
+from logging import getLogger
+import pygfs.utils.marine_da_utils as mdau
+
+from wxflow import (AttrDict,
+                    FileHandler,
+                    add_to_datetime, to_timedelta,
+                    chdir,
+                    parse_j2yaml,
+                    logit,
+                    Executable,
+                    Task)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class MarineBMat(Task):
+    """
+    Class for global marine B-matrix tasks
+    """
+    @logit(logger, name="MarineBMat")
+    def __init__(self, config):
+        super().__init__(config)
+        _home_gdas = os.path.join(self.task_config.HOMEgfs, 'sorc', 'gdas.cd')
+        _calc_scale_exec = os.path.join(self.task_config.HOMEgfs, 'ush', 'soca', 'calc_scales.py')
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+        _window_end = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+
+        # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert
+        if self.task_config.NMEM_ENS > 0:
+            _enspert_relpath = os.path.relpath(self.task_config.DATAenspert, self.task_config.DATA)
+        else:
+            _enspert_relpath = None
+
+        # Create a local dictionary that is repeatedly used across this class
+        local_dict = AttrDict(
+            {
+                'HOMEgdas': _home_gdas,
+                'MARINE_WINDOW_BEGIN': _window_begin,
+                'MARINE_WINDOW_END': _window_end,
+                'MARINE_WINDOW_MIDDLE': self.task_config.current_cycle,
+                'BERROR_YAML_DIR': os.path.join(_home_gdas, 'parm', 'soca', 'berror'),
+                'GRID_GEN_YAML': os.path.join(_home_gdas, 'parm', 'soca', 'gridgen', 'gridgen.yaml'),
+                'MARINE_ENSDA_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'ensda', 'stage_ens_mem.yaml.j2'),
+                'MARINE_DET_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'soca_det_bkg_stage.yaml.j2'),
+                'ENSPERT_RELPATH': _enspert_relpath,
+                'CALC_SCALE_EXEC': _calc_scale_exec,
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+            }
+        )
+
+        # Extend task_config with local_dict
+        self.task_config = AttrDict(**self.task_config, **local_dict)
+
+    @logit(logger)
+    def initialize(self: Task) -> None:
+        """Initialize a global B-matrix
+
+        This method will initialize a global B-Matrix.
+        This includes:
+        - staging the deterministic backgrounds (middle of window)
+        - staging SOCA fix files
+        - staging static ensemble members (optional)
+        - staging ensemble members (optional)
+        - generating the YAML files for the JEDI and GDASApp executables
+        - creating output directories
+        """
+        super().initialize()
+
+        # stage fix files
+        logger.info(f"Staging SOCA fix files from {self.task_config.SOCA_INPUT_FIX_DIR}")
+        soca_fix_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config)
+        FileHandler(soca_fix_list).sync()
+
+        # prepare the MOM6 input.nml
+        mdau.prep_input_nml(self.task_config)
+
+        # stage backgrounds
+        # TODO(G): Check ocean backgrounds dates for consistency
+        bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config)
+        FileHandler(bkg_list).sync()
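+        # convert the staged CICE history files into the format expected downstream
+        # (cice_hist2fms overwrites each file in place)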
+        for cice_fname in ['./INPUT/cice.res.nc', './bkg/ice.bkg.f006.nc', './bkg/ice.bkg.f009.nc']:
+            mdau.cice_hist2fms(cice_fname, cice_fname)
+
+        # stage the grid generation yaml
+        FileHandler({'copy': [[self.task_config.GRID_GEN_YAML,
+                               os.path.join(self.task_config.DATA, 'gridgen.yaml')]]}).sync()
+
+        # generate the variance partitioning YAML file
+        logger.debug("Generate variance partitioning YAML file")
+        diagb_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_diagb.yaml.j2'),
+                                    data=self.task_config)
+        diagb_config.save(os.path.join(self.task_config.DATA, 'soca_diagb.yaml'))
+
+        # generate the vertical decorrelation scale YAML file
+        logger.debug("Generate the vertical correlation scale YAML file")
+        vtscales_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_vtscales.yaml.j2'),
+                                       data=self.task_config)
+        vtscales_config.save(os.path.join(self.task_config.DATA, 'soca_vtscales.yaml'))
+
+        # generate vertical diffusion scale YAML file
+        logger.debug("Generate vertical diffusion YAML file")
+        diffvz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_parameters_diffusion_vt.yaml.j2'),
+                                     data=self.task_config)
+        diffvz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_vt.yaml'))
+
+        # generate the horizontal diffusion YAML files
+        if True:  # TODO(G): skip this section once we have optimized the scales
+            # stage the correlation scale configuration
+            logger.debug("Generate correlation scale YAML file")
+            FileHandler({'copy': [[os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_setcorscales.yaml'),
+                                   os.path.join(self.task_config.DATA, 'soca_setcorscales.yaml')]]}).sync()
+
+            # generate horizontal diffusion scale YAML file
+            logger.debug("Generate horizontal diffusion scale YAML file")
+            diffhz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_parameters_diffusion_hz.yaml.j2'),
+                                         data=self.task_config)
+            diffhz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_hz.yaml'))
+
+        # hybrid EnVAR case
+        if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2:
+            # stage ensemble member files for use in the hybrid background error
+            logger.debug("Stage ensemble members for the hybrid background error")
+            mdau.stage_ens_mem(self.task_config)
+
+            # generate ensemble recentering/rebalancing YAML file
+            logger.debug("Generate ensemble recentering YAML file")
+            ensrecenter_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensb.yaml.j2'),
+                                              data=self.task_config)
+            ensrecenter_config.save(os.path.join(self.task_config.DATA, 'soca_ensb.yaml'))
+
+            # generate ensemble weights YAML file
+            logger.debug("Generate ensemble recentering YAML file: {self.task_config.abcd_yaml}")
+            hybridweights_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensweights.yaml.j2'),
+                                                data=self.task_config)
+            hybridweights_config.save(os.path.join(self.task_config.DATA, 'soca_ensweights.yaml'))
+
+        # need output dir for ensemble perturbations and static B-matrix
+        logger.debug("Create empty diagb directories to receive output from executables")
+        FileHandler({'mkdir': [os.path.join(self.task_config.DATA, 'diagb')]}).sync()
+
+    @logit(logger)
+    def gridgen(self: Task) -> None:
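+        """Run gdas_soca_gridgen.x (driven by gridgen.yaml) to generate the SOCA grid file"""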
+        # link gdas_soca_gridgen.x
+        mdau.link_executable(self.task_config, 'gdas_soca_gridgen.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_gridgen.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('gridgen.yaml')
+
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def variance_partitioning(self: Task) -> None:
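+        """Run gdas_soca_diagb.x (driven by soca_diagb.yaml) to partition the background error variances"""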
+        # link the variance partitioning executable, gdas_soca_diagb.x
+        mdau.link_executable(self.task_config, 'gdas_soca_diagb.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_diagb.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_diagb.yaml')
+
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def horizontal_diffusion(self: Task) -> None:
+        """Generate the horizontal diffusion coefficients
+        """
+        # link the executable that computes the correlation scales, gdas_soca_setcorscales.x,
+        # and prepare the command to run it
+        mdau.link_executable(self.task_config, 'gdas_soca_setcorscales.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_setcorscales.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_setcorscales.yaml')
+
+        # create the files containing the correlation scales
+        mdau.run(exec_cmd)
+
+        # link the executable that computes the horizontal diffusion coefficients, gdas_soca_error_covariance_toolbox.x,
+        # and prepare the command to run it
+        mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_parameters_diffusion_hz.yaml')
+
+        # compute the coefficients of the diffusion operator
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def vertical_diffusion(self: Task) -> None:
+        """Generate the vertical diffusion coefficients
+        """
+        # compute the vertical correlation scales based on the MLD
+        FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC),
+                               os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync()
+        exec_cmd = Executable("python")
+        exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_vtscales.yaml')
+        mdau.run(exec_cmd)
+
+        # link the executable that computes the vertical diffusion coefficients, gdas_soca_error_covariance_toolbox.x,
+        # and prepare the command to run it
+        mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_parameters_diffusion_vt.yaml')
+
+        # compute the coefficients of the diffusion operator
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def ensemble_perturbations(self: Task) -> None:
+        """Generate the 3D ensemble of perturbation for the 3DEnVAR
+
+        This method will generate ensemble perturbations re-balanced w.r.t the
+        deterministic background.
+        This includes:
+        - computing a storing the unbalanced ensemble perturbations' statistics
+        - recentering the ensemble members around the deterministic background and
+          accounting for the nonlinear steric recentering
+        - saving the recentered ensemble statistics
+        """
+        mdau.link_executable(self.task_config, 'gdas_ens_handler.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_ens_handler.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_ensb.yaml')
+
+        # generate the ensemble perturbations
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def hybrid_weight(self: Task) -> None:
+        """Generate the hybrid weights for the 3DEnVAR
+
+        This method will generate the 3D hybrid weight fields for the 3DEnVAR for each
+        variable.
+        TODO(G): Currently implemented for the specific case of the static ensemble members only
+        """
+        mdau.link_executable(self.task_config, 'gdas_socahybridweights.x')
+        exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT)
+        exec_name = os.path.join(self.task_config.DATA, 'gdas_socahybridweights.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('soca_ensweights.yaml')
+
+        # compute the ensemble weights
+        mdau.run(exec_cmd)
+
+    @logit(logger)
+    def execute(self: Task) -> None:
+        """Generate the full B-matrix
+
+        This method will generate the full B-matrix according to the configuration.
+        """
+        chdir(self.task_config.DATA)
+        self.gridgen()                 # TODO: This should be optional in case the geometry file was staged
+        self.variance_partitioning()
+        self.horizontal_diffusion()    # TODO: Make this optional once we've converged on an acceptable set of scales
+        self.vertical_diffusion()
+        # hybrid EnVAR case
+        if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2:
+            self.ensemble_perturbations()  # TODO: refactor this from the old scripts
+            self.hybrid_weight()           # TODO: refactor this from the old scripts
+
+    @logit(logger)
+    def finalize(self: Task) -> None:
+        """Finalize the global B-matrix job
+
+        This method will finalize the global B-matrix job.
+        This includes:
+        - copy the generated static (but cycle-dependent) background error files to the ROTDIR
+        - copy the generated YAML file from initialize to the ROTDIR
+        - keep the re-balanced ensemble perturbation files in DATAenspert
+        - ...
+
+        """
+        # Copy the soca grid if it was created
+        grid_file = os.path.join(self.task_config.DATA, 'soca_gridspec.nc')
+        if os.path.exists(grid_file):
+            logger.info(f"Copying the soca grid file to the ROTDIR")
+            FileHandler({'copy': [[grid_file,
+                                   os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, 'soca_gridspec.nc')]]}).sync()
+
+        # Copy the diffusion coefficient files to the ROTDIR
+        logger.info(f"Copying the diffusion coefficient files to the ROTDIR")
+        diffusion_coeff_list = []
+        for diff_type in ['hz', 'vt']:
+            src = os.path.join(self.task_config.DATA, f"{diff_type}_ocean.nc")
+            dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX,
+                                f"{self.task_config.APREFIX}{diff_type}_ocean.nc")
+            diffusion_coeff_list.append([src, dest])
+
+        src = os.path.join(self.task_config.DATA, f"hz_ice.nc")
+        dest = os.path.join(self.task_config.COMOUT_ICE_BMATRIX,
+                            f"{self.task_config.APREFIX}hz_ice.nc")
+        diffusion_coeff_list.append([src, dest])
+
+        FileHandler({'copy': diffusion_coeff_list}).sync()
+
+        # Copy diag B files to ROTDIR
+        logger.info(f"Copying diag B files to the ROTDIR")
+        diagb_list = []
+        window_end_iso = self.task_config.MARINE_WINDOW_END.strftime('%Y-%m-%dT%H:%M:%SZ')
+
+        # ocean diag B
+        src = os.path.join(self.task_config.DATA, 'diagb', f"ocn.bkgerr_stddev.incr.{window_end_iso}.nc")
+        dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX,
+                           f"{self.task_config.APREFIX}ocean.bkgerr_stddev.nc")
+        diagb_list.append([src, dst])
+
+        # ice diag B
+        src = os.path.join(self.task_config.DATA, 'diagb', f"ice.bkgerr_stddev.incr.{window_end_iso}.nc")
+        dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX,
+                           f"{self.task_config.APREFIX}ice.bkgerr_stddev.nc")
+        diagb_list.append([src, dst])
+
+        FileHandler({'copy': diagb_list}).sync()
+
+        # Copy the ensemble perturbation diagnostics to the ROTDIR
+        if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 3:
+            window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ')
+            weight_list = []
+            src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc")
+            dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX,
+                               f"{self.task_config.APREFIX}ocean.ens_weights.nc")
+            weight_list.append([src, dst])
+
+            src = os.path.join(self.task_config.DATA, f"ice.ens_weights.incr.{window_middle_iso}.nc")
+            dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX,
+                               f"{self.task_config.APREFIX}ice.ens_weights.nc")
+            weight_list.append([src, dst])
+
+            # TODO(G): missing ssh_steric_stddev, ssh_unbal_stddev, ssh_total_stddev and steric_explained_variance
+
+            FileHandler({'copy': weight_list}).sync()
+
+        # Copy the YAML files to the OCEAN ROTDIR
+        yamls = glob.glob(os.path.join(self.task_config.DATA, '*.yaml'))
+        yaml_list = []
+        for yaml_file in yamls:
+            dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX,
+                                f"{self.task_config.APREFIX}{os.path.basename(yaml_file)}")
+            yaml_list.append([yaml_file, dest])
+        FileHandler({'copy': yaml_list}).sync()
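+
+# A minimal driver sketch (assumption: the corresponding job script builds its configuration
+# from the environment as other ex-scripts do; none of the names below are defined in this module):
+#
+#   from wxflow import cast_strdict_as_dtypedict
+#   config = cast_strdict_as_dtypedict(os.environ)
+#   bmat = MarineBMat(config)
+#   bmat.initialize()
+#   bmat.execute()
+#   bmat.finalize()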
diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py
new file mode 100644
index 0000000000..36c26d594b
--- /dev/null
+++ b/ush/python/pygfs/task/marine_letkf.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+
+import f90nml
+from logging import getLogger
+import os
+from pygfs.task.analysis import Analysis
+from typing import Dict
+from wxflow import (AttrDict,
+                    FileHandler,
+                    logit,
+                    parse_j2yaml,
+                    to_timedelta,
+                    to_YMDH)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class MarineLETKF(Analysis):
+    """
+    Class for global ocean and sea ice analysis LETKF task
+    """
+
+    @logit(logger, name="MarineLETKF")
+    def __init__(self, config: Dict) -> None:
+        """Constructor for ocean and sea ice LETKF task
+        Parameters:
+        ------------
+        config: Dict
+            configuration, namely evironment variables
+        Returns:
+        --------
+        None
+        """
+
+        logger.info("init")
+        super().__init__(config)
+
+        _half_assim_freq = to_timedelta(f"{self.task_config.assim_freq}H") / 2
+        _letkf_yaml_file = 'letkf.yaml'
+        _letkf_exec_args = [self.task_config.MARINE_LETKF_EXEC,
+                            'soca',
+                            'localensembleda',
+                            _letkf_yaml_file]
+
+        self.task_config.WINDOW_MIDDLE = self.task_config.current_cycle
+        self.task_config.WINDOW_BEGIN = self.task_config.current_cycle - _half_assim_freq
+        self.task_config.letkf_exec_args = _letkf_exec_args
+        self.task_config.letkf_yaml_file = _letkf_yaml_file
+        self.task_config.mom_input_nml_tmpl = os.path.join(self.task_config.DATA, 'mom_input.nml.tmpl')
+        self.task_config.mom_input_nml = os.path.join(self.task_config.DATA, 'mom_input.nml')
+        self.task_config.obs_dir = os.path.join(self.task_config.DATA, 'obs')
+
+    @logit(logger)
+    def initialize(self):
+        """Method initialize for ocean and sea ice LETKF task
+        Parameters:
+        ------------
+        None
+        Returns:
+        --------
+        None
+        """
+
+        logger.info("initialize")
+
+        # make directories and stage ensemble background files
+        ensbkgconf = AttrDict()
+        keys = ['previous_cycle', 'current_cycle', 'DATA', 'NMEM_ENS',
+                'PARMgfs', 'ROTDIR', 'COM_OCEAN_HISTORY_TMPL', 'COM_ICE_HISTORY_TMPL']
+        for key in keys:
+            ensbkgconf[key] = self.task_config[key]
+        ensbkgconf.RUN = 'enkfgdas'
+        soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.SOCA_ENS_BKG_STAGE_YAML_TMPL, ensbkgconf)
+        FileHandler(soca_ens_bkg_stage_list).sync()
+        soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config)
+        FileHandler(soca_fix_stage_list).sync()
+        letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, self.task_config)
+        FileHandler(letkf_stage_list).sync()
+
+        obs_list = parse_j2yaml(self.task_config.OBS_YAML, self.task_config)
+
+        # get the list of observations
+        obs_files = []
+        for ob in obs_list['observers']:
+            obs_name = ob['obs space']['name'].lower()
+            obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc"
+            obs_files.append((obs_filename, ob))
+
+        obs_files_to_copy = []
+        obs_to_use = []
+        # copy obs from COMIN_OBS to DATA/obs
+        for obs_file, ob in obs_files:
+            obs_src = os.path.join(self.task_config.COMIN_OBS, obs_file)
+            obs_dst = os.path.join(self.task_config.DATA, self.task_config.obs_dir, obs_file)
+            if os.path.exists(obs_src):
+                obs_files_to_copy.append([obs_src, obs_dst])
+                obs_to_use.append(ob)
+            else:
+                logger.warning(f"{obs_file} is not available in {self.task_config.COMIN_OBS}")
+
+        # stage the desired obs files
+        FileHandler({'copy': obs_files_to_copy}).sync()
+
+        # make the letkf.yaml
+        letkfconf = AttrDict()
+        keys = ['WINDOW_BEGIN', 'WINDOW_MIDDLE', 'RUN', 'gcyc', 'NMEM_ENS']
+        for key in keys:
+            letkfconf[key] = self.task_config[key]
+        letkfconf.RUN = 'enkfgdas'
+        letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, letkfconf)
+        letkf_yaml.observations.observers = obs_to_use
+        letkf_yaml.save(self.task_config.letkf_yaml_file)
+
+        # swap date and stack size in mom_input.nml
+        domain_stack_size = self.task_config.DOMAIN_STACK_SIZE
+        ymdhms = [int(s) for s in self.task_config.WINDOW_BEGIN.strftime('%Y,%m,%d,%H,%M,%S').split(',')]
+        with open(self.task_config.mom_input_nml_tmpl, 'r') as nml_file:
+            nml = f90nml.read(nml_file)
+            nml['ocean_solo_nml']['date_init'] = ymdhms
+            nml['fms_nml']['domains_stack_size'] = int(domain_stack_size)
+            nml.write(self.task_config.mom_input_nml, force=True)  # force to overwrite if necessary
+
+    @logit(logger)
+    def run(self):
+        """Method run for ocean and sea ice LETKF task
+        Parameters:
+        ------------
+        None
+        Returns:
+        --------
+        None
+        """
+
+        logger.info("run")
+
+    @logit(logger)
+    def finalize(self):
+        """Method finalize for ocean and sea ice LETKF task
+        Parameters:
+        ------------
+        None
+        Returns:
+        --------
+        None
+        """
+
+        logger.info("finalize")
diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py
new file mode 100644
index 0000000000..98b57ae801
--- /dev/null
+++ b/ush/python/pygfs/task/oceanice_products.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python3
+
+import os
+from logging import getLogger
+from typing import List, Dict, Any
+from pprint import pformat
+import xarray as xr
+
+from wxflow import (AttrDict,
+                    parse_j2yaml,
+                    FileHandler,
+                    Jinja,
+                    logit,
+                    Task,
+                    add_to_datetime, to_timedelta,
+                    WorkflowException,
+                    Executable)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class OceanIceProducts(Task):
+    """Ocean Ice Products Task
+    """
+
+    VALID_COMPONENTS = ['ocean', 'ice']
+    COMPONENT_RES_MAP = {'ocean': 'OCNRES', 'ice': 'ICERES'}
+    VALID_PRODUCT_GRIDS = {'mx025': ['1p00', '0p25'],
+                           'mx050': ['1p00', '0p50'],
+                           'mx100': ['1p00'],
+                           'mx500': ['5p00']}
+
+    # These could be read from the yaml file
+    TRIPOLE_DIMS_MAP = {'mx025': [1440, 1080], 'mx050': [720, 526], 'mx100': [360, 320], 'mx500': [72, 35]}
+    LATLON_DIMS_MAP = {'0p25': [1440, 721], '0p50': [720, 361], '1p00': [360, 181], '5p00': [72, 36]}
+
+    @logit(logger, name="OceanIceProducts")
+    def __init__(self, config: Dict[str, Any]) -> None:
+        """Constructor for the Ocean/Ice Productstask
+
+        Parameters
+        ----------
+        config : Dict[str, Any]
+            Incoming configuration for the task from the environment
+
+        Returns
+        -------
+        None
+        """
+        super().__init__(config)
+
+        if self.task_config.COMPONENT not in self.VALID_COMPONENTS:
+            raise NotImplementedError(f'{self.task_config.COMPONENT} is not a valid model component.\n' +
+                                      'Valid model components are:\n' +
+                                      f'{", ".join(self.VALID_COMPONENTS)}')
+
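+        # model grid tag, e.g. a 1/4-degree ocean/ice grid maps to 'mx025' (a key of VALID_PRODUCT_GRIDS)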
+        model_grid = f"mx{self.task_config[self.COMPONENT_RES_MAP[self.task_config.COMPONENT]]:03d}"
+
+        valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))
+
+        if self.task_config.COMPONENT == 'ice':
+            offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS
+            # For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset.
+            # TODO: Consider FHMIN when calculating offset.
+            if offset != 0:
+                forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H"))
+                # For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours
+                if forecast_hour <= self.task_config.FHOUT_ICE_GFS:
+                    interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H"))
+                else:
+                    interval = self.task_config.FHOUT_ICE_GFS
+            else:
+                forecast_hour = self.task_config.FORECAST_HOUR
+                interval = self.task_config.FHOUT_ICE_GFS
+        if self.task_config.COMPONENT == 'ocean':
+            forecast_hour = self.task_config.FORECAST_HOUR
+            interval = self.task_config.FHOUT_OCN_GFS
+
+        # TODO: This is a bit of a hack, but it works for now
+        # FIXME: find a better way to provide the averaging period
+        avg_period = f"{forecast_hour-interval:03d}-{forecast_hour:03d}"
+
+        # Extend task_config with localdict
+        localdict = AttrDict(
+            {'component': self.task_config.COMPONENT,
+             'forecast_hour': forecast_hour,
+             'valid_datetime': valid_datetime,
+             'avg_period': avg_period,
+             'model_grid': model_grid,
+             'interval': interval,
+             'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]}
+        )
+        self.task_config = AttrDict(**self.task_config, **localdict)
+
+        # Read the oceanice_products.yaml file for common configuration
+        logger.info(f"Read the ocean ice products configuration yaml file {self.task_config.OCEANICEPRODUCTS_CONFIG}")
+        self.task_config.oceanice_yaml = parse_j2yaml(self.task_config.OCEANICEPRODUCTS_CONFIG, self.task_config)
+        logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}")
+
+    @staticmethod
+    @logit(logger)
+    def initialize(config: Dict) -> None:
+        """Initialize the work directory by copying all the common fix data
+
+        Parameters
+        ----------
+        config : Dict
+            Configuration dictionary for the task
+
+        Returns
+        -------
+        None
+        """
+
+        # Copy static data to run directory
+        logger.info("Copy static data to run directory")
+        FileHandler(config.oceanice_yaml.ocnicepost.fix_data).sync()
+
+        # Copy "component" specific model data to run directory (e.g. ocean/ice forecast output)
+        logger.info(f"Copy {config.component} data to run directory")
+        FileHandler(config.oceanice_yaml[config.component].data_in).sync()
+
+    @staticmethod
+    @logit(logger)
+    def configure(config: Dict, product_grid: str) -> None:
+        """Configure the namelist for the product_grid in the work directory.
+        Create namelist 'ocnicepost.nml' from template
+
+        Parameters
+        ----------
+        config : Dict
+            Configuration dictionary for the task
+        product_grid : str
+            Target product grid to process
+
+        Returns
+        -------
+        None
+        """
+
+        # Make a localconf with the "component" specific configuration for parsing the namelist
+        localconf = AttrDict()
+        localconf.DATA = config.DATA
+        localconf.component = config.component
+
+        localconf.source_tripole_dims = ', '.join(map(str, OceanIceProducts.TRIPOLE_DIMS_MAP[config.model_grid]))
+        localconf.target_latlon_dims = ', '.join(map(str, OceanIceProducts.LATLON_DIMS_MAP[product_grid]))
+
+        localconf.maskvar = config.oceanice_yaml[config.component].namelist.maskvar
+        localconf.sinvar = config.oceanice_yaml[config.component].namelist.sinvar
+        localconf.cosvar = config.oceanice_yaml[config.component].namelist.cosvar
+        localconf.angvar = config.oceanice_yaml[config.component].namelist.angvar
+        localconf.debug = ".true." if config.oceanice_yaml.ocnicepost.namelist.debug else ".false."
+
+        logger.debug(f"localconf:\n{pformat(localconf)}")
+
+        # Configure the namelist and write to file
+        logger.info("Create namelist for ocnicepost.x")
+        nml_template = os.path.join(localconf.DATA, "ocnicepost.nml.jinja2")
+        nml_data = Jinja(nml_template, localconf).render
+        logger.debug(f"ocnicepost_nml:\n{nml_data}")
+        nml_file = os.path.join(localconf.DATA, "ocnicepost.nml")
+        with open(nml_file, "w") as fho:
+            fho.write(nml_data)
+
+    @staticmethod
+    @logit(logger)
+    def execute(config: Dict, product_grid: str) -> None:
+        """Run the ocnicepost.x executable to interpolate and convert to grib2
+
+        Parameters
+        ----------
+        config : Dict
+            Configuration dictionary for the task
+        product_grid : str
+            Target product grid to process
+
+        Returns
+        -------
+        None
+        """
+
+        # Run the ocnicepost.x executable
+        OceanIceProducts.interp(config.DATA, config.APRUN_OCNICEPOST, exec_name="ocnicepost.x")
+
+        # Convert interpolated netCDF file to grib2
+        OceanIceProducts.netCDF_to_grib2(config, product_grid)
+
+    @staticmethod
+    @logit(logger)
+    def interp(workdir: str, aprun_cmd: str, exec_name: str = "ocnicepost.x") -> None:
+        """
+        Run the interpolation executable to generate rectilinear netCDF file
+
+        Parameters
+        ----------
+        workdir : str
+            Working directory for the task
+        aprun_cmd : str
+            aprun command to use
+        exec_name : str
+            Name of the executable e.g. ocnicepost.x
+
+        Returns
+        -------
+        None
+        """
+        os.chdir(workdir)
+        logger.debug(f"Current working directory: {os.getcwd()}")
+
+        exec_cmd = Executable(aprun_cmd)
+        exec_cmd.add_default_arg(os.path.join(workdir, exec_name))
+
+        OceanIceProducts._call_executable(exec_cmd)
+
+    @staticmethod
+    @logit(logger)
+    def netCDF_to_grib2(config: Dict, grid: str) -> None:
+        """Convert interpolated netCDF file to grib2
+
+        Parameters
+        ----------
+        config : Dict
+            Configuration dictionary for the task
+        grid : str
+            Target product grid to process
+
+        Returns
+        -------
+        None
+        """
+
+        os.chdir(config.DATA)
+
+        exec_cmd = Executable(config.oceanice_yaml.nc2grib2.script)
+        arguments = [config.component, grid, config.current_cycle.strftime("%Y%m%d%H"), config.avg_period]
+        if config.component == 'ocean':
+            levs = config.oceanice_yaml.ocean.namelist.ocean_levels
+            arguments.append(':'.join(map(str, levs)))
+
+        logger.info(f"Executing {exec_cmd} with arguments {arguments}")
+        try:
+            exec_cmd(*arguments)
+        except OSError:
+            logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+            raise OSError(f"{exec_cmd}")
+        except Exception:
+            logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+            raise WorkflowException(f"{exec_cmd}")
+
+    @staticmethod
+    @logit(logger)
+    def subset(config: Dict) -> None:
+        """
+        Subset a list of variables from a netcdf file and save to a new netcdf file.
+        Also save global attributes and history from the old netcdf file into new netcdf file
+
+        Parameters
+        ----------
+        config : Dict
+            Configuration dictionary for the task
+
+        Returns
+        -------
+        None
+        """
+
+        os.chdir(config.DATA)
+
+        input_file = f"{config.component}.nc"
+        output_file = f"{config.component}_subset.nc"
+        varlist = config.oceanice_yaml[config.component].subset
+
+        logger.info(f"Subsetting {varlist} from {input_file} to {output_file}")
+
+        try:
+            # open the netcdf file
+            ds = xr.open_dataset(input_file)
+
+            # subset the variables
+            ds_subset = ds[varlist]
+
+            # save global attributes from the old netcdf file into new netcdf file
+            ds_subset.attrs = ds.attrs
+
+            # save subsetted variables to a new netcdf file
+            ds_subset.to_netcdf(output_file)
+
+        except FileNotFoundError:
+            logger.exception(f"FATAL ERROR: Input file not found: {input_file}")
+            raise FileNotFoundError(f"File not found: {input_file}")
+
+        except IOError as err:
+            logger.exception(f"FATAL ERROR: IOError occurred during netCDF subset: {input_file}")
+            raise IOError(f"An I/O error occurred: {err}")
+
+        except Exception as err:
+            logger.exception(f"FATAL ERROR: Error occurred during netCDF subset: {input_file}")
+            raise WorkflowException(f"{err}")
+
+        finally:
+            # close the netcdf files
+            ds.close()
+            ds_subset.close()
+
+    @staticmethod
+    @logit(logger)
+    def _call_executable(exec_cmd: Executable) -> None:
+        """Internal method to call executable
+
+        Parameters
+        ----------
+        exec_cmd : Executable
+            Executable to run
+
+        Raises
+        ------
+        OSError
+            Failure due to OS issues
+        WorkflowException
+            All other exceptions
+        """
+
+        logger.info(f"Executing {exec_cmd}")
+        try:
+            exec_cmd()
+        except OSError:
+            logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+            raise OSError(f"{exec_cmd}")
+        except Exception:
+            logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+            raise WorkflowException(f"{exec_cmd}")
+
+    @staticmethod
+    @logit(logger)
+    def finalize(config: Dict) -> None:
+        """Perform closing actions of the task.
+        Copy data back from the DATA/ directory to COM/
+
+        Parameters
+        ----------
+        config: Dict
+            Configuration dictionary for the task
+
+        Returns
+        -------
+        None
+        """
+
+        # Copy "component" specific generated data to COM/ directory
+        data_out = config.oceanice_yaml[config.component].data_out
+
+        logger.info(f"Copy processed data to COM/ directory")
+        FileHandler(data_out).sync()
diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/snow_analysis.py
similarity index 72%
rename from ush/python/pygfs/task/land_analysis.py
rename to ush/python/pygfs/task/snow_analysis.py
index 307e875183..9656b00a8e 100644
--- a/ush/python/pygfs/task/land_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -11,7 +11,7 @@
                     FileHandler,
                     to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime,
                     rm_p,
-                    parse_j2yaml, parse_yamltmpl, save_as_yaml,
+                    parse_j2yaml, save_as_yaml,
                     Jinja,
                     logit,
                     Executable,
@@ -21,44 +21,44 @@
 logger = getLogger(__name__.split('.')[-1])
 
 
-class LandAnalysis(Analysis):
+class SnowAnalysis(Analysis):
     """
-    Class for global land analysis tasks
+    Class for global snow analysis tasks
     """
 
-    NMEM_LANDENS = 2  # The size of the land ensemble is fixed at 2.  Does this need to be a variable?
+    NMEM_SNOWENS = 2
 
-    @logit(logger, name="LandAnalysis")
+    @logit(logger, name="SnowAnalysis")
     def __init__(self, config):
         super().__init__(config)
 
-        _res = int(self.config['CASE'][1:])
-        _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2)
-        _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml")
+        _res = int(self.task_config['CASE'][1:])
+        _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+        _letkfoi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.letkfoi.yaml")
 
         # Create a local dictionary that is repeatedly used across this class
         local_dict = AttrDict(
             {
                 'npx_ges': _res + 1,
                 'npy_ges': _res + 1,
-                'npz_ges': self.config.LEVS - 1,
-                'npz': self.config.LEVS - 1,
-                'LAND_WINDOW_BEGIN': _window_begin,
-                'LAND_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
-                'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
-                'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
+                'npz_ges': self.task_config.LEVS - 1,
+                'npz': self.task_config.LEVS - 1,
+                'SNOW_WINDOW_BEGIN': _window_begin,
+                'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H",
+                'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+                'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
                 'jedi_yaml': _letkfoi_yaml
             }
         )
 
-        # task_config is everything that this task should need
-        self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+        # Extend task_config with local_dict
+        self.task_config = AttrDict(**self.task_config, **local_dict)
 
     @logit(logger)
     def prepare_GTS(self) -> None:
-        """Prepare the GTS data for a global land analysis
+        """Prepare the GTS data for a global snow analysis
 
-        This method will prepare GTS data for a global land analysis using JEDI.
+        This method will prepare GTS data for a global snow analysis using JEDI.
         This includes:
         - processing GTS bufr snow depth observation data to IODA format
 
@@ -74,7 +74,7 @@ def prepare_GTS(self) -> None:
         # create a temporary dict of all keys needed in this method
         localconf = AttrDict()
         keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
-                'OPREFIX', 'CASE', 'ntiles']
+                'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
         for key in keys:
             localconf[key] = self.task_config[key]
 
@@ -99,7 +99,7 @@ def prepare_GTS(self) -> None:
 
         def _gtsbufr2iodax(exe, yaml_file):
             if not os.path.isfile(yaml_file):
-                logger.exception(f"{yaml_file} not found")
+                logger.exception(f"FATAL ERROR: {yaml_file} not found")
                 raise FileNotFoundError(yaml_file)
 
             logger.info(f"Executing {exe}")
@@ -114,7 +114,7 @@ def _gtsbufr2iodax(exe, yaml_file):
         # 1. generate bufr2ioda YAML files
         # 2. execute bufr2ioda.x
         for name in prep_gts_config.bufr2ioda.keys():
-            gts_yaml = os.path.join(self.runtime_config.DATA, f"bufr_{name}_snow.yaml")
+            gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml")
             logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}")
             temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf)
             save_as_yaml(temp_yaml, gts_yaml)
@@ -133,9 +133,9 @@ def _gtsbufr2iodax(exe, yaml_file):
 
     @logit(logger)
     def prepare_IMS(self) -> None:
-        """Prepare the IMS data for a global land analysis
+        """Prepare the IMS data for a global snow analysis
 
-        This method will prepare IMS data for a global land analysis using JEDI.
+        This method will prepare IMS data for a global snow analysis using JEDI.
         This includes:
         - staging model backgrounds
         - processing raw IMS observation data and prepare for conversion to IODA
@@ -153,7 +153,7 @@ def prepare_IMS(self) -> None:
         # create a temporary dict of all keys needed in this method
         localconf = AttrDict()
         keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
-                'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
+                'OPREFIX', 'CASE', 'OCNRES', 'ntiles', 'FIXgfs']
         for key in keys:
             localconf[key] = self.task_config[key]
 
@@ -198,7 +198,7 @@ def prepare_IMS(self) -> None:
             raise WorkflowException(f"An error occured during execution of {exe}")
 
         # Ensure the snow depth IMS file is produced by the above executable
-        input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}.mx{localconf.OCNRES}_oro_data.nc"
+        input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}_oro_data.nc"
         if not os.path.isfile(f"{os.path.join(localconf.DATA, input_file)}"):
             logger.exception(f"{self.task_config.CALCFIMSEXE} failed to produce {input_file}")
             raise FileNotFoundError(f"{os.path.join(localconf.DATA, input_file)}")
@@ -232,7 +232,7 @@ def prepare_IMS(self) -> None:
 
     @logit(logger)
     def initialize(self) -> None:
-        """Initialize method for Land analysis
+        """Initialize method for snow analysis
         This method:
         - creates artifacts in the DATA directory by copying fix files
         - creates the JEDI LETKF yaml from the template
@@ -241,7 +241,7 @@ def initialize(self) -> None:
         Parameters
         ----------
         self : Analysis
-            Instance of the LandAnalysis object
+            Instance of the SnowAnalysis object
         """
 
         super().initialize()
@@ -249,30 +249,27 @@ def initialize(self) -> None:
         # create a temporary dict of all keys needed in this method
         localconf = AttrDict()
         keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
-                'OPREFIX', 'CASE', 'ntiles']
+                'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
         for key in keys:
             localconf[key] = self.task_config[key]
 
         # Make member directories in DATA for background
         dirlist = []
-        for imem in range(1, LandAnalysis.NMEM_LANDENS + 1):
+        for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1):
             dirlist.append(os.path.join(localconf.DATA, 'bkg', f'mem{imem:03d}'))
         FileHandler({'mkdir': dirlist}).sync()
 
         # stage fix files
-        jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'land_jedi_fix.yaml')
-        logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}")
-        jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config)
+        logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}")
+        jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config)
         FileHandler(jedi_fix_list).sync()
 
         # stage backgrounds
         logger.info("Staging ensemble backgrounds")
         FileHandler(self.get_ens_bkg_dict(localconf)).sync()
 
-        # generate letkfoi YAML file
-        logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}")
-        letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config)
-        save_as_yaml(letkfoi_yaml, self.task_config.jedi_yaml)
+        # Write out letkfoi YAML file
+        save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
         logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
 
         # need output dir for diags and anl
@@ -294,15 +291,15 @@ def execute(self) -> None:
         Parameters
         ----------
         self : Analysis
-           Instance of the LandAnalysis object
+           Instance of the SnowAnalysis object
         """
 
         # create a temporary dict of all keys needed in this method
         localconf = AttrDict()
         keys = ['HOMEgfs', 'DATA', 'current_cycle',
-                'COM_ATMOS_RESTART_PREV', 'COM_LAND_ANALYSIS', 'APREFIX',
-                'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'ntiles',
-                'APRUN_LANDANL', 'JEDIEXE', 'jedi_yaml',
+                'COM_ATMOS_RESTART_PREV', 'COM_SNOW_ANALYSIS', 'APREFIX',
+                'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'OCNRES', 'ntiles',
+                'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml', 'DOIAU', 'SNOW_WINDOW_BEGIN',
                 'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR']
         for key in keys:
             localconf[key] = self.task_config[key]
@@ -313,17 +310,27 @@ def execute(self) -> None:
                              AttrDict({key: localconf[key] for key in ['DATA', 'ntiles', 'current_cycle']}))
 
         logger.info("Running JEDI LETKF")
-        self.execute_jediexe(localconf.DATA,
-                             localconf.APRUN_LANDANL,
-                             os.path.basename(localconf.JEDIEXE),
-                             localconf.jedi_yaml)
+        exec_cmd = Executable(localconf.APRUN_SNOWANL)
+        exec_name = os.path.join(localconf.DATA, 'gdas.x')
+        exec_cmd.add_default_arg(exec_name)
+        exec_cmd.add_default_arg('fv3jedi')
+        exec_cmd.add_default_arg('localensembleda')
+        exec_cmd.add_default_arg(localconf.jedi_yaml)
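+        # i.e. <APRUN_SNOWANL> <DATA>/gdas.x fv3jedi localensembleda <letkfoi yaml>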
+
+        try:
+            logger.debug(f"Executing {exec_cmd}")
+            exec_cmd()
+        except OSError:
+            raise OSError(f"Failed to execute {exec_cmd}")
+        except Exception:
+            raise WorkflowException(f"An error occured during execution of {exec_cmd}")
 
         logger.info("Creating analysis from backgrounds and increments")
         self.add_increments(localconf)
 
     @logit(logger)
     def finalize(self) -> None:
-        """Performs closing actions of the Land analysis task
+        """Performs closing actions of the Snow analysis task
         This method:
         - tar and gzip the output diag files and place in COM/
         - copy the generated YAML file from initialize to the COM/
@@ -333,11 +340,11 @@ def finalize(self) -> None:
         Parameters
         ----------
         self : Analysis
-            Instance of the LandAnalysis object
+            Instance of the SnowAnalysis object
         """
 
         logger.info("Create diagnostic tarball of diag*.nc4 files")
-        statfile = os.path.join(self.task_config.COM_LAND_ANALYSIS, f"{self.task_config.APREFIX}landstat.tgz")
+        statfile = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat.tgz")
         self.tgz_diags(statfile, self.task_config.DATA)
 
         logger.info("Copy full YAML to COM")
@@ -350,22 +357,28 @@ def finalize(self) -> None:
         FileHandler(yaml_copy).sync()
 
         logger.info("Copy analysis to COM")
-        template = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
+        bkgtimes = []
+        if self.task_config.DOIAU:
+            # need both beginning and middle of window
+            bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN)
+        bkgtimes.append(self.task_config.current_cycle)
         anllist = []
-        for itile in range(1, self.task_config.ntiles + 1):
-            filename = template.format(tilenum=itile)
-            src = os.path.join(self.task_config.DATA, 'anl', filename)
-            dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename)
-            anllist.append([src, dest])
+        for bkgtime in bkgtimes:
+            template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc'
+            for itile in range(1, self.task_config.ntiles + 1):
+                filename = template.format(tilenum=itile)
+                src = os.path.join(self.task_config.DATA, 'anl', filename)
+                dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename)
+                anllist.append([src, dest])
         FileHandler({'copy': anllist}).sync()
 
         logger.info('Copy increments to COM')
-        template = f'landinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
+        template = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
         inclist = []
         for itile in range(1, self.task_config.ntiles + 1):
             filename = template.format(tilenum=itile)
             src = os.path.join(self.task_config.DATA, 'anl', filename)
-            dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename)
+            dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename)
             inclist.append([src, dest])
         FileHandler({'copy': inclist}).sync()
 
@@ -375,7 +388,7 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]:
         """Compile a dictionary of model background files to copy
 
         This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data)
-        that are needed for global land DA and returns said dictionary for use by the FileHandler class.
+        that are needed for global snow DA and returns said dictionary for use by the FileHandler class.
 
         Parameters
         ----------
@@ -401,11 +414,11 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]:
         # Start accumulating list of background files to copy
         bkglist = []
 
-        # land DA needs coupler
+        # snow DA needs coupler
         basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
         bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
 
-        # land DA only needs sfc_data
+        # snow DA only needs sfc_data
         for ftype in ['sfc_data']:
             template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
             for itile in range(1, config.ntiles + 1):
@@ -447,17 +460,17 @@ def get_ens_bkg_dict(config: Dict) -> Dict:
         # get FV3 sfc_data RESTART files; Note an ensemble is being created
         rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV)
 
-        for imem in range(1, LandAnalysis.NMEM_LANDENS + 1):
+        for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1):
             memchar = f"mem{imem:03d}"
 
             run_dir = os.path.join(config.DATA, 'bkg', memchar, 'RESTART')
             dirlist.append(run_dir)
 
-            # Land DA needs coupler
+            # Snow DA needs coupler
             basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
             bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
 
-            # Land DA only needs sfc_data
+            # Snow DA only needs sfc_data
             for ftype in ['sfc_data']:
                 template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
                 for itile in range(1, config.ntiles + 1):
@@ -491,7 +504,7 @@ def create_ensemble(vname: str, bestddev: float, config: Dict) -> None:
         """
 
         # 2 ens members
-        offset = bestddev / np.sqrt(LandAnalysis.NMEM_LANDENS)
+        offset = bestddev / np.sqrt(SnowAnalysis.NMEM_SNOWENS)
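+        # NOTE: assuming the two members are perturbed by +/- offset, this choice yields a
+        # 2-member ensemble whose sample standard deviation equals bestddev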
 
         logger.info(f"Creating ensemble for LETKFOI by offsetting with {offset}")
 
@@ -530,10 +543,13 @@ def add_increments(config: Dict) -> None:
              DATA
              current_cycle
              CASE
+             OCNRES
              ntiles
              APPLY_INCR_NML_TMPL
              APPLY_INCR_EXE
              APRUN_APPLY_INCR
+             DOIAU
+             SNOW_WINDOW_BEGIN
 
         Raises
         ------
@@ -545,38 +561,67 @@ def add_increments(config: Dict) -> None:
 
         # need backgrounds to create analysis from increments after LETKF
         logger.info("Copy backgrounds into anl/ directory for creating analysis from increments")
-        template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
+        bkgtimes = []
+        if config.DOIAU:
+            # want analysis at beginning and middle of window
+            bkgtimes.append(config.SNOW_WINDOW_BEGIN)
+        bkgtimes.append(config.current_cycle)
         anllist = []
-        for itile in range(1, config.ntiles + 1):
-            filename = template.format(tilenum=itile)
-            src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename)
-            dest = os.path.join(config.DATA, "anl", filename)
-            anllist.append([src, dest])
+        for bkgtime in bkgtimes:
+            template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc'
+            for itile in range(1, config.ntiles + 1):
+                filename = template.format(tilenum=itile)
+                src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename)
+                dest = os.path.join(config.DATA, "anl", filename)
+                anllist.append([src, dest])
         FileHandler({'copy': anllist}).sync()
 
-        logger.info("Create namelist for APPLY_INCR_EXE")
-        nml_template = config.APPLY_INCR_NML_TMPL
-        nml_data = Jinja(nml_template, config).render
-        logger.debug(f"apply_incr_nml:\n{nml_data}")
-
-        nml_file = os.path.join(config.DATA, "apply_incr_nml")
-        with open(nml_file, "w") as fho:
-            fho.write(nml_data)
-
-        logger.info("Link APPLY_INCR_EXE into DATA/")
-        exe_src = config.APPLY_INCR_EXE
-        exe_dest = os.path.join(config.DATA, os.path.basename(exe_src))
-        if os.path.exists(exe_dest):
-            rm_p(exe_dest)
-        os.symlink(exe_src, exe_dest)
-
-        # execute APPLY_INCR_EXE to create analysis files
-        exe = Executable(config.APRUN_APPLY_INCR)
-        exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src)))
-        logger.info(f"Executing {exe}")
-        try:
-            exe()
-        except OSError:
-            raise OSError(f"Failed to execute {exe}")
-        except Exception:
-            raise WorkflowException(f"An error occured during execution of {exe}")
+        if config.DOIAU:
+            logger.info("Copying increments to beginning of window")
+            template_in = f'snowinc.{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc'
+            template_out = f'snowinc.{to_fv3time(config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc'
+            inclist = []
+            for itile in range(1, config.ntiles + 1):
+                filename_in = template_in.format(tilenum=itile)
+                filename_out = template_out.format(tilenum=itile)
+                src = os.path.join(config.DATA, 'anl', filename_in)
+                dest = os.path.join(config.DATA, 'anl', filename_out)
+                inclist.append([src, dest])
+            FileHandler({'copy': inclist}).sync()
+
+        # loop over times to apply increments
+        for bkgtime in bkgtimes:
+            logger.info("Processing analysis valid: {bkgtime}")
+            logger.info("Create namelist for APPLY_INCR_EXE")
+            nml_template = config.APPLY_INCR_NML_TMPL
+            nml_config = {
+                'current_cycle': bkgtime,
+                'CASE': config.CASE,
+                'DATA': config.DATA,
+                'HOMEgfs': config.HOMEgfs,
+                'OCNRES': config.OCNRES,
+            }
+            nml_data = Jinja(nml_template, nml_config).render
+            logger.debug(f"apply_incr_nml:\n{nml_data}")
+
+            nml_file = os.path.join(config.DATA, "apply_incr_nml")
+            with open(nml_file, "w") as fho:
+                fho.write(nml_data)
+
+            logger.info("Link APPLY_INCR_EXE into DATA/")
+            exe_src = config.APPLY_INCR_EXE
+            exe_dest = os.path.join(config.DATA, os.path.basename(exe_src))
+            if os.path.exists(exe_dest):
+                rm_p(exe_dest)
+            os.symlink(exe_src, exe_dest)
+
+            # execute APPLY_INCR_EXE to create analysis files
+            exe = Executable(config.APRUN_APPLY_INCR)
+            exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src)))
+            logger.info(f"Executing {exe}")
+            try:
+                exe()
+            except OSError:
+                raise OSError(f"Failed to execute {exe}")
+            except Exception:
+                raise WorkflowException(f"An error occured during execution of {exe}")
diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py
index 7db50e1582..7e42e07c64 100644
--- a/ush/python/pygfs/task/upp.py
+++ b/ush/python/pygfs/task/upp.py
@@ -46,26 +46,27 @@ def __init__(self, config: Dict[str, Any]) -> None:
         """
         super().__init__(config)
 
-        if self.config.UPP_RUN not in self.VALID_UPP_RUN:
-            raise NotImplementedError(f'{self.config.UPP_RUN} is not a valid UPP run type.\n' +
+        if self.task_config.UPP_RUN not in self.VALID_UPP_RUN:
+            raise NotImplementedError(f'{self.task_config.UPP_RUN} is not a valid UPP run type.\n' +
                                       'Valid UPP_RUN values are:\n' +
                                       f'{", ".join(self.VALID_UPP_RUN)}')
 
-        valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H"))
+        valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))
 
+        # Extend task_config with localdict
         localdict = AttrDict(
-            {'upp_run': self.config.UPP_RUN,
-             'forecast_hour': self.config.FORECAST_HOUR,
+            {'upp_run': self.task_config.UPP_RUN,
+             'forecast_hour': self.task_config.FORECAST_HOUR,
              'valid_datetime': valid_datetime,
              'atmos_filename': f"atm_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc",
              'flux_filename': f"sfc_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc"
              }
         )
-        self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+        self.task_config = AttrDict(**self.task_config, **localdict)
 
         # Read the upp.yaml file for common configuration
-        logger.info(f"Read the UPP configuration yaml file {self.config.UPP_CONFIG}")
-        self.task_config.upp_yaml = parse_j2yaml(self.config.UPP_CONFIG, self.task_config)
+        logger.info(f"Read the UPP configuration yaml file {self.task_config.UPP_CONFIG}")
+        self.task_config.upp_yaml = parse_j2yaml(self.task_config.UPP_CONFIG, self.task_config)
         logger.debug(f"upp_yaml:\n{pformat(self.task_config.upp_yaml)}")
 
     @staticmethod
diff --git a/ush/python/pygfs/utils/__init__.py b/ush/python/pygfs/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ush/python/pygfs/utils/marine_da_utils.py b/ush/python/pygfs/utils/marine_da_utils.py
new file mode 100644
index 0000000000..016551878b
--- /dev/null
+++ b/ush/python/pygfs/utils/marine_da_utils.py
@@ -0,0 +1,99 @@
+import f90nml
+import os
+from logging import getLogger
+import xarray as xr
+
+from wxflow import (FileHandler,
+                    logit,
+                    WorkflowException,
+                    AttrDict,
+                    parse_j2yaml,
+                    Executable,
+                    jinja)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+@logit(logger)
+def run(exec_cmd: Executable) -> None:
+    """Run the executable command
+    """
+    logger.info(f"Executing {exec_cmd}")
+    try:
+        logger.debug(f"Executing {exec_cmd}")
+        exec_cmd()
+    except OSError:
+        raise OSError(f"Failed to execute {exec_cmd}")
+    except Exception:
+        raise WorkflowException(f"An error occured during execution of {exec_cmd}")
+
+
+@logit(logger)
+def link_executable(task_config: AttrDict, exe_name: str) -> None:
+    """Link the executable to the DATA directory
+    """
+    logger.info(f"Link executable {exe_name}")
+    logger.warn("WARNING: Linking is not permitted per EE2.")
+    exe_src = os.path.join(task_config.EXECgfs, exe_name)
+    exe_dest = os.path.join(task_config.DATA, exe_name)
+    if os.path.exists(exe_dest):
+        os.remove(exe_dest)
+    os.symlink(exe_src, exe_dest)
+
+
+@logit(logger)
+def prep_input_nml(task_config: AttrDict) -> None:
+    """Prepare the input.nml file
+       TODO: Use jinja2 instead of f90nml
+    """
+    # stage input.nml
+    mom_input_nml_tmpl_src = os.path.join(task_config.HOMEgdas, 'parm', 'soca', 'fms', 'input.nml')
+    mom_input_nml_tmpl = os.path.join(task_config.DATA, 'mom_input.nml.tmpl')
+    FileHandler({'copy': [[mom_input_nml_tmpl_src, mom_input_nml_tmpl]]}).sync()
+
+    # swap date and stacksize
+    domain_stack_size = task_config.DOMAIN_STACK_SIZE
+    ymdhms = [int(s) for s in task_config.MARINE_WINDOW_END.strftime('%Y,%m,%d,%H,%M,%S').split(',')]
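+    # e.g. a window end of 2021-03-22 06:00:00 becomes [2021, 3, 22, 6, 0, 0]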
+    with open(mom_input_nml_tmpl, 'r') as nml_file:
+        nml = f90nml.read(nml_file)
+        nml['ocean_solo_nml']['date_init'] = ymdhms
+        nml['fms_nml']['domains_stack_size'] = int(domain_stack_size)
+        nml.write('mom_input.nml')
+
+
+@logit(logger)
+def cice_hist2fms(input_filename: str, output_filename: str) -> None:
+    """ Reformat the CICE history file so it can be read by SOCA/FMS
+    Simple reformatting utility to allow soca/fms to read the CICE history files
+    """
+
+    # open the CICE history file
+    ds = xr.open_dataset(input_filename)
+
+    if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables:
+        logger.info(f"*** Already reformatted, skipping.")
+        return
+
+    # rename the dimensions to xaxis_1 and yaxis_1
+    ds = ds.rename({'ni': 'xaxis_1', 'nj': 'yaxis_1'})
+
+    # rename the variables
+    ds = ds.rename({'aice_h': 'aicen', 'hi_h': 'hicen', 'hs_h': 'hsnon'})
+
+    # Save the new netCDF file
+    ds.to_netcdf(output_filename, mode='w')
+
+
+@logit(logger)
+def stage_ens_mem(task_config: AttrDict) -> None:
+    """ Copy the ensemble members to the DATA directory
+    Copy the ensemble members to the DATA directory and reformat the CICE history files
+    """
+    # Copy the ensemble members to the DATA directory
+    logger.info("---------------- Stage ensemble members")
+    ensbkgconf = AttrDict(task_config)
+    ensbkgconf.RUN = task_config.GDUMP_ENS
+    logger.debug(f"{jinja.Jinja(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf).render}")
+    letkf_stage_list = parse_j2yaml(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf)
+    logger.info(f"{letkf_stage_list}")
+    FileHandler(letkf_stage_list).sync()
diff --git a/ush/radmon_err_rpt.sh b/ush/radmon_err_rpt.sh
index 6ae6505624..c3d251d5cd 100755
--- a/ush/radmon_err_rpt.sh
+++ b/ush/radmon_err_rpt.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -55,9 +55,6 @@ cycle2=${5:-${cycle2:?}}
 diag_rpt=${6:-${diag_rpt:?}}
 outfile=${7:-${outfile:?}}
 
-# Directories
-HOMEradmon=${HOMEradmon:-$(pwd)}
-
 # Other variables
 err=0
 RADMON_SUFFIX=${RADMON_SUFFIX}
diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh
index f68d7c88cc..3dff2a6f98 100755
--- a/ush/radmon_verf_angle.sh
+++ b/ush/radmon_verf_angle.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -29,8 +29,6 @@ source "${HOMEgfs}/ush/preamble.sh"
 #   Imported Shell Variables:
 #     RADMON_SUFFIX     data source suffix
 #                       defauls to opr
-#     EXECgfs           executable directory
-#     PARMmonitor       parm directory
 #     RAD_AREA          global or regional flag
 #                       defaults to global
 #     TANKverf_rad      data repository
@@ -83,7 +81,6 @@ which prep_step
 which startmsg
 
 # File names
-export pgmout=${pgmout:-${jlogfile}}
 touch "${pgmout}"
 
 # Other variables
@@ -101,7 +98,7 @@ fi
 
 err=0
 angle_exec=radmon_angle.x
-shared_scaninfo="${shared_scaninfo:-${PARMmonitor}/gdas_radmon_scaninfo.txt}"
+shared_scaninfo="${shared_scaninfo:-${PARMgfs}/monitor/gdas_radmon_scaninfo.txt}"
 scaninfo=scaninfo.txt
 
 #--------------------------------------------------------------------
diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh
index ab1058711e..4274436154 100755
--- a/ush/radmon_verf_bcoef.sh
+++ b/ush/radmon_verf_bcoef.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -69,7 +69,6 @@ fi
 echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}"
 
 # File names
-pgmout=${pgmout:-${jlogfile}}
 touch "${pgmout}"
 
 # Other variables
diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh
index f1f97c247e..ea0a7842e6 100755
--- a/ush/radmon_verf_bcor.sh
+++ b/ush/radmon_verf_bcor.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -65,7 +65,6 @@ source "${HOMEgfs}/ush/preamble.sh"
 ####################################################################
 
 # File names
-pgmout=${pgmout:-${jlogfile}}
 touch "${pgmout}"
 
 # Other variables
diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh
index 7f98407ec5..0e935826dd 100755
--- a/ush/radmon_verf_time.sh
+++ b/ush/radmon_verf_time.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ################################################################################
 ####  UNIX Script Documentation Block
@@ -33,8 +33,6 @@ source "${HOMEgfs}/ush/preamble.sh"
 #                       defaults to 1 (on)
 #     RADMON_SUFFIX	data source suffix
 #                       defauls to opr
-#     EXECgfs           executable directory
-#     PARMmonitor       parm data directory
 #     RAD_AREA          global or regional flag
 #                       defaults to global
 #     TANKverf_rad	data repository
@@ -75,11 +73,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 ####################################################################
 
 # File names
-#pgmout=${pgmout:-${jlogfile}}
-#touch $pgmout
 
 radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh}
-base_file=${base_file:-${PARMmonitor}/gdas_radmon_base.tar}
+base_file=${base_file:-${PARMgfs}/monitor/gdas_radmon_base.tar}
 report=report.txt
 disclaimer=disclaimer.txt
 
diff --git a/ush/rstprod.sh b/ush/rstprod.sh
index acac0340bb..b48a6817e0 100755
--- a/ush/rstprod.sh
+++ b/ush/rstprod.sh
@@ -1,6 +1,6 @@
 #! /usr/bin/env bash
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 #---------------------------------------------------------
 #  rstprod.sh
diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh
index 24cb3f2656..e3fc2b7512 100755
--- a/ush/run_mpmd.sh
+++ b/ush/run_mpmd.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD mode"}
 
diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh
index c17067ff72..6596c6ef96 100755
--- a/ush/syndat_getjtbul.sh
+++ b/ush/syndat_getjtbul.sh
@@ -18,17 +18,10 @@
 # Imported variables that must be passed in:
 #   DATA         - path to working directory
 #   pgmout       - string indicating path to for standard output file
-#   EXECSYND     - path to syndat executable directory
 #   TANK_TROPCY  - path to home directory containing tropical cyclone record
 #                  data base
 
-# Imported variables that can be passed in:
-#   jlogfile  - path to job log file (skipped over by this script if not
-#                 passed in)
-
-source "$HOMEgfs/ush/preamble.sh"
-
-EXECSYND=${EXECSYND:-${HOMESYND}/exec}
+source "${USHgfs}/preamble.sh"
 
 cd $DATA
 
@@ -52,8 +45,6 @@ hour=$(echo $CDATE10 | cut -c9-10)
 echo $PDYm1
 pdym1=$PDYm1
 
-#pdym1=$(sh $utilscript/finddate.sh $pdy d-1)
-
 echo " " >> $pgmout
 echo "Entering sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \
  >> $pgmout
@@ -123,7 +114,7 @@ fi
 
 [ -s jtwcbul ] && echo "Processing JTWC bulletin halfs into tcvitals records" >> $pgmout
 
-pgm=$(basename $EXECSYND/syndat_getjtbul.x)
+pgm=$(basename ${EXECgfs}/syndat_getjtbul.x)
 export pgm
 if [ -s prep_step ]; then
    set +u
@@ -138,7 +129,7 @@ rm -f fnoc
 
 export FORT11=jtwcbul
 export FORT51=fnoc
-time -p ${EXECSYND}/${pgm} >> $pgmout 2> errfile
+time -p ${EXECgfs}/${pgm} >> $pgmout 2> errfile
 errget=$?
 ###cat errfile
 cat errfile >> $pgmout
diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh
index cda9030577..8ec8f70b14 100755
--- a/ush/syndat_qctropcy.sh
+++ b/ush/syndat_qctropcy.sh
@@ -44,10 +44,6 @@
 #   COMSP  - path to both output jtwc-fnoc file and output tcvitals file (this
 #             tcvitals file is read by subsequent relocation processing and/or
 #             subsequent program SYNDAT_SYNDATA)
-#   PARMSYND  - path to syndat parm field directory
-#   EXECSYND  - path to syndat executable directory
-#   FIXam     - path to syndat fix field directory
-#   USHSYND   - path to syndat ush directory
 
 # Imported variables that can be passed in:
 #   ARCHSYND  - path to syndat archive directory
@@ -59,7 +55,7 @@
 #                data base
 #                (Default: /dcom/us007003)
 #   slmask    - path to t126 32-bit gaussian land/sea mask file
-#                (Default: $FIXam/syndat_slmask.t126.gaussian)
+#                (Default: ${FIXgfs}/am/syndat_slmask.t126.gaussian)
 #   copy_back - switch to copy updated files back to archive directory and
 #                to tcvitals directory
 #                (Default: YES)
@@ -67,19 +63,13 @@
 #                (Default: not set)
 #   TIMEIT   - optional time and resource reporting (Default: not set)
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ARCHSYND=${ARCHSYND:-$COMROOTp3/gfs/prod/syndat}
-HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep}
 HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep}
 TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003}
 
-FIXam=${FIXam:-$HOMEgfs/fix/am}
-USHSYND=${USHSYND:-$HOMEgfs/ush}
-EXECSYND=${EXECSYND:-$HOMEgfs/exec}
-PARMSYND=${PARMSYND:-$HOMEgfs/parm/relo}
-
-slmask=${slmask:-$FIXam/syndat_slmask.t126.gaussian}
+slmask=${slmask:-${FIXgfs}/am/syndat_slmask.t126.gaussian}
 copy_back=${copy_back:-YES}
 files_override=${files_override:-""}
 
@@ -188,12 +178,12 @@ if [ -n "$files_override" ]; then  # for testing, typically want FILES=F
 fi
 
 echo " &INPUT  RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp
-cat $PARMSYND/syndat_qctropcy.${RUN}.parm >> vitchk.inp
+cat ${PARMgfs}/relo/syndat_qctropcy.${RUN}.parm >> vitchk.inp
  
-#  Copy the fixed fields from FIXam
+#  Copy the fixed fields
  
-cp $FIXam/syndat_fildef.vit fildef.vit
-cp $FIXam/syndat_stmnames stmnames
+cp ${FIXgfs}/am/syndat_fildef.vit fildef.vit
+cp ${FIXgfs}/am/syndat_stmnames stmnames
 
 
 rm -f nhc fnoc lthistry
@@ -205,12 +195,9 @@ rm -f nhc fnoc lthistry
 #  All are input to program syndat_qctropcy
 #  ------------------------------------------------------------------
 
-if [ -s $HOMENHC/tcvitals ]; then
-   echo "tcvitals found" >> $pgmout
-   cp $HOMENHC/tcvitals nhc
-elif [ -s $HOMENHCp1/tcvitals ]; then
+if [ -s ${HOMENHC}/tcvitals ]; then
    echo "tcvitals found" >> $pgmout
-   cp $HOMENHCp1/tcvitals nhc
+   cp ${HOMENHC}/tcvitals nhc
 else
    echo "WARNING: tcvitals not found, create empty tcvitals" >> $pgmout
    > nhc
@@ -221,17 +208,17 @@ touch nhc
 [ "$copy_back" = 'YES' ]  &&  cat nhc >> $ARCHSYND/syndat_tcvitals.$year
 
 mv -f nhc nhc1
-$USHSYND/parse-storm-type.pl nhc1 > nhc
+${USHgfs}/parse-storm-type.pl nhc1 > nhc
 
 cp -p nhc nhc.ORIG
 # JTWC/FNOC ... execute syndat_getjtbul script to write into working directory
 #               as fnoc; copy to archive
-$USHSYND/syndat_getjtbul.sh $CDATE10
+${USHgfs}/syndat_getjtbul.sh $CDATE10
 touch fnoc
 [ "$copy_back" = 'YES' ]  &&  cat fnoc >> $ARCHSYND/syndat_tcvitals.$year
 
 mv -f fnoc fnoc1
-$USHSYND/parse-storm-type.pl fnoc1 > fnoc
+${USHgfs}/parse-storm-type.pl fnoc1 > fnoc
 
 if [ $SENDDBN = YES ]; then
   $DBNROOT/bin/dbn_alert MODEL SYNDAT_TCVITALS $job $ARCHSYND/syndat_tcvitals.$year
@@ -245,7 +232,7 @@ cp $slmask slmask.126
  
 #  Execute program syndat_qctropcy
 
-pgm=$(basename $EXECSYND/syndat_qctropcy.x)
+pgm=$(basename ${EXECgfs}/syndat_qctropcy.x)
 export pgm
 if [ -s prep_step ]; then
    set +u
@@ -259,7 +246,7 @@ fi
 echo "$CDATE10"      > cdate10.dat
 export FORT11=slmask.126
 export FORT12=cdate10.dat
-${EXECSYND}/${pgm} >> $pgmout 2> errfile
+${EXECgfs}/${pgm} >> $pgmout 2> errfile
 errqct=$?
 ###cat errfile
 cat errfile >> $pgmout
@@ -323,28 +310,25 @@ diff nhc nhc.ORIG > /dev/null
 errdiff=$?
 
 ###################################
-#  Update NHC file in $HOMENHC
+#  Update NHC file in ${HOMENHC}
 ###################################
 
 if test "$errdiff" -ne '0'
 then
 
    if [ "$copy_back" = 'YES' -a ${envir} = 'prod' ]; then
-      if [ -s $HOMENHC/tcvitals ]; then
-         cp nhc $HOMENHC/tcvitals
-      fi
-      if [ -s $HOMENHCp1/tcvitals ]; then
-         cp nhc $HOMENHCp1/tcvitals
+      if [ -s ${HOMENHC}/tcvitals ]; then
+         cp nhc ${HOMENHC}/tcvitals
       fi
 
       err=$?
 
       if [ "$err" -ne '0' ]; then
          msg="###ERROR: Previous NHC Synthetic Data Record File \
-$HOMENHC/tcvitals not updated by syndat_qctropcy"
+${HOMENHC}/tcvitals not updated by syndat_qctropcy"
       else
          msg="Previous NHC Synthetic Data Record File \
-$HOMENHC/tcvitals successfully updated by syndat_qctropcy"
+${HOMENHC}/tcvitals successfully updated by syndat_qctropcy"
       fi
 
       set +x
@@ -357,7 +341,7 @@ $HOMENHC/tcvitals successfully updated by syndat_qctropcy"
 
 else
 
-   msg="Previous NHC Synthetic Data Record File $HOMENHC/tcvitals \
+   msg="Previous NHC Synthetic Data Record File ${HOMENHC}/tcvitals \
 not changed by syndat_qctropcy"
    set +x
    echo
diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh
index 01a21bd12c..11c0afb990 100755
--- a/ush/tropcy_relocate.sh
+++ b/ush/tropcy_relocate.sh
@@ -84,20 +84,13 @@
 #     envir         String indicating environment under which job runs ('prod'
 #                   or 'test')
 #                   Default is "prod"
-#     HOMEALL       String indicating parent directory path for some or 
-#                   all files under which job runs.
-#                   If the imported variable MACHINE!=sgi, then the default is
-#                   "/nw${envir}"; otherwise the default is
-#                   "/disk1/users/snake/prepobs"
-#     HOMERELO      String indicating parent directory path for relocation
-#                   specific files.  (May be under HOMEALL)
 #     envir_getges  String indicating environment under which GETGES utility
-#                   ush runs (see documentation in $USHGETGES/getges.sh for
+#                   ush runs (see documentation in ${USHgfs}/getges.sh for
 #                   more information)
 #                   Default is "$envir"
 #     network_getges
 #                   String indicating job network under which GETGES utility
-#                   ush runs (see documentation in $USHGETGES/getges.sh for
+#                   ush runs (see documentation in ${USHgfs}/getges.sh for
 #                   more information)
 #                   Default is "global" unless the center relocation processing
 #                   date/time is not a multiple of 3-hrs, then the default is
@@ -122,34 +115,20 @@
 #     POE_OPTS      String indicating options to use with poe command
 #                   Default is "-pgmmodel mpmd -ilevel 2 -labelio yes \
 #                   -stdoutmode ordered"
-#     USHGETGES     String indicating directory path for GETGES utility ush
-#                   file
-#     USHRELO       String indicating directory path for RELOCATE ush files
-#                   Default is "${HOMERELO}/ush"
-#     EXECRELO      String indicating directory path for RELOCATE executables
-#                   Default is "${HOMERELO}/exec"
-#     FIXRELO       String indicating directory path for RELOCATE data fix-
-#                   field files
-#                   Default is "${HOMERELO}/fix"
-#     EXECUTIL      String indicating directory path for utility program
-#                   executables
-#                   If the imported variable MACHINE!=sgi, then the default is
-#                   "/nwprod/util/exec"; otherwise the default is
-#                   "${HOMEALL}/util/exec"
 #     RELOX         String indicating executable path for RELOCATE_MV_NVORTEX
 #                   program 
-#                   Default is "$EXECRELO/relocate_mv_nvortex"
+#                   Default is "${EXECgfs}/relocate_mv_nvortex"
 #     SUPVX         String indicating executable path for SUPVIT utility
 #                   program
-#                   Default is "$EXECUTIL/supvit.x"
+#                   Default is "${EXECgfs}/supvit.x"
 #     GETTX         String indicating executable path for GETTRK utility
 #                   program
-#                   Default is "$EXECUTIL/gettrk"
+#                   Default is "${EXECgfs}/gettrk"
 #     BKGFREQ       Frequency of background files for relocation
 #                   Default is "3" 
 #     SENDDBN       String when set to "YES" alerts output files to $COMSP
 #     NDATE         String indicating executable path for NDATE utility program
-#                   Default is "$EXECUTIL/ndate"
+#                   Default is "${EXECgfs}/ndate"
 #
 #     These do not have to be exported to this script.  If they are, they will
 #      be used by the script.  If they are not, they will be skipped
@@ -166,18 +145,18 @@
 #
 #   Modules and files referenced:
 #                  Herefile: RELOCATE_GES
-#                  $USHRELO/tropcy_relocate_extrkr.sh
-#                  $USHGETGES/getges.sh
+#                  ${USHgfs}/tropcy_relocate_extrkr.sh
+#                  ${USHgfs}/getges.sh
 #                  $NDATE (here and in child script
-#                        $USHRELO/tropcy_relocate_extrkr.sh)
+#                        ${USHgfs}/tropcy_relocate_extrkr.sh)
 #                  /usr/bin/poe
 #                  postmsg
 #                  $DATA/prep_step (here and in child script
-#                        $USHRELO/tropcy_relocate_extrkr.sh)
+#                        ${USHgfs}/tropcy_relocate_extrkr.sh)
 #                  $DATA/err_exit (here and in child script
-#                        $USHRELO/tropcy_relocate_extrkr.sh)
+#                        ${USHgfs}/tropcy_relocate_extrkr.sh)
 #                  $DATA/err_chk (here and in child script
-#                        $USHRELO/tropcy_relocate_extrkr.sh)
+#                        ${USHgfs}/tropcy_relocate_extrkr.sh)
 #          NOTE: The last three scripts above are NOT REQUIRED utilities.
 #                If $DATA/prep_step not found, a scaled down version of it is
 #                executed in-line.  If $DATA/err_exit or $DATA/err_chk are not
@@ -188,7 +167,7 @@
 #     programs   :
 #          RELOCATE_MV_NVORTEX - executable $RELOX
 #                                 T126 GRIB global land/sea mask:
-#                                          $FIXRELO/global_slmask.t126.grb
+#                                          ${FIXgfs}/am/global_slmask.t126.grb
 #          SUPVIT               - executable $SUPVX
 #          GETTRK               - executable $GETTX
 #
@@ -204,7 +183,7 @@
 #
 ####
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 MACHINE=${MACHINE:-$(hostname -s | cut -c 1-3)}
 
@@ -275,14 +254,6 @@ set_trace
 
 envir=${envir:-prod}
 
-if [ $MACHINE != sgi ]; then
-   HOMEALL=${HOMEALL:-$OPSROOT}
-else
-   HOMEALL=${HOMEALL:-/disk1/users/snake/prepobs}
-fi
-
-HOMERELO=${HOMERELO:-${shared_global_home}}
-
 envir_getges=${envir_getges:-$envir}
 if [ $modhr -eq 0 ]; then
    network_getges=${network_getges:-global}
@@ -295,21 +266,12 @@ pgmout=${pgmout:-/dev/null}
 tstsp=${tstsp:-/tmp/null/}
 tmmark=${tmmark:-tm00}
 
-USHRELO=${USHRELO:-${HOMERELO}/ush}
-##USHGETGES=${USHGETGES:-/nwprod/util/ush}
-##USHGETGES=${USHGETGES:-${HOMERELO}/ush}
-USHGETGES=${USHGETGES:-${USHRELO}}
-
-EXECRELO=${EXECRELO:-${HOMERELO}/exec}
-
-FIXRELO=${FIXRELO:-${HOMERELO}/fix}
-
-RELOX=${RELOX:-$EXECRELO/relocate_mv_nvortex}
+RELOX=${RELOX:-${EXECgfs}/relocate_mv_nvortex}
 
 export BKGFREQ=${BKGFREQ:-1}
 
-SUPVX=${SUPVX:-$EXECRELO/supvit.x}
-GETTX=${GETTX:-$EXECRELO/gettrk}
+SUPVX=${SUPVX:-${EXECgfs}/supvit.x}
+GETTX=${GETTX:-${EXECgfs}/gettrk}
 
 ################################################
 # EXECUTE TROPICAL CYCLONE RELOCATION PROCESSING
@@ -355,7 +317,7 @@ echo "                    relocation processing date/time"
 echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
       echo
       set_trace
-      $USHGETGES/getges.sh -e $envir_getges -n $network_getges \
+      ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \
        -v $CDATE10 -f $fhr -t tcvges tcvitals.m${fhr}
       set +x
       echo
@@ -405,7 +367,7 @@ echo "                    relocation processing date/time"
 echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
       echo
       set_trace
-      $USHGETGES/getges.sh -e $envir_getges -n $network_getges \
+      ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \
        -v $CDATE10 -t $stype $sges
       errges=$?
       if test $errges -ne 0; then
@@ -439,7 +401,7 @@ to center relocation date/time;"
 #  ----------------------------------------------------------------------------
 
       if [ $fhr = "0"  ]; then
-         "${USHGETGES}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \
+         "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \
           -t "${stype}" > "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}"
          cp "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \
           "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}"
@@ -459,7 +421,7 @@ echo "                    relocation processing date/time"
 echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
       echo
       set_trace
-      $USHGETGES/getges.sh -e $envir_getges -n $network_getges \
+      ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \
        -v $CDATE10 -t $ptype $pges
       errges=$?
       if test $errges -ne 0; then
@@ -541,7 +503,7 @@ else
 #   $DATA/$RUN.$cycle.relocate.model_track.tm00
 #  --------------------------------------------
 
-   $USHRELO/tropcy_relocate_extrkr.sh
+   ${USHgfs}/tropcy_relocate_extrkr.sh
    err=$?
    if [ $err -ne 0 ]; then
 
@@ -550,12 +512,12 @@ else
 
       set +x
       echo
-      echo "$USHRELO/tropcy_relocate_extrkr.sh failed"
+      echo "${USHgfs}/tropcy_relocate_extrkr.sh failed"
       echo "ABNORMAL EXIT!!!!!!!!!!!"
       echo
       set_trace
       if [ -s $DATA/err_exit ]; then
-         $DATA/err_exit "Script $USHRELO/tropcy_relocate_extrkr.sh failed"
+         $DATA/err_exit "Script ${USHgfs}/tropcy_relocate_extrkr.sh failed"
       else
          exit 555
       fi
@@ -569,10 +531,10 @@ else
      rm fort.*
    fi
 
-   ln -sf $DATA/tcvitals.now1      fort.11
-   ln -sf $DATA/model_track.all    fort.30
-   ln -sf $DATA/rel_inform1        fort.62
-   ln -sf $DATA/tcvitals.relocate0 fort.65
+   ${NLN} $DATA/tcvitals.now1      fort.11
+   ${NLN} $DATA/model_track.all    fort.30
+   ${NLN} $DATA/rel_inform1        fort.62
+   ${NLN} $DATA/tcvitals.relocate0 fort.65
 
    i1=20
    i2=53
@@ -586,8 +548,8 @@ else
        tpref=p$fhr
      fi
 
-     ln -sf $DATA/sg${tpref}prep          fort.$i1
-     ln -sf $DATA/sg${tpref}prep.relocate fort.$i2
+     ${NLN} $DATA/sg${tpref}prep          fort.$i1
+     ${NLN} $DATA/sg${tpref}prep.relocate fort.$i2
 
      i1=$((i1+1))
      i2=$((i2+BKGFREQ))
diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh
index ede2318c4a..18e0851368 100755
--- a/ush/tropcy_relocate_extrkr.sh
+++ b/ush/tropcy_relocate_extrkr.sh
@@ -3,7 +3,7 @@
 # This script is executed by the script tropcy_relocate.sh
 # --------------------------------------------------------
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 export machine=${machine:-ZEUS}
 export machine=$(echo $machine|tr '[a-z]' '[A-Z]')
@@ -592,8 +592,8 @@ if [ -s fort.*  ]; then
   rm fort.*
 fi
 
-ln -s -f ${vdir}/vitals.${symd}${dishh}                  fort.31
-ln -s -f ${vdir}/vitals.upd.${cmodel}.${symd}${dishh}    fort.51
+${NLN} ${vdir}/vitals.${symd}${dishh}                  fort.31
+${NLN} ${vdir}/vitals.upd.${cmodel}.${symd}${dishh}    fort.51
 
 ##$XLF_LINKSSH
 #if [ -z $XLF_LINKSSH ] ; then
@@ -1528,19 +1528,19 @@ if [ -s fort.*  ]; then
   rm fort.*
 fi
 
-ln -s -f ${gribfile}                                   fort.11
-ln -s -f ${vdir}/tmp.gfs.atcfunix.${symdh}             fort.14
-ln -s -f ${vdir}/vitals.upd.${cmodel}.${symd}${dishh}  fort.12
-ln -s -f ${ixfile}                                     fort.31
-ln -s -f ${vdir}/trak.${cmodel}.all.${symdh}           fort.61
-ln -s -f ${vdir}/trak.${cmodel}.atcf.${symdh}          fort.62
-ln -s -f ${vdir}/trak.${cmodel}.radii.${symdh}         fort.63
-ln -s -f ${vdir}/trak.${cmodel}.atcfunix.${symdh}      fort.64
+${NLN} ${gribfile}                                   fort.11
+${NLN} ${vdir}/tmp.gfs.atcfunix.${symdh}             fort.14
+${NLN} ${vdir}/vitals.upd.${cmodel}.${symd}${dishh}  fort.12
+${NLN} ${ixfile}                                     fort.31
+${NLN} ${vdir}/trak.${cmodel}.all.${symdh}           fort.61
+${NLN} ${vdir}/trak.${cmodel}.atcf.${symdh}          fort.62
+${NLN} ${vdir}/trak.${cmodel}.radii.${symdh}         fort.63
+${NLN} ${vdir}/trak.${cmodel}.atcfunix.${symdh}      fort.64
 
 if [ $BKGFREQ -eq 1 ]; then
-  ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes_hrly fort.15
+  ${NLN} ${FIXgfs}/am/${cmodel}.tracker_leadtimes_hrly fort.15
 elif [ $BKGFREQ -eq 3 ]; then
-  ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes      fort.15
+  ${NLN} ${FIXgfs}/am/${cmodel}.tracker_leadtimes      fort.15
 fi
 
 ##$XLF_LINKSSH
diff --git a/ush/wafs_mkgbl.sh b/ush/wafs_mkgbl.sh
new file mode 100755
index 0000000000..e6139bc9d3
--- /dev/null
+++ b/ush/wafs_mkgbl.sh
@@ -0,0 +1,152 @@
+#  UTILITY SCRIPT NAME :  wafs_mkgbl.sh
+#               AUTHOR :  Mary Jacobs
+#         DATE WRITTEN :  11/06/96
+#
+#  Abstract:  This utility script produces the GFS WAFS
+#             bulletins.  
+#
+#     Input:  2 arguments are passed to this script.
+#             1st argument - Forecast Hour - format of 2I
+#             2nd argument - In hours 12-30, the designator of
+#                            a or b.
+#
+#     Logic:   If we are processing hours 12-30, we have the
+#              added variable of the a or b, and process
+#              accordingly.  For the other hours, the a or b is dropped.
+#
+echo "History: SEPT    1996 - First implementation of this utility script"
+echo "History: AUG     1999 - Modified for implementation on IBM SP"
+echo "                      - Allows users to run interactively" 
+#
+
+set -x
+hour_list="$1"
+sets_key=$2
+num=$#
+
+if test $num -ge 2
+then
+   echo " Appropriate number of arguments were passed"
+   set -x
+   if [ -z "$DATA" ]
+   then
+      export DATA=`pwd`
+      cd $DATA
+      setpdy.sh
+      . PDY
+   fi
+else
+   echo ""
+   echo "Usage: wafs_mkgbl.sh \$hour [a|b]"
+   echo ""
+   exit 16
+fi
+
+echo " ------------------------------------------"
+echo " BEGIN MAKING ${NET} WAFS PRODUCTS"
+echo " ------------------------------------------"
+
+echo "Enter Make WAFS utility."
+
+for hour in $hour_list
+do
+   ##############################
+   # Copy Input Field to $DATA
+   ##############################
+
+   if test ! -f pgrbf${hour}
+   then
+#       cp $COMIN/${RUN}.${cycle}.pgrbf${hour} pgrbf${hour}
+
+#      file name and forecast hour of GFS model data in Grib2 are 3 digits
+#      export fhr3=$hour
+#      if test $fhr3 -lt 100
+#      then
+#         export fhr3="0$fhr3"
+#      fi
+       fhr3="$(printf "%03d" $(( 10#$hour )) )"
+
+#      To solve Bugzilla #408: remove the dependency of grib1 files in gfs wafs job in next GFS upgrade
+#      Reason: It's not efficient to simply convert from grib2 to grib1 (costs 6 seconds with 415 records)
+#      Solution: Need to grep 'selected fields on selected levels' before CNVGRIB (costs 1 second with 92 records)
+       ${NLN} $COMIN/${RUN}.${cycle}.pgrb2.1p00.f$fhr3  pgrb2f${hour}
+       $WGRIB2 pgrb2f${hour} | grep -F -f $FIXgfs/grib_wafs.grb2to1.list | $WGRIB2 -i pgrb2f${hour} -grib pgrb2f${hour}.tmp
+#       on Cray, IOBUF_PARAMS has to used to speed up CNVGRIB
+#       export IOBUF_PARAMS='*:size=32M:count=4:verbose'
+       $CNVGRIB -g21 pgrb2f${hour}.tmp  pgrbf${hour}
+#       unset IOBUF_PARAMS
+   fi
+
+   #
+   # BAG - Put in fix on 20070925 to force the precision of U and V winds
+   #       to default to 1 through the use of the grib_wafs.namelist file.
+   #
+   $COPYGB -g3 -i0 -N$FIXgfs/grib_wafs.namelist -x pgrbf${hour} tmp
+   mv tmp pgrbf${hour}
+   $GRBINDEX pgrbf${hour} pgrbif${hour}
+
+   ##############################
+   # Process WAFS
+   ##############################
+
+   if test $hour -ge '12' -a $hour -le '30'
+   then
+       sets=$sets_key
+       set +x
+       echo "We are processing the primary and secondary sets of hours."
+       echo "These sets are the   a   and   b   of hours 12-30."
+       set -x
+   else
+     # This is for hours 00/06 and 36-72.
+     unset sets
+   fi
+
+   export pgm=wafs_makewafs
+   . prep_step
+
+   export FORT11="pgrbf${hour}"
+   export FORT31="pgrbif${hour}"
+   export FORT51="xtrn.wfs${NET}${hour}${sets}"
+   export FORT53="com.wafs${hour}${sets}"
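+   # Fortran unit assignments read by wafs_makewafs.x: 11 = input GRIB file,
+   # 31 = its index, 51 = the xtrn transmission output, 53 = the com.wafs output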
+
+   startmsg
+   $EXECgfs/wafs_makewafs.x < $FIXgfs/grib_wfs${NET}${hour}${sets} >>$pgmout 2>errfile
+   export err=$?;err_chk
+
+
+   ##############################
+   # Post Files to PCOM 
+   ##############################
+
+   if test "$SENDCOM" = 'YES'
+   then
+      cp xtrn.wfs${NET}${hour}${sets} $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix
+#      cp com.wafs${hour}${sets} $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix
+
+#      if test "$SENDDBN_NTC" = 'YES'
+#      then
+#         if test "$NET" = 'gfs'
+#         then
+#               $DBNROOT/bin/dbn_alert MODEL GFS_WAFS $job \
+#                         $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix
+#               $DBNROOT/bin/dbn_alert MODEL GFS_XWAFS $job \
+#                         $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix
+#         fi
+#      fi
+   fi
+
+   ##############################
+   # Distribute Data 
+   ##############################
+
+   if [ "$SENDDBN_NTC" = 'YES' ] ; then
+      $DBNROOT/bin/dbn_alert GRIB_LOW $NET $job $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix
+   else
+      echo "xtrn.wfs${NET}${cyc}${hour}${sets}.$job file not posted to db_net."
+   fi
+
+   echo "Wafs Processing $hour hour completed normally"
+
+done
+
+exit
diff --git a/ush/wave_extractvars.sh b/ush/wave_extractvars.sh
new file mode 100755
index 0000000000..32ee44986b
--- /dev/null
+++ b/ush/wave_extractvars.sh
@@ -0,0 +1,34 @@
+#! /usr/bin/env bash
+
+################################################################################
+## UNIX Script Documentation Block
+## Script name:         wave_extractvars.sh
+## Script description:  Extracts variables from wave products
+##                      and saves these variables in arcdir
+#######################
+# Main body starts here
+#######################
+
+source "${USHgfs}/preamble.sh"
+
+subdata=${1}
+
+[[ -d "${subdata}" ]] || mkdir -p "${subdata}"
+
+for (( nh = FHOUT_WAV_EXTRACT; nh <= FHMAX_WAV; nh = nh + FHOUT_WAV_EXTRACT )); do
+  fnh=$(printf "%3.3d" "${nh}")
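+  # e.g. nh=6 -> fnh=006, matching the 3-digit forecast hour in the grib2 file names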
+
+  infile=${COMIN_WAVE_GRID}/${RUN}wave.t${cyc}z.global.${wavres}.f${fnh}.grib2
+  outfile=${subdata}/${RUN}wave.t${cyc}z.global.${wavres}.f${fnh}.grib2
+  rm -f "${outfile}" # Remove outfile if it already exists before extraction
+
+  if [[ -f "${infile}" ]]; then # Check if input file exists before extraction
+    # shellcheck disable=SC2312 
+    ${WGRIB2} "${infile}" | grep -F -f "${varlist_wav}" | ${WGRIB2} -i "${infile}" -append -grib "${outfile}"
+  else
+    echo "WARNING: ${infile} does not exist."
+  fi 
+  copy_to_comout "${outfile}" "${ARC_RFCST_PROD_WAV}"
+done # nh
+
+exit 0
diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh
index af28760269..99f89f3f37 100755
--- a/ush/wave_grib2_sbs.sh
+++ b/ush/wave_grib2_sbs.sh
@@ -25,7 +25,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "${HOMEgfs}/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -72,7 +72,7 @@ if [[ -n ${waveMEMB} ]]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi
 outfile="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2"
 
 # Only create file if not present in COM
-if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
+if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then
 
   set +x
   echo ' '
@@ -82,8 +82,8 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
   echo "   Model ID         : $WAV_MOD_TAG"
   set_trace
 
-  if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \
-	 [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \
+  if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \
+	 [[ -z "${COMOUT_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \
 	 [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \
      [[ -z "${SENDDBN}" ]]; then
     set +x
@@ -110,8 +110,8 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
 
   # 0.e Links to working directory
 
-  ln -s "${DATA}/mod_def.${grdID}" "mod_def.ww3"
-  ln -s "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3"
+  ${NLN} "${DATA}/mod_def.${grdID}" "mod_def.ww3"
+  ${NLN} "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3"
 
   # --------------------------------------------------------------------------- #
   # 1.  Generate GRIB file with all data
@@ -138,11 +138,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
 
   set +x
   echo "   Run ww3_grib2"
-  echo "   Executing ${EXECwave}/ww3_grib"
+  echo "   Executing ${EXECgfs}/ww3_grib"
   set_trace
 
   export pgm=ww3_grib;. prep_step
-  "${EXECwave}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1
+  "${EXECgfs}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1
   export err=$?;err_chk
 
   if [ ! -s gribfile ]; then
@@ -157,11 +157,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
   fi
 
   if (( fhr > 0 )); then 
-    ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COM_WAVE_GRID}/${outfile}"
+    ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COMOUT_WAVE_GRID}/${outfile}"
     err=$?
   else
     ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" \
-      -set table_1.4 1 -set table_1.2 1 -grib "${COM_WAVE_GRID}/${outfile}"
+      -set table_1.4 1 -set table_1.2 1 -grib "${COMOUT_WAVE_GRID}/${outfile}"
     err=$?
   fi
 
@@ -177,7 +177,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
   fi
 
   # Create index
-  ${WGRIB2} -s "${COM_WAVE_GRID}/${outfile}" > "${COM_WAVE_GRID}/${outfile}.idx"
+  ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${outfile}" > "${COMOUT_WAVE_GRID}/${outfile}.idx"
 
   # Create grib2 subgrid if this is the source grid
   if [[ "${grdID}" = "${WAV_SUBGRBSRC}" ]]; then
@@ -186,14 +186,14 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
       subgrbnam=$(echo ${!subgrb} | cut -d " " -f 21)
       subgrbres=$(echo ${!subgrb} | cut -d " " -f 22)
       subfnam="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${subgrbnam}.${subgrbres}.f${FH3}.grib2"
-      ${COPYGB2} -g "${subgrbref}" -i0 -x "${COM_WAVE_GRID}/${outfile}" "${COM_WAVE_GRID}/${subfnam}"
-      ${WGRIB2} -s "${COM_WAVE_GRID}/${subfnam}" > "${COM_WAVE_GRID}/${subfnam}.idx"
+      ${COPYGB2} -g "${subgrbref}" -i0 -x "${COMOUT_WAVE_GRID}/${outfile}" "${COMOUT_WAVE_GRID}/${subfnam}"
+      ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${subfnam}" > "${COMOUT_WAVE_GRID}/${subfnam}.idx"
    done
   fi
 
   # 1.e Save in /com
 
-  if [[ ! -s "${COM_WAVE_GRID}/${outfile}" ]]; then
+  if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}" ]]; then
     set +x
     echo ' '
     echo '********************************************* '
@@ -205,7 +205,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
     set_trace
     exit 4
   fi
-  if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
+  if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then
     set +x
     echo ' '
     echo '*************************************************** '
@@ -220,11 +220,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
 
   if [[ "${SENDDBN}" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]]; then
     set +x
-    echo "   Alerting GRIB file as ${COM_WAVE_GRID}/${outfile}"
-    echo "   Alerting GRIB index file as ${COM_WAVE_GRID}/${outfile}.idx"
+    echo "   Alerting GRIB file as ${COMOUT_WAVE_GRID}/${outfile}"
+    echo "   Alerting GRIB index file as ${COMOUT_WAVE_GRID}/${outfile}.idx"
     set_trace
-    "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COM_WAVE_GRID}/${outfile}"
-    "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COM_WAVE_GRID}/${outfile}.idx"
+    "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COMOUT_WAVE_GRID}/${outfile}"
+    "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COMOUT_WAVE_GRID}/${outfile}.idx"
   else
     echo "${outfile} is global.0p50 or SENDDBN is NO, no alert sent"
   fi
@@ -245,7 +245,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
 else
   set +x
   echo ' '
-  echo " File ${COM_WAVE_GRID}/${outfile} found, skipping generation process"
+  echo " File ${COMOUT_WAVE_GRID}/${outfile} found, skipping generation process"
   echo ' '
   set_trace
 fi
diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh
index c11a75f89d..31b7808c16 100755
--- a/ush/wave_grid_interp_sbs.sh
+++ b/ush/wave_grid_interp_sbs.sh
@@ -25,7 +25,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -65,8 +65,8 @@ source "$HOMEgfs/ush/preamble.sh"
   echo "   Model ID         : $WAV_MOD_TAG"
   set_trace
 
-  if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \
-	 [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \
+  if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \
+	 [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \
 	 [ -z "${waveGRD}" ]
   then
     set +x
@@ -75,7 +75,7 @@ source "$HOMEgfs/ush/preamble.sh"
     echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***'
     echo '***************************************************'
     echo ' '
-    echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}"
+    echo "${PDY}${cyc} ${cycle} ${EXECgfs} ${COMOUT_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}"
     set_trace
     exit 1
   fi
@@ -85,18 +85,16 @@ source "$HOMEgfs/ush/preamble.sh"
   rm -f ${DATA}/output_${ymdh}0000/out_grd.$grdID
 
   if [ ! -f ${DATA}/${grdID}_interp.inp.tmpl ]; then
-    cp $PARMwave/${grdID}_interp.inp.tmpl ${DATA}
+    cp "${PARMgfs}/wave/${grdID}_interp.inp.tmpl" "${DATA}/${grdID}_interp.inp.tmpl"
   fi
-  ln -sf ${DATA}/${grdID}_interp.inp.tmpl .
+  ${NLN} "${DATA}/${grdID}_interp.inp.tmpl" "${grdID}_interp.inp.tmpl"
 
-  for ID in $waveGRD
-  do
-    ln -sf ${DATA}/output_${ymdh}0000/out_grd.$ID .
+  for ID in ${waveGRD}; do
+    ${NLN} "${DATA}/output_${ymdh}0000/out_grd.${ID}" "out_grd.${ID}"
   done
 
-  for ID in $waveGRD $grdID
-  do
-    ln -sf ${DATA}/mod_def.$ID .
+  for ID in ${waveGRD} ${grdID}; do
+    ${NLN} "${DATA}/mod_def.${ID}" "mod_def.${ID}"
   done
 
 # --------------------------------------------------------------------------- #
@@ -113,42 +111,42 @@ source "$HOMEgfs/ush/preamble.sh"
 
   wht_OK='no'
   if [ ! -f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ]; then
-    if [ -f $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ]
+    if [ -f ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ]
     then
       set +x
       echo ' '
-      echo " Copying $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} "
+      echo " Copying ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} "
       set_trace
-      cp $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA}
+      cp ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA}
       wht_OK='yes'
     else
       set +x
       echo ' '
-      echo " Not found: $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} "
+      echo " Not found: ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} "
     fi
   fi
 # Check and link weights file
   if [ -f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ]
   then
-    ln -s ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin
+    ${NLN} ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin
   fi
 
 # 1.b Run interpolation code
 
   set +x
   echo "   Run ww3_gint
-  echo "   Executing $EXECwave/ww3_gint
+  echo "   Executing ${EXECgfs}/ww3_gint
   set_trace
 
   export pgm=ww3_gint;. prep_step
-  $EXECwave/ww3_gint 1> gint.${grdID}.out 2>&1
+  ${EXECgfs}/ww3_gint 1> gint.${grdID}.out 2>&1
   export err=$?;err_chk
 
 # Write interpolation file to main TEMP dir area if not there yet
   if [ "wht_OK" = 'no' ]
   then
     cp -f ./WHTGRIDINT.bin ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID}
-    cp -f ./WHTGRIDINT.bin ${FIXwave}/ww3_gint.WHTGRIDINT.bin.${grdID}
+    cp -f ./WHTGRIDINT.bin ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID}
   fi
 
 
@@ -173,9 +171,9 @@ source "$HOMEgfs/ush/preamble.sh"
 # 1.c Save in /com
 
   set +x
-  echo "   Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
+  echo "   Saving GRID file as ${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
   set_trace
-  cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
+  cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
 
 #    if [ "$SENDDBN" = 'YES' ]
 #    then
diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh
index 5b1b212a16..1e8c44054a 100755
--- a/ush/wave_grid_moddef.sh
+++ b/ush/wave_grid_moddef.sh
@@ -20,7 +20,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -59,7 +59,7 @@ source "$HOMEgfs/ush/preamble.sh"
 # 0.c Define directories and the search path.
 #     The tested variables should be exported by the postprocessor script.
 
-  if [ -z "$grdID" ] || [ -z "$EXECwave" ] || [ -z "$wave_sys_ver" ]
+  if [ -z "$grdID" ] || [ -z "${EXECgfs}" ]
   then
     set +x
     echo ' '
@@ -77,14 +77,22 @@ source "$HOMEgfs/ush/preamble.sh"
   set +x
   echo ' '
   echo '   Creating mod_def file ...'
-  echo "   Executing $EXECwave/ww3_grid"
+  echo "   Executing ${EXECgfs}/ww3_grid"
   echo ' '
   set_trace
  
   rm -f ww3_grid.inp 
-  ln -sf ../ww3_grid.inp.$grdID ww3_grid.inp
+  ${NLN} ../ww3_grid.inp.$grdID ww3_grid.inp
+
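+  # If a ${grdID}.msh mesh file was staged one directory up, refresh the local link to it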
+  if [ -f ../${grdID}.msh ]
+  then
+     rm -f ${grdID}.msh 
+     ${NLN} ../${grdID}.msh ${grdID}.msh
+  fi
+
+
  
-  $EXECwave/ww3_grid 1> grid_${grdID}.out 2>&1
+  "${EXECgfs}/ww3_grid" 1> "grid_${grdID}.out" 2>&1
   err=$?
 
   if [ "$err" != '0' ]
@@ -99,10 +107,10 @@ source "$HOMEgfs/ush/preamble.sh"
     exit 3
   fi
  
-  if [ -f mod_def.ww3 ]
+  if [[ -f mod_def.ww3 ]]
   then
-    cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}"
-    mv mod_def.ww3 ../mod_def.$grdID
+    cp mod_def.ww3 "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}"
+    mv mod_def.ww3 "../mod_def.${grdID}"
   else
     set +x
     echo ' '
@@ -118,6 +126,6 @@ source "$HOMEgfs/ush/preamble.sh"
 # 3.  Clean up
 
 cd ..
-rm -rf moddef_$grdID
+rm -rf "moddef_${grdID}"
 
 # End of ww3_mod_def.sh ------------------------------------------------- #
diff --git a/ush/wave_outp_cat.sh b/ush/wave_outp_cat.sh
index f4bf6b2294..6ce3ce06cf 100755
--- a/ush/wave_outp_cat.sh
+++ b/ush/wave_outp_cat.sh
@@ -21,7 +21,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
   bloc=$1
diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh
index 5acc0f95ab..37accbae49 100755
--- a/ush/wave_outp_spec.sh
+++ b/ush/wave_outp_spec.sh
@@ -22,7 +22,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
   bloc=$1
@@ -31,6 +31,7 @@ source "$HOMEgfs/ush/preamble.sh"
   workdir=$4
 
   YMDHE=$($NDATE $FHMAX_WAV_PNT $CDATE)
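+  # First model output time: the cycle time shifted by OFFSET_START_HOUR, used
+  # below in place of CDATE to detect the first output record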
+  model_start_date=$(${NDATE} ${OFFSET_START_HOUR} "${PDY}${cyc}")
 
   cd $workdir
 
@@ -73,21 +74,7 @@ source "$HOMEgfs/ush/preamble.sh"
     exit 1
   else
     buoy=$bloc
-    grep $buoy ${DATA}/buoy_log.ww3 > tmp_list.loc
-    while read line
-    do
-      buoy_name=$(echo $line | awk '{print $2}')
-      if [ $buoy = $buoy_name ]
-      then
-        point=$(echo $line | awk '{ print $1 }')
-        set +x
-        echo "              Location ID/#   : $buoy (${point})"
-        echo "   Spectral output start time : $ymdh "
-        echo ' '
-        set_trace
-        break
-      fi
-    done < tmp_list.loc
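+    # Look up the point index for this buoy ID in buoy_log.ww3 (column 2 = ID, column 1 = index)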
+    point=$(awk "{if (\$2 == \"${buoy}\"){print \$1; exit} }" "${DATA}/buoy_log.ww3")
     if [ -z "$point" ]
     then
       set +x
@@ -97,6 +84,11 @@ source "$HOMEgfs/ush/preamble.sh"
       echo ' '
       set_trace
       exit 2
+    else
+      set +x
+      echo "              Location ID/#   : $buoy (${point})"
+      echo "   Spectral output start time : $ymdh "
+      echo ' '
     fi
   fi
 
@@ -104,7 +96,7 @@ source "$HOMEgfs/ush/preamble.sh"
 # 0.c Define directories and the search path.
 #     The tested variables should be exported by the postprocessor script.
 
-  if [ -z "$CDATE" ] || [ -z "$dtspec" ] || [ -z "$EXECwave" ] || \
+  if [ -z "$CDATE" ] || [ -z "$dtspec" ] || [ -z "${EXECgfs}" ] || \
      [ -z "$WAV_MOD_TAG" ] || [ -z "${STA_DIR}" ]
   then
     set +x
@@ -135,8 +127,8 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 0.f Links to mother directory
 
-  ln -s ${DATA}/output_${ymdh}0000/mod_def.${waveuoutpGRD} ./mod_def.ww3
-  ln -s ${DATA}/output_${ymdh}0000/out_pnt.${waveuoutpGRD} ./out_pnt.ww3
+  ${NLN} ${DATA}/output_${ymdh}0000/mod_def.${waveuoutpGRD} ./mod_def.ww3
+  ${NLN} ${DATA}/output_${ymdh}0000/out_pnt.${waveuoutpGRD} ./out_pnt.ww3
 
 # --------------------------------------------------------------------------- #
 # 2.  Generate spectral data file
@@ -170,11 +162,11 @@ source "$HOMEgfs/ush/preamble.sh"
 # 2.b Run the postprocessor
 
   set +x
-  echo "   Executing $EXECwave/ww3_outp"
+  echo "   Executing ${EXECgfs}/ww3_outp"
   set_trace
 
   export pgm=ww3_outp;. prep_step
-  $EXECwave/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1
+  ${EXECgfs}/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1
   export err=$?;err_chk
 
 
@@ -196,31 +188,31 @@ source "$HOMEgfs/ush/preamble.sh"
 
   if [ -f $outfile ]
   then
-   if [ "${ymdh}" = "${CDATE}" ]
+   if [ "${ymdh}" = "${model_start_date}" ]
    then
      if [ "$specdir" = "bull" ]
      then
-       cat $outfile | sed -e '9,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull
-       cat $coutfile | sed -e '8,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull
+       sed '9,$d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull"
+       sed '8,$d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull"
      else
-       cat $outfile >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec
+       cat $outfile >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec"
      fi
    elif [ "${ymdh}" = "${YMDHE}" ]
    then
      if [ "$specdir" = "bull" ]
      then
-       cat $outfile | sed -e '1,7d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull
-       cat $coutfile | sed -e '1,6d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull
+       sed '1,7d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull"
+       sed '1,6d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull"
      else
-       cat $outfile | sed -n "/^${YMD} ${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec
+       sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec"
      fi
    else
      if [ "$specdir" = "bull" ]
      then
-       cat $outfile | sed -e '1,7d' | sed -e '2,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull
-       cat $coutfile | sed -e '1,6d' | sed -e '2,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull
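+       # sed 'Nq;d' prints only line N of the file (earlier lines are deleted and
+       # sed quits after N), i.e. the single data record for this forecast hour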
+       sed '8q;d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull"
+       sed '7q;d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull"
      else
-       cat $outfile | sed -n "/^${YMD} ${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec
+       sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec"
      fi
    fi
   else
@@ -237,6 +229,6 @@ source "$HOMEgfs/ush/preamble.sh"
 # 3.b Clean up the rest
 
 cd ..
-rm -rf ${specdir}_${bloc}
+rm -rf "${specdir}_${bloc}"
 
 # End of ww3_outp_spec.sh ---------------------------------------------------- #
diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh
index 6b1ab19db2..927710c581 100755
--- a/ush/wave_prnc_cur.sh
+++ b/ush/wave_prnc_cur.sh
@@ -22,7 +22,7 @@
 ################################################################################
 #
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 ymdh_rtofs=$1
 curfile=$2
@@ -46,7 +46,7 @@ mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc"
 # Convert to regular lat lon file
 # If weights need to be regenerated due to CDO ver change, use:
 # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc
-cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc
+cp ${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc ./weights.nc
 
 # Interpolate to regular 5 min grid
 ${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc"
@@ -65,17 +65,17 @@ rm -f cur_temp[123].nc cur_5min_??.nc "cur_glo_uv_${PDY}_${fext}${fh3}.nc weight
 
 if [ ${flagfirst}  = "T" ]
 then
-  sed -e "s/HDRFL/T/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp
+  sed -e "s/HDRFL/T/g" ${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp
 else
-  sed -e "s/HDRFL/F/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp
+  sed -e "s/HDRFL/F/g" ${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp
 fi
 
 rm -f cur.nc
-ln -s "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc"
-ln -s "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3
+${NLN} "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc"
+${NLN} "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3
 
 export pgm=ww3_prnc;. prep_step
-$EXECwave/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1
+${EXECgfs}/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1
 
 export err=$?; err_chk
 
diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh
index 5ec1d7fc2e..be089c30bd 100755
--- a/ush/wave_prnc_ice.sh
+++ b/ush/wave_prnc_ice.sh
@@ -27,7 +27,7 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
@@ -36,7 +36,7 @@ source "$HOMEgfs/ush/preamble.sh"
   rm -rf ice
   mkdir ice
   cd ice
-  ln -s ${DATA}/postmsg .
+  ${NLN} "${DATA}/postmsg" postmsg
 
 # 0.b Define directories and the search path.
 #     The tested variables should be exported by the postprocessor script.
@@ -55,8 +55,8 @@ source "$HOMEgfs/ush/preamble.sh"
   echo "Making ice fields."
 
   if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \
-     [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \
-     [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COM_OBS}" ]]; then
+     [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECgfs}" ]] || \
+     [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COMIN_OBS}" ]]; then
   
     set +x
     echo ' '
@@ -71,13 +71,13 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 0.c Links to working directory
 
-  ln -s ${DATA}/mod_def.$WAVEICE_FID mod_def.ww3
+  ${NLN} ${DATA}/mod_def.$WAVEICE_FID mod_def.ww3
 
 # --------------------------------------------------------------------------- #
 # 1.  Get the necessary files
 # 1.a Copy the ice data file
 
-  file=${COM_OBS}/${WAVICEFILE}
+  file=${COMIN_OBS}/${WAVICEFILE}
 
   if [ -f $file ]
   then
@@ -144,7 +144,7 @@ source "$HOMEgfs/ush/preamble.sh"
 
   export pgm=ww3_prnc;. prep_step
 
-  $EXECwave/ww3_prnc 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1 
+  ${EXECgfs}/ww3_prnc 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1
   export err=$?; err_chk
 
   if [ "$err" != '0' ]
@@ -178,9 +178,9 @@ source "$HOMEgfs/ush/preamble.sh"
   fi
  
   set +x
-  echo "   Saving ice.ww3 as ${COM_WAVE_PREP}/${icefile}"
+  echo "   Saving ice.ww3 as ${COMOUT_WAVE_PREP}/${icefile}"
   set_trace
-  cp ice.ww3 "${COM_WAVE_PREP}/${icefile}"
+  cp ice.ww3 "${COMOUT_WAVE_PREP}/${icefile}"
   rm -f ice.ww3
 
 # --------------------------------------------------------------------------- #
diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh
index 1a8d6d6cc5..f82849854f 100755
--- a/ush/wave_tar.sh
+++ b/ush/wave_tar.sh
@@ -25,11 +25,11 @@
 # --------------------------------------------------------------------------- #
 # 0.  Preparations
 
-source "$HOMEgfs/ush/preamble.sh"
+source "${USHgfs}/preamble.sh"
 
 # 0.a Basic modes of operation
 
-  cd $DATA
+  cd "${DATA}"
   echo "Making TAR FILE"
 
   alertName=$(echo $RUN|tr [a-z] [A-Z])
@@ -47,7 +47,7 @@ source "$HOMEgfs/ush/preamble.sh"
 
 # 0.b Check if type set
 
-  if [ "$#" -lt '3' ]
+  if [[ "$#" -lt '3' ]]
   then
     set +x
     echo ' '
@@ -64,9 +64,9 @@ source "$HOMEgfs/ush/preamble.sh"
   fi
 
   filext=$type
-  if [ "$type" = "ibp" ]; then filext='spec'; fi
-  if [ "$type" = "ibpbull" ]; then filext='bull'; fi
-  if [ "$type" = "ibpcbull" ]; then filext='cbull'; fi
+  if [[ "$type" = "ibp" ]]; then filext='spec'; fi
+  if [[ "$type" = "ibpbull" ]]; then filext='bull'; fi
+  if [[ "$type" = "ibpcbull" ]]; then filext='cbull'; fi
 
 
   rm -rf TAR_${filext}_$ID 
@@ -76,7 +76,7 @@ source "$HOMEgfs/ush/preamble.sh"
 # 0.c Define directories and the search path.
 #     The tested variables should be exported by the postprocessor script.
 
-  if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] ||  \
+  if [[ -z "${cycle}" ]] || [[ -z "${COMOUT_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] ||  \
      [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then
     set +x
     echo ' '
@@ -88,7 +88,7 @@ source "$HOMEgfs/ush/preamble.sh"
     exit 2
   fi
 
-  cd ${STA_DIR}/${filext}
+  cd "${STA_DIR}/${filext}"
 
 # --------------------------------------------------------------------------- #
 # 2.  Generate tar file (spectral files are compressed)
@@ -98,21 +98,27 @@ source "$HOMEgfs/ush/preamble.sh"
   echo '   Making tar file ...'
   set_trace
 
-  count=0
   countMAX=5
   tardone='no'
-
-  while [ "$count" -lt "$countMAX" ] && [ "$tardone" = 'no' ]
+  sleep_interval=10
+  
+  while [[ "${tardone}" = "no" ]]
   do
     
     nf=$(ls | awk '/'$ID.*.$filext'/ {a++} END {print a}')
     nbm2=$(( $nb - 2 ))
-    if [ $nf -ge $nbm2 ]
-    then 
-      tar -cf $ID.$cycle.${type}_tar ./$ID.*.$filext
+    if [[ "${nf}" -ge "${nbm2}" ]]
+    then
+
+      tar -cf "${ID}.${cycle}.${type}_tar" ./${ID}.*.${filext}
       exit=$?
+      filename="${ID}.${cycle}.${type}_tar" 
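+      # Guard against the tar file being slow to appear: wait_for_file re-checks
+      # every ${sleep_interval}s, up to the limit implied by countMAX (see below)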
+      if ! wait_for_file "${filename}" "${sleep_interval}" "${countMAX}" ; then
+        echo "FATAL ERROR: File ${filename} not found after waiting $(( sleep_interval * (countMAX + 1) )) secs"
+        exit 3
+      fi
 
-      if  [ "$exit" != '0' ]
+      if  [[ "${exit}" != '0' ]]
       then
         set +x
         echo ' '
@@ -124,21 +130,15 @@ source "$HOMEgfs/ush/preamble.sh"
         exit 3
       fi
       
-      if [ -f "$ID.$cycle.${type}_tar" ]
+      if [[ -f "${ID}.${cycle}.${type}_tar" ]]
       then
         tardone='yes'
       fi
-    else
-      set +x
-      echo ' All files not found for tar. Sleeping 10 seconds and trying again ..'
-      set_trace
-      sleep 10
-      count=$(expr $count + 1)
     fi
 
   done
 
-  if [ "$tardone" = 'no' ]
+  if [[ "${tardone}" = 'no' ]]
   then
     set +x
     echo ' '
@@ -150,15 +150,15 @@ source "$HOMEgfs/ush/preamble.sh"
     exit 3
   fi
 
-  if [ "$type" = 'spec' ]
+  if [[ "${type}" = 'spec' ]]
   then
-    if [ -s $ID.$cycle.${type}_tar ]
+    if [[ -s "${ID}.${cycle}.${type}_tar" ]]
     then
-      file_name=$ID.$cycle.${type}_tar.gz
-      /usr/bin/gzip -c $ID.$cycle.${type}_tar > ${file_name}
+      file_name="${ID}.${cycle}.${type}_tar.gz"
+      /usr/bin/gzip -c "${ID}.${cycle}.${type}_tar" > "${file_name}"
       exit=$?
 
-      if  [ "$exit" != '0' ]
+      if  [[ "${exit}" != '0' ]]
       then
         set +x
         echo ' '
@@ -171,7 +171,7 @@ source "$HOMEgfs/ush/preamble.sh"
       fi
     fi
   else
-    file_name=$ID.$cycle.${type}_tar
+    file_name="${ID}.${cycle}.${type}_tar"
   fi
 
 # --------------------------------------------------------------------------- #
@@ -179,14 +179,14 @@ source "$HOMEgfs/ush/preamble.sh"
 
   set +x
   echo ' '
-  echo "   Moving tar file ${file_name} to ${COM_WAVE_STATION} ..."
+  echo "   Moving tar file ${file_name} to ${COMOUT_WAVE_STATION} ..."
   set_trace
 
-  cp "${file_name}" "${COM_WAVE_STATION}/."
+  cp "${file_name}" "${COMOUT_WAVE_STATION}/."
 
   exit=$?
 
-  if  [ "$exit" != '0' ]
+  if  [[ "${exit}" != '0' ]]
   then
     set +x
     echo ' '
@@ -198,21 +198,21 @@ source "$HOMEgfs/ush/preamble.sh"
     exit 4
   fi
 
-  if [ "$SENDDBN" = 'YES' ]
+  if [[ "${SENDDBN}" = 'YES' ]]
   then
     set +x
     echo ' '
-    echo "   Alerting TAR file as ${COM_WAVE_STATION}/${file_name}"
+    echo "   Alerting TAR file as ${COMOUT_WAVE_STATION}/${file_name}"
     echo ' '
     set_trace
     "${DBNROOT}/bin/dbn_alert MODEL" "${alertName}_WAVE_TAR" "${job}" \
-      "${COM_WAVE_STATION}/${file_name}"
+      "${COMOUT_WAVE_STATION}/${file_name}"
   fi
 
 # --------------------------------------------------------------------------- #
 # 4.  Final clean up
 
-cd $DATA
+cd "${DATA}"
 
 if [[ ${KEEPDATA:-NO} == "NO" ]]; then
   set -v
diff --git a/versions/build.gaea.ver b/versions/build.gaea.ver
new file mode 100644
index 0000000000..b92fe8c1db
--- /dev/null
+++ b/versions/build.gaea.ver
@@ -0,0 +1,6 @@
+export stack_intel_ver=2023.1.0
+export stack_cray_mpich_ver=8.1.25
+export spack_env=gsi-addon-dev
+
+source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.hercules.ver b/versions/build.hercules.ver
index 5513466631..cab0c92111 100644
--- a/versions/build.hercules.ver
+++ b/versions/build.hercules.ver
@@ -1,3 +1,6 @@
 export stack_intel_ver=2021.9.0
 export stack_impi_ver=2021.9.0
+export intel_mkl_ver=2023.1.0
+export spack_env=gsi-addon-env
 source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.jet.ver b/versions/build.jet.ver
index ff85b1a801..55c0ea0bd1 100644
--- a/versions/build.jet.ver
+++ b/versions/build.jet.ver
@@ -1,3 +1,5 @@
 export stack_intel_ver=2021.5.0
 export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev
 source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.orion.ver b/versions/build.orion.ver
index ff85b1a801..834ecfc166 100644
--- a/versions/build.orion.ver
+++ b/versions/build.orion.ver
@@ -1,3 +1,5 @@
-export stack_intel_ver=2021.5.0
-export stack_impi_ver=2021.5.1
+export stack_intel_ver=2021.9.0
+export stack_impi_ver=2021.9.0
+export spack_env=gsi-addon-env-rocky9
 source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.s4.ver b/versions/build.s4.ver
index a0aae51d87..e2731ccfb3 100644
--- a/versions/build.s4.ver
+++ b/versions/build.s4.ver
@@ -1,3 +1,5 @@
 export stack_intel_ver=2021.5.0
 export stack_impi_ver=2021.5.0
+export spack_env=gsi-addon-env
 source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/build.spack.ver b/versions/build.spack.ver
index fb5b244bf5..808f85dd16 100644
--- a/versions/build.spack.ver
+++ b/versions/build.spack.ver
@@ -1,5 +1,4 @@
-export spack_stack_ver=1.5.1
-export spack_env=gsi-addon
+export spack_stack_ver=1.6.0
 
 export cmake_ver=3.23.1
 
@@ -11,7 +10,7 @@ export fms_ver=2023.02.01
 
 export hdf5_ver=1.14.0
 export netcdf_c_ver=4.9.2
-export netcdf_fortran_ver=4.6.0
+export netcdf_fortran_ver=4.6.1
 
 export bacio_ver=2.4.1
 export nemsio_ver=2.5.4
@@ -19,10 +18,10 @@ export sigio_ver=2.3.2
 export w3emc_ver=2.10.0
 export bufr_ver=11.7.0
 export g2_ver=3.4.5
-export sp_ver=2.3.3
+export sp_ver=2.5.0
 export ip_ver=4.3.0
 export gsi_ncdiag_ver=1.1.2
 export g2tmpl_ver=1.10.2
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
 export wgrib2_ver=2.0.8
 export grib_util_ver=1.3.0
diff --git a/versions/build.wcoss2.ver b/versions/build.wcoss2.ver
index 046ff5c64e..3ae0b3a1cc 100644
--- a/versions/build.wcoss2.ver
+++ b/versions/build.wcoss2.ver
@@ -28,6 +28,6 @@ export wrf_io_ver=1.2.0
 export ncio_ver=1.1.2
 export ncdiag_ver=1.0.0
 export g2tmpl_ver=1.10.2
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
 
 export upp_ver=10.0.8
diff --git a/versions/fix.ver b/versions/fix.ver
index 13d9b56dd2..5ca044ae3d 100644
--- a/versions/fix.ver
+++ b/versions/fix.ver
@@ -4,19 +4,23 @@
 export aer_ver=20220805
 export am_ver=20220805
 export chem_ver=20220805
-export cice_ver=20231219
+export cice_ver=20240416
 export cpl_ver=20230526
 export datm_ver=20220805
 export gdas_crtm_ver=20220805
 export gdas_fv3jedi_ver=20220805
-export gdas_gsibec_ver=20221031
+export gdas_soca_ver=20240624
+export gdas_gsibec_ver=20240416
+export gdas_obs_ver=20240213
 export glwu_ver=20220805
-export gsi_ver=20230911
+export gsi_ver=20240208
 export lut_ver=20220805
-export mom6_ver=20231219
+export mom6_ver=20240416
 export orog_ver=20231027
 export reg2grb2_ver=20220805
 export sfc_climo_ver=20220805
 export ugwd_ver=20220805
 export verif_ver=20220805
 export wave_ver=20240105
+export orog_nest_ver=global-nest.20240419
+export ugwd_nest_ver=global-nest.20240419
diff --git a/versions/run.gaea.ver b/versions/run.gaea.ver
new file mode 100644
index 0000000000..b92fe8c1db
--- /dev/null
+++ b/versions/run.gaea.ver
@@ -0,0 +1,6 @@
+export stack_intel_ver=2023.1.0
+export stack_cray_mpich_ver=8.1.25
+export spack_env=gsi-addon-dev
+
+source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.hera.ver b/versions/run.hera.ver
index b358f9d495..34f81bfe96 100644
--- a/versions/run.hera.ver
+++ b/versions/run.hera.ver
@@ -4,8 +4,10 @@ export spack_env=gsi-addon-dev-rocky8
 
 export hpss_ver=hpss
 export ncl_ver=6.6.2
-export R_ver=3.5.0
-export gempak_ver=7.4.2
+export R_ver=3.6.1
+
+export gempak_ver=7.17.0
+export perl_ver=5.38.0
 
 source "${HOMEgfs:-}/versions/run.spack.ver"
 export spack_mod_path="/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.hercules.ver b/versions/run.hercules.ver
index 43f1b2181d..ee8e4f8aea 100644
--- a/versions/run.hercules.ver
+++ b/versions/run.hercules.ver
@@ -1,12 +1,7 @@
 export stack_intel_ver=2021.9.0
 export stack_impi_ver=2021.9.0
 export intel_mkl_ver=2023.1.0
-
-export ncl_ver=6.6.2
-export perl_ver=5.36.0
+export spack_env=gsi-addon-env
 
 source "${HOMEgfs:-}/versions/run.spack.ver"
-
-# wgrib2 and cdo are different on Hercules from all the other systems
-export wgrib2_ver=3.1.1
-export cdo_ver=2.2.0
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.jet.ver b/versions/run.jet.ver
index 18a82cab4f..3aa586ee42 100644
--- a/versions/run.jet.ver
+++ b/versions/run.jet.ver
@@ -1,9 +1,14 @@
 export stack_intel_ver=2021.5.0
 export stack_impi_ver=2021.5.1
+export spack_env=gsi-addon-dev-rocky8
 
 export hpss_ver=
 export ncl_ver=6.6.2
 export R_ver=4.0.2
 export gempak_ver=7.4.2
 
+# Adding perl as a module; with Rocky8, perl packages will not come from the OS
+export perl_ver=5.38.0
+
 source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.orion.ver b/versions/run.orion.ver
index 7671bc028d..59adda6b50 100644
--- a/versions/run.orion.ver
+++ b/versions/run.orion.ver
@@ -1,11 +1,10 @@
-export stack_intel_ver=2022.0.2
-export stack_impi_ver=2021.5.1
-
-export ncl_ver=6.6.2
-export gempak_ver=7.5.1
+export stack_intel_ver=2021.9.0
+export stack_impi_ver=2021.9.0
+export spack_env=gsi-addon-env-rocky9
 
 #For metplus jobs, not currently working with spack-stack
 #export met_ver=9.1.3
 #export metplus_ver=3.1.1
 
 source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.s4.ver b/versions/run.s4.ver
index 56817ef439..6d0f4cbaca 100644
--- a/versions/run.s4.ver
+++ b/versions/run.s4.ver
@@ -1,6 +1,8 @@
 export stack_intel_ver=2021.5.0
 export stack_impi_ver=2021.5.0
+export spack_env=gsi-addon-env
 
 export ncl_ver=6.4.0-precompiled
 
 source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.spack.ver b/versions/run.spack.ver
index 80fa6acd1a..9aa5460c80 100644
--- a/versions/run.spack.ver
+++ b/versions/run.spack.ver
@@ -1,29 +1,35 @@
-export spack_stack_ver=1.5.1
-export spack_env=gsi-addon-dev-rocky8
-export python_ver=3.10.8
+export spack_stack_ver=1.6.0
+export python_ver=3.11.6
 
 export jasper_ver=2.0.32
 export libpng_ver=1.6.37
-export cdo_ver=2.0.5
+export cdo_ver=2.2.0
 export nco_ver=5.0.6
 
 export hdf5_ver=1.14.0
 export netcdf_c_ver=4.9.2
-export netcdf_fortran_ver=4.6.0
+export netcdf_fortran_ver=4.6.1
 
 export bufr_ver=11.7.0
 export gsi_ncdiag_ver=1.1.2
 export g2tmpl_ver=1.10.2
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
 export wgrib2_ver=2.0.8
 export grib_util_ver=1.3.0
-export prod_util_ver=1.2.2
+export prod_util_ver=2.1.1
 export py_netcdf4_ver=1.5.8
-export py_pyyaml_ver=5.4.1
+export py_pyyaml_ver=6.0
 export py_jinja2_ver=3.1.2
+export py_pandas_ver=1.5.3
+export py_python_dateutil_ver=2.8.2
+export py_f90nml_ver=1.4.3
+
+export met_ver=9.1.3
+export metplus_ver=3.1.1
+export py_xarray_ver=2023.7.0
 
 export obsproc_run_ver=1.1.2
-export prepobs_run_ver=1.0.1
+export prepobs_run_ver=1.0.2
 
 export ens_tracker_ver=feature-GFSv17_com_reorg
-export fit2obs_ver=1.0.0
+export fit2obs_ver=1.1.2
diff --git a/versions/run.wcoss2.ver b/versions/run.wcoss2.ver
index a188cdea74..7f653dd50e 100644
--- a/versions/run.wcoss2.ver
+++ b/versions/run.wcoss2.ver
@@ -37,15 +37,17 @@ export bufr_dump_ver=1.0.0
 export util_shared_ver=1.4.0
 export g2tmpl_ver=1.10.2
 export ncdiag_ver=1.0.0
-export crtm_ver=2.4.0
+export crtm_ver=2.4.0.1
 export wgrib2_ver=2.0.8
+export met_ver=9.1.3
+export metplus_ver=3.1.1
 
 # Development-only below
 
 export obsproc_run_ver=1.1.2
-export prepobs_run_ver=1.0.1
+export prepobs_run_ver=1.0.2
 
 export ens_tracker_ver=feature-GFSv17_com_reorg
-export fit2obs_ver=1.0.0
+export fit2obs_ver=1.1.2
 export mos_ver=5.4.3
 export mos_shared_ver=2.7.2
diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py
index d45b6a9abc..97a77c2c21 100644
--- a/workflow/applications/applications.py
+++ b/workflow/applications/applications.py
@@ -3,6 +3,7 @@
 from typing import Dict, List, Any
 from datetime import timedelta
 from hosts import Host
+from pathlib import Path
 from wxflow import Configuration, to_timedelta
 from abc import ABC, ABCMeta, abstractmethod
 
@@ -31,7 +32,11 @@ def __init__(self, conf: Configuration) -> None:
 
         self.scheduler = Host().scheduler
 
-        _base = conf.parse_config('config.base')
+        # Save the configuration so we can source the config files when
+        # determining task resources
+        self.conf = conf
+
+        _base = self.conf.parse_config('config.base')
         # Define here so the child __init__ functions can use it; will
         # be overwritten later during _init_finalize().
         self._base = _base
@@ -51,6 +56,7 @@ def __init__(self, conf: Configuration) -> None:
         self.do_ocean = _base.get('DO_OCN', False)
         self.do_ice = _base.get('DO_ICE', False)
         self.do_aero = _base.get('DO_AERO', False)
+        self.do_prep_obs_aero = _base.get('DO_PREP_OBS_AERO', False)
         self.do_bufrsnd = _base.get('DO_BUFRSND', False)
         self.do_gempak = _base.get('DO_GEMPAK', False)
         self.do_awips = _base.get('DO_AWIPS', False)
@@ -64,30 +70,45 @@ def __init__(self, conf: Configuration) -> None:
         self.do_upp = not _base.get('WRITE_DOPOST', True)
         self.do_goes = _base.get('DO_GOES', False)
         self.do_mos = _base.get('DO_MOS', False)
+        self.do_extractvars = _base.get('DO_EXTRACTVARS', False)
 
         self.do_hpssarch = _base.get('HPSSARCH', False)
 
         self.nens = _base.get('NMEM_ENS', 0)
 
-        self.wave_cdumps = None
+        self.wave_runs = None
         if self.do_wave:
-            wave_cdump = _base.get('WAVE_CDUMP', 'BOTH').lower()
-            if wave_cdump in ['both']:
-                self.wave_cdumps = ['gfs', 'gdas']
-            elif wave_cdump in ['gfs', 'gdas']:
-                self.wave_cdumps = [wave_cdump]
-
-    def _init_finalize(self, conf: Configuration):
+            wave_run = _base.get('WAVE_RUN', 'BOTH').lower()
+            if wave_run in ['both']:
+                self.wave_runs = ['gfs', 'gdas']
+            elif wave_run in ['gfs', 'gdas']:
+                self.wave_runs = [wave_run]
+
+        self.aero_anl_runs = None
+        self.aero_fcst_runs = None
+        if self.do_aero:
+            aero_anl_run = _base.get('AERO_ANL_RUN', 'BOTH').lower()
+            if aero_anl_run in ['both']:
+                self.aero_anl_runs = ['gfs', 'gdas']
+            elif aero_anl_run in ['gfs', 'gdas']:
+                self.aero_anl_runs = [aero_anl_run]
+            # Assumption: default mirrors AERO_ANL_RUN ('BOTH'); calling .lower() on a
+            # None default would raise if AERO_FCST_RUN were unset
+            aero_fcst_run = _base.get('AERO_FCST_RUN', 'BOTH').lower()
+            if aero_fcst_run in ['both']:
+                self.aero_fcst_runs = ['gfs', 'gdas']
+            elif aero_fcst_run in ['gfs', 'gdas']:
+                self.aero_fcst_runs = [aero_fcst_run]
+
+    def _init_finalize(self, *args):
         print("Finalizing initialize")
 
         # Get a list of all possible config_files that would be part of the application
         self.configs_names = self._get_app_configs()
 
         # Source the config_files for the jobs in the application
-        self.configs = self._source_configs(conf)
+        self.configs = self.source_configs()
 
         # Update the base config dictionary base on application
-        self.configs['base'] = self._update_base(self.configs['base'])
+        self.configs['base'] = self.update_base(self.configs['base'])
 
         # Save base in the internal state since it is often needed
         self._base = self.configs['base']
@@ -104,7 +125,7 @@ def _get_app_configs(self):
 
     @staticmethod
     @abstractmethod
-    def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]:
+    def update_base(base_in: Dict[str, Any]) -> Dict[str, Any]:
         '''
         Make final updates to base and return an updated copy
 
@@ -121,9 +142,9 @@ def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]:
         '''
         pass
 
-    def _source_configs(self, conf: Configuration) -> Dict[str, Any]:
+    def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]:
         """
-        Given the configuration object and jobs,
+        Given the configuration object used to initialize this application,
         source the configurations for each config and return a dictionary
         Every config depends on "config.base"
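+        The optional `run` argument is passed through to parse_config as RUN;
+        `log` toggles the per-config "sourcing config.<name>" message.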
         """
@@ -131,7 +152,7 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]:
         configs = dict()
 
         # Return config.base as well
-        configs['base'] = conf.parse_config('config.base')
+        configs['base'] = self.conf.parse_config('config.base')
 
         # Source the list of all config_files involved in the application
         for config in self.configs_names:
@@ -145,20 +166,24 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]:
                 files += ['config.anal', 'config.eupd']
             elif config in ['efcs']:
                 files += ['config.fcst', 'config.efcs']
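+            # The atmanl*/atmensanl* jobs layer a shared parent config
+            # (config.atmanl or config.atmensanl) beneath the job-specific file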
+            elif config in ['atmanlinit', 'atmanlvar', 'atmanlfv3inc']:
+                files += ['config.atmanl', f'config.{config}']
+            elif config in ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc']:
+                files += ['config.atmensanl', f'config.{config}']
             elif 'wave' in config:
                 files += ['config.wave', f'config.{config}']
             else:
                 files += [f'config.{config}']
 
-            print(f'sourcing config.{config}')
-            configs[config] = conf.parse_config(files)
+            if log:
+                print(f'sourcing config.{config}')
+            configs[config] = self.conf.parse_config(files, RUN=run)
 
         return configs
 
     @abstractmethod
     def get_task_names(self) -> Dict[str, List[str]]:
         '''
-        Create a list of task names for each CDUMP valid for the configuation.
+        Create a list of task names for each RUN valid for the configuration.
 
         Parameters
         ----------
@@ -166,7 +191,7 @@ def get_task_names(self) -> Dict[str, List[str]]:
 
         Returns
         -------
-        Dict[str, List[str]]: Lists of tasks for each CDUMP.
+        Dict[str, List[str]]: Lists of tasks for each RUN.
 
         '''
         pass
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index b2369e8dfc..364ee2c48b 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -14,22 +14,33 @@ def _get_app_configs(self):
         """
         Returns the config_files that are involved in gefs
         """
-        configs = ['stage_ic', 'fcst']
+        configs = ['stage_ic', 'fcst', 'atmos_products']
 
         if self.nens > 0:
-            configs += ['efcs']
+            configs += ['efcs', 'atmos_ensstat']
 
         if self.do_wave:
-            configs += ['waveinit']
+            configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
+            if self.do_wave_bnd:
+                configs += ['wavepostbndpnt', 'wavepostbndpntbll']
+
+        if self.do_ocean or self.do_ice:
+            configs += ['oceanice_products']
+
+        if self.do_aero:
+            configs += ['prep_emissions']
+
+        if self.do_extractvars:
+            configs += ['extractvars']
 
         return configs
 
     @staticmethod
-    def _update_base(base_in):
+    def update_base(base_in):
 
         base_out = base_in.copy()
         base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc'])
-        base_out['CDUMP'] = 'gefs'
+        base_out['RUN'] = 'gefs'
 
         return base_out
 
@@ -40,9 +51,32 @@ def get_task_names(self):
         if self.do_wave:
             tasks += ['waveinit']
 
+        if self.do_aero:
+            tasks += ['prep_emissions']
+
         tasks += ['fcst']
 
         if self.nens > 0:
             tasks += ['efcs']
 
-        return {f"{self._base['CDUMP']}": tasks}
+        tasks += ['atmos_prod']
+
+        if self.nens > 0:
+            tasks += ['atmos_ensstat']
+
+        if self.do_ocean:
+            tasks += ['ocean_prod']
+
+        if self.do_ice:
+            tasks += ['ice_prod']
+
+        if self.do_wave:
+            tasks += ['wavepostsbs']
+            if self.do_wave_bnd:
+                tasks += ['wavepostbndpnt', 'wavepostbndpntbll']
+            tasks += ['wavepostpnt']
+
+        if self.do_extractvars:
+            tasks += ['extractvars']
+
+        return {f"{self._base['RUN']}": tasks}
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 1ff6cc3723..e049a7d422 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -16,18 +16,19 @@ def __init__(self, conf: Configuration):
         self.do_jediatmvar = self._base.get('DO_JEDIATMVAR', False)
         self.do_jediatmens = self._base.get('DO_JEDIATMENS', False)
         self.do_jediocnvar = self._base.get('DO_JEDIOCNVAR', False)
-        self.do_jedilandda = self._base.get('DO_JEDILANDDA', False)
+        self.do_jedisnowda = self._base.get('DO_JEDISNOWDA', False)
         self.do_mergensst = self._base.get('DO_MERGENSST', False)
+        self.do_vrfy_oceanda = self._base.get('DO_VRFY_OCEANDA', False)
 
         self.lobsdiag_forenkf = False
-        self.eupd_cdumps = None
+        self.eupd_runs = None
         if self.do_hybvar:
             self.lobsdiag_forenkf = self._base.get('lobsdiag_forenkf', False)
-            eupd_cdump = self._base.get('EUPD_CYC', 'gdas').lower()
-            if eupd_cdump in ['both']:
-                self.eupd_cdumps = ['gfs', 'gdas']
-            elif eupd_cdump in ['gfs', 'gdas']:
-                self.eupd_cdumps = [eupd_cdump]
+            eupd_run = self._base.get('EUPD_CYC', 'gdas').lower()
+            if eupd_run in ['both']:
+                self.eupd_runs = ['gfs', 'gdas']
+            elif eupd_run in ['gfs', 'gdas']:
+                self.eupd_runs = [eupd_run]
 
     def _get_app_configs(self):
         """
@@ -37,23 +38,26 @@ def _get_app_configs(self):
         configs = ['prep']
 
         if self.do_jediatmvar:
-            configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal']
+            configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal']
         else:
             configs += ['anal', 'analdiag']
 
         if self.do_jediocnvar:
-            configs += ['prepoceanobs', 'ocnanalprep', 'ocnanalbmat',
-                        'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost',
-                        'ocnanalvrfy']
+            configs += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun']
+            if self.do_hybvar:
+                configs += ['ocnanalecen']
+            configs += ['ocnanalchkpt', 'ocnanalpost']
+            if self.do_vrfy_oceanda:
+                configs += ['ocnanalvrfy']
 
-        if self.do_ocean:
-            configs += ['ocnpost']
+        if self.do_ocean or self.do_ice:
+            configs += ['oceanice_products']
 
         configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup']
 
         if self.do_hybvar:
             if self.do_jediatmens:
-                configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal']
+                configs += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal']
             else:
                 configs += ['eobs', 'eomg', 'ediag', 'eupd']
             configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc']
@@ -83,7 +87,9 @@ def _get_app_configs(self):
             configs += ['metp']
 
         if self.do_gempak:
-            configs += ['gempak', 'npoess']
+            configs += ['gempak']
+            if self.do_goes:
+                configs += ['npoess']
 
         if self.do_bufrsnd:
             configs += ['postsnd']
@@ -102,9 +108,11 @@ def _get_app_configs(self):
 
         if self.do_aero:
             configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
+            if self.do_prep_obs_aero:
+                configs += ['prepobsaero']
 
-        if self.do_jedilandda:
-            configs += ['preplandobs', 'landanl']
+        if self.do_jedisnowda:
+            configs += ['prepsnowobs', 'snowanl']
 
         if self.do_mos:
             configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep',
@@ -115,7 +123,7 @@ def _get_app_configs(self):
         return configs
 
     @staticmethod
-    def _update_base(base_in):
+    def update_base(base_in):
 
         return GFSCycledAppConfig.get_gfs_cyc_dates(base_in)
 
@@ -130,23 +138,22 @@ def get_task_names(self):
         gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup']
 
         if self.do_jediatmvar:
-            gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal']
+            gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal']
         else:
             gdas_gfs_common_tasks_before_fcst += ['anal']
 
         if self.do_jediocnvar:
-            gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep',
-                                                  'ocnanalbmat', 'ocnanalrun',
-                                                  'ocnanalchkpt', 'ocnanalpost',
-                                                  'ocnanalvrfy']
+            gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun']
+            if self.do_hybvar:
+                gdas_gfs_common_tasks_before_fcst += ['ocnanalecen']
+            gdas_gfs_common_tasks_before_fcst += ['ocnanalchkpt', 'ocnanalpost']
+            if self.do_vrfy_oceanda:
+                gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy']
 
         gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc']
 
-        if self.do_aero:
-            gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
-
-        if self.do_jedilandda:
-            gdas_gfs_common_tasks_before_fcst += ['preplandobs', 'landanl']
+        if self.do_jedisnowda:
+            gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl']
 
         wave_prep_tasks = ['waveinit', 'waveprep']
         wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll']
@@ -156,7 +163,7 @@ def get_task_names(self):
         hybrid_after_eupd_tasks = []
         if self.do_hybvar:
             if self.do_jediatmens:
-                hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres']
+                hybrid_tasks += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'echgres']
             else:
                 hybrid_tasks += ['eobs', 'eupd', 'echgres']
                 hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg']
@@ -168,16 +175,21 @@ def get_task_names(self):
         if not self.do_jediatmvar:
             gdas_tasks += ['analdiag']
 
-        if self.do_wave and 'gdas' in self.wave_cdumps:
+        if self.do_wave and 'gdas' in self.wave_runs:
             gdas_tasks += wave_prep_tasks
 
+        if self.do_aero and 'gdas' in self.aero_anl_runs:
+            gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
+            if self.do_prep_obs_aero:
+                gdas_tasks += ['prepobsaero']
+
         gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst']
 
         if self.do_upp:
             gdas_tasks += ['atmupp']
-        gdas_tasks += ['atmprod']
+        gdas_tasks += ['atmos_prod']
 
-        if self.do_wave and 'gdas' in self.wave_cdumps:
+        if self.do_wave and 'gdas' in self.wave_runs:
             if self.do_wave_bnd:
                 gdas_tasks += wave_bndpnt_tasks
             gdas_tasks += wave_post_tasks
@@ -202,14 +214,25 @@ def get_task_names(self):
         # Collect "gfs" cycle tasks
         gfs_tasks = gdas_gfs_common_tasks_before_fcst.copy()
 
-        if self.do_wave and 'gfs' in self.wave_cdumps:
+        if self.do_wave and 'gfs' in self.wave_runs:
             gfs_tasks += wave_prep_tasks
 
+        if self.do_aero and 'gfs' in self.aero_anl_runs:
+            gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal']
+            if self.do_prep_obs_aero:
+                gfs_tasks += ['prepobsaero']
+
         gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst']
 
+        if self.do_ocean:
+            gfs_tasks += ['ocean_prod']
+
+        if self.do_ice:
+            gfs_tasks += ['ice_prod']
+
         if self.do_upp:
             gfs_tasks += ['atmupp']
-        gfs_tasks += ['atmprod']
+        gfs_tasks += ['atmos_prod']
 
         if self.do_goes:
             gfs_tasks += ['goesupp']
@@ -229,7 +252,7 @@ def get_task_names(self):
         if self.do_metp:
             gfs_tasks += ['metp']
 
-        if self.do_wave and 'gfs' in self.wave_cdumps:
+        if self.do_wave and 'gfs' in self.wave_runs:
             if self.do_wave_bnd:
                 gfs_tasks += wave_bndpnt_tasks
             gfs_tasks += wave_post_tasks
@@ -245,11 +268,12 @@ def get_task_names(self):
             gfs_tasks += ['gempak']
             gfs_tasks += ['gempakmeta']
             gfs_tasks += ['gempakncdcupapgif']
-            gfs_tasks += ['npoess_pgrb2_0p5deg']
-            gfs_tasks += ['gempakpgrb2spec']
+            if self.do_goes:
+                gfs_tasks += ['npoess_pgrb2_0p5deg']
+                gfs_tasks += ['gempakpgrb2spec']
 
         if self.do_awips:
-            gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind']
+            gfs_tasks += ['awips_20km_1p0deg', 'fbwind']
 
         if self.do_mos:
             gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep',
@@ -262,15 +286,15 @@ def get_task_names(self):
         tasks = dict()
         tasks['gdas'] = gdas_tasks
 
-        if self.do_hybvar and 'gdas' in self.eupd_cdumps:
+        if self.do_hybvar and 'gdas' in self.eupd_runs:
             enkfgdas_tasks = hybrid_tasks + hybrid_after_eupd_tasks
             tasks['enkfgdas'] = enkfgdas_tasks
 
-        # Add CDUMP=gfs tasks if running early cycle
+        # Add RUN=gfs tasks if running early cycle
         if self.gfs_cyc > 0:
             tasks['gfs'] = gfs_tasks
 
-            if self.do_hybvar and 'gfs' in self.eupd_cdumps:
+            if self.do_hybvar and 'gfs' in self.eupd_runs:
                 enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks
                 enkfgfs_tasks.remove("echgres")
                 tasks['enkfgfs'] = enkfgfs_tasks
@@ -321,9 +345,4 @@ def get_gfs_cyc_dates(base: Dict[str, Any]) -> Dict[str, Any]:
             base_out['EDATE_GFS'] = edate_gfs
             base_out['INTERVAL_GFS'] = interval_gfs
 
-            fhmax_gfs = {}
-            for hh in ['00', '06', '12', '18']:
-                fhmax_gfs[hh] = base.get(f'FHMAX_GFS_{hh}', base.get('FHMAX_GFS_00', 120))
-            base_out['FHMAX_GFS'] = fhmax_gfs
-
         return base_out
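
For reference, a minimal sketch of how the marine-DA branch above assembles its task list for one combination of toggles; the flag values below are hypothetical, while the task names are the ones added in this hunk.

    # Sketch only: mirrors the do_jediocnvar branch of get_task_names() above.
    do_jediocnvar = True
    do_hybvar = True          # adds the ensemble recentering step (ocnanalecen)
    do_vrfy_oceanda = False   # the verification step is now optional

    tasks = []
    if do_jediocnvar:
        tasks += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun']
        if do_hybvar:
            tasks += ['ocnanalecen']
        tasks += ['ocnanalchkpt', 'ocnanalpost']
        if do_vrfy_oceanda:
            tasks += ['ocnanalvrfy']

    print(tasks)
    # ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun',
    #  'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost']
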
diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index 1145863210..caa545d1e1 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -25,7 +25,8 @@ def _get_app_configs(self):
             configs += ['atmos_products']
 
             if self.do_aero:
-                configs += ['aerosol_init']
+                if not self._base['EXP_WARM_START']:
+                    configs += ['aerosol_init']
 
             if self.do_tracker:
                 configs += ['tracker']
@@ -49,7 +50,7 @@ def _get_app_configs(self):
                 configs += ['awips']
 
         if self.do_ocean or self.do_ice:
-            configs += ['ocnpost']
+            configs += ['oceanice_products']
 
         if self.do_wave:
             configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
@@ -69,11 +70,11 @@ def _get_app_configs(self):
         return configs
 
     @staticmethod
-    def _update_base(base_in):
+    def update_base(base_in):
 
         base_out = base_in.copy()
         base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc'])
-        base_out['CDUMP'] = 'gfs'
+        base_out['RUN'] = 'gfs'
 
         return base_out
 
@@ -87,7 +88,10 @@ def get_task_names(self):
         tasks = ['stage_ic']
 
         if self.do_aero:
-            tasks += ['aerosol_init']
+            aero_fcst_run = self._base.get('AERO_FCST_RUN', 'BOTH').lower()
+            if self._base['RUN'] in aero_fcst_run or aero_fcst_run == "both":
+                if not self._base['EXP_WARM_START']:
+                    tasks += ['aerosol_init']
 
         if self.do_wave:
             tasks += ['waveinit']
@@ -100,7 +104,7 @@ def get_task_names(self):
             if self.do_upp:
                 tasks += ['atmupp']
 
-            tasks += ['atmprod']
+            tasks += ['atmos_prod']
 
             if self.do_goes:
                 tasks += ['goesupp']
@@ -124,10 +131,13 @@ def get_task_names(self):
                 tasks += ['gempak', 'gempakmeta', 'gempakncdcupapgif', 'gempakpgrb2spec']
 
             if self.do_awips:
-                tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind']
+                tasks += ['awips_20km_1p0deg', 'fbwind']
 
-        if self.do_ocean or self.do_ice:
-            tasks += ['ocnpost']
+        if self.do_ocean:
+            tasks += ['ocean_prod']
+
+        if self.do_ice:
+            tasks += ['ice_prod']
 
         if self.do_wave:
             if self.do_wave_bnd:
@@ -146,4 +156,4 @@ def get_task_names(self):
 
         tasks += ['arch', 'cleanup']  # arch and cleanup **must** be the last tasks
 
-        return {f"{self._base['CDUMP']}": tasks}
+        return {f"{self._base['RUN']}": tasks}
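
For reference, a minimal sketch of the AERO_FCST_RUN membership test used above to gate aerosol_init; values other than the default 'BOTH' are assumed here for illustration.

    # Sketch only: same comparison as in get_task_names() above.
    def wants_aero(run: str, aero_fcst_run: str = 'BOTH') -> bool:
        aero_fcst_run = aero_fcst_run.lower()
        return run in aero_fcst_run or aero_fcst_run == 'both'

    print(wants_aero('gfs', 'BOTH'))   # True  (default: aerosols in all runs)
    print(wants_aero('gfs', 'gdas'))   # False (gfs run skips aerosol_init)
    print(wants_aero('gdas', 'gdas'))  # True
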
diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py
index 7e0f350c0f..1317f7be28 100755
--- a/workflow/create_experiment.py
+++ b/workflow/create_experiment.py
@@ -11,6 +11,14 @@
 The yaml file are simply the arguments for these two scripts.
 After this scripts runs the experiment is ready for launch.
 
+Environment variables
+---------------------
+    pslot
+        Name of the experiment
+
+    RUNTESTS
+        Root directory where the test EXPDIR and COMROOT will be placed
+
 Output
 ------
 Functionally an experiment is setup as a result running the two scripts described above
@@ -18,7 +26,6 @@
 """
 
 import os
-import sys
 
 from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
 from pathlib import Path
@@ -28,8 +35,6 @@
 import setup_expt
 import setup_xml
 
-from hosts import Host
-
 _here = os.path.dirname(__file__)
 _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..'))
 
@@ -63,7 +68,9 @@ def input_args():
                             formatter_class=ArgumentDefaultsHelpFormatter)
 
     parser.add_argument(
-        '--yaml', help='full path to yaml file describing the experiment configuration', type=Path, required=True)
+        '-y', '--yaml', help='full path to yaml file describing the experiment configuration', type=Path, required=True)
+    parser.add_argument(
+        '-o', '--overwrite', help='overwrite previously created experiment', action="store_true", required=False)
 
     return parser.parse_args()
 
@@ -77,18 +84,15 @@ def input_args():
     data.update(os.environ)
     testconf = parse_j2yaml(path=user_inputs.yaml, data=data)
 
-    if 'skip_ci_on_hosts' in testconf:
-        host = Host()
-        if host.machine.lower() in [machine.lower() for machine in testconf.skip_ci_on_hosts]:
-            logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}')
-            sys.exit(0)
-
     # Create a list of arguments to setup_expt.py
     setup_expt_args = [testconf.experiment.system, testconf.experiment.mode]
     for kk, vv in testconf.arguments.items():
         setup_expt_args.append(f"--{kk}")
         setup_expt_args.append(str(vv))
 
+    if user_inputs.overwrite:
+        setup_expt_args.append("--overwrite")
+
     logger.info(f"Call: setup_expt.main()")
     logger.debug(f"setup_expt.py {' '.join(setup_expt_args)}")
     setup_expt.main(setup_expt_args)
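
For reference, a minimal sketch of what data.update(os.environ) above makes possible: the pslot and RUNTESTS variables documented in the new docstring become visible to the Jinja2-templated case YAML. The HOMEgfs value and case file path below are hypothetical.

    # Sketch only: wxflow's parse_j2yaml renders the case YAML with both
    # script-provided values and the caller's environment.
    import os
    from wxflow import parse_j2yaml

    data = {'HOMEgfs': '/path/to/global-workflow'}   # hypothetical template variable
    data.update(os.environ)                          # exposes pslot, RUNTESTS, ...
    testconf = parse_j2yaml(path='cases/my_case.yaml', data=data)
    print(testconf.experiment.mode, testconf.arguments.pslot)

On the command line the script is now invoked as ./create_experiment.py -y <case>.yaml [--overwrite], with --overwrite forwarded to setup_expt.py.
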
diff --git a/workflow/gsl_template_hera.xml b/workflow/gsl_template_hera.xml
index 6205d45ed4..8c2257cee4 100644
--- a/workflow/gsl_template_hera.xml
+++ b/workflow/gsl_template_hera.xml
@@ -6,7 +6,7 @@
 		Main workflow manager for Global Forecast System
 
 	NOTES:
-		This workflow was automatically generated at 2023-06-13 23:31:49.582810
+		This workflow was automatically generated at 2024-09-05 15:37:41.961069
 	-->
 	<!ENTITY PSLOT "test">
 	<!ENTITY HOMEgfs "/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gsl_ufs_dev"
@@ -15,6 +15,7 @@
 	<!ENTITY JOBS_DIR "&HOMEgfs;/jobs/rocoto">
         <!ENTITY ICSDIR "/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127">
         <!ENTITY CASE     "C768">
+        <!ENTITY ATCFNAME "">
         <!ENTITY COMPONENT "atmos">
 	<!ENTITY NATIVE_STR "--export=NONE">
 	<!ENTITY MAXTRIES "2">
@@ -25,7 +26,7 @@
 	<log verbosity="10"><cyclestr>&EXPDIR;/logs/@Y@m@d@H.log</cyclestr></log>
 
 	<!-- Define the cycles -->
-	<cycledef group="gfs">202401140000 202401140000 24:00:00</cycledef>
+	<cycledef group="gfs">202409050000 202409050000 24:00:00</cycledef>
 
 <task name="gfsgetic" cycledefs="gfs" maxtries="&MAXTRIES;">
 
@@ -41,32 +42,29 @@
 
 	<join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsinit.log</cyclestr></join>
 
-	<envar><name>RUN_ENVIR</name><value>emc</value></envar>
-	<envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
-	<envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
-	<envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
-	<envar><name>ICSDIR</name><value>&ICSDIR;</value></envar>
-	<envar><name>CASE</name><value>&CASE;</value></envar>
-	<envar><name>COMPONENT</name><value>&COMPONENT;</value></envar>
-	<envar><name>NET</name><value>gfs</value></envar>
-	<envar><name>CDUMP</name><value>gfs</value></envar>
-	<envar><name>RUN</name><value>gfs</value></envar>
-	<envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
-	<envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
-	<envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
-	<envar><name>COMROOT</name><value>/scratch1/NCEPDEV/global/glopara/com</value></envar>
-	<envar><name>DATAROOT</name><value>&ROTDIR;/../RUNDIRS/&PSLOT;</value></envar>
+        <envar><name>RUN_ENVIR</name><value>emc</value></envar>
+        <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
+        <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
+        <envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
+        <envar><name>ICSDIR</name><value>&ICSDIR;</value></envar>
+        <envar><name>CASE</name><value>&CASE;</value></envar>
+        <envar><name>COMPONENT</name><value>&COMPONENT;</value></envar>
+        <envar><name>NET</name><value>gfs</value></envar>
+        <envar><name>CDUMP</name><value>gfs</value></envar>
+        <envar><name>RUN</name><value>gfs</value></envar>
+        <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
+        <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
+        <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
+        <envar><name>COMROOT</name><value>/scratch1/NCEPDEV/global/glopara/com</value></envar>
+        <envar><name>DATAROOT</name><value>&ROTDIR;/../RUNDIRS/&PSLOT;</value></envar>
+	<envar><name>FHR3</name><value>#fhr#</value></envar>
+        <envar><name>COMPONENT</name><value>atmos</value></envar>
 
 	<dependency>
-                <and>
-                        <not>
-                                <datadep><cyclestr>&ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input</cyclestr></datadep>
-                        </not>
-                        <and>
-                                <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc</cyclestr></datadep>
-                                <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc</cyclestr></datadep>
-                        </and>
-                </and>
+                        <or>
+                                <datadep age="120"><cyclestr>&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2f#fhr#</cyclestr></datadep>
+                                <taskdep task="gfsfcst"/>
+                        </or>
 	</dependency>
 
 </task>
@@ -79,7 +77,7 @@
 	<account>gsd-fv3</account>
 	<queue>batch</queue>
 	<partition>hera</partition>
-	<walltime>05:00:00</walltime>
+	<walltime>06:00:00</walltime>
 <!--	<nodes>101:ppn=40:tpp=1</nodes> -->    <!-- 16x16, 2th, 2wg, 40wt -->
 	<nodes>56:ppn=40:tpp=1</nodes>         <!-- 12x12, 2th, 1wg, 40wt -->
         <native>&NATIVE_STR;</native>
@@ -112,15 +110,13 @@
 
 <metatask name="gfsatmprod">
 
-	<var name="grp">_f000-f012 _f018-f030 _f036-f048 _f054-f066 _f072-f084 _f090-f102 _f108-f120</var>
-	<var name="dep">f012 f030 f048 f066 f084 f102 f120</var>
-	<var name="lst">f000_f006_f012 f018_f024_f030 f036_f042_f048 f054_f060_f066 f072_f078_f084 f090_f096_f102 f108_f114_f120</var>
+	<var name="fhr">000 003 006 009 012 015 018 021 024 027 030 033 036 039 042 045 048 051 054 057 060 063 066 069 072 075 078 081 084 087 090 093 096 099 102 105 108 111 114 117 120</var>
 
-	<task name="gfsatmprod#grp#" cycledefs="gfs" maxtries="&MAXTRIES;">
+	<task name="gfsatmprod_#fhr#" cycledefs="gfs" maxtries="&MAXTRIES;">
 
 		<command>&JOBS_DIR;/atmos_products.sh</command>
 
-		<jobname><cyclestr>&PSLOT;_gfsatmprod#grp#_@H</cyclestr></jobname>
+		<jobname><cyclestr>&PSLOT;_gfsatmprod_f#fhr#_@H</cyclestr></jobname>
 		<account>gsd-fv3</account>
 		<queue>batch</queue>
 		<partition>hera</partition>
@@ -128,31 +124,29 @@
 		<nodes>1:ppn=24:tpp=1</nodes>
 		<native>&NATIVE_STR;</native>
 
-		<join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsatmprod#grp#.log</cyclestr></join>
+		<join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsatmprod_f#fhr#.log</cyclestr></join>
 
 		<envar><name>RUN_ENVIR</name><value>emc</value></envar>
 		<envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
 		<envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
-		<envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
 		<envar><name>NET</name><value>gfs</value></envar>
-		<envar><name>CDUMP</name><value>gfs</value></envar>
 		<envar><name>RUN</name><value>gfs</value></envar>
 		<envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
 		<envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
 		<envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
 		<envar><name>COMROOT</name><value>/scratch1/NCEPDEV/global/glopara/com</value></envar>
 		<envar><name>DATAROOT</name><value>&ROTDIR;/../RUNDIRS/&PSLOT;</value></envar>
-		<envar><name>FHRLST</name><value>#lst#</value></envar>
+		<envar><name>FHR3</name><value>#fhr#</value></envar>
+		<envar><name>COMPONENT</name><value>atmos</value></envar>
 
 		<dependency>
-			<datadep age="120"><cyclestr>&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep#</cyclestr></datadep>
+			<datadep age="120"><cyclestr>&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#fhr#</cyclestr></datadep>
 		</dependency>
 
 	</task>
 
 </metatask>
 
-<!--
 <task name="gfsarch" cycledefs="gfs" maxtries="&MAXTRIES;">
 
 	<command>&JOBS_DIR;/arch.sh</command>
@@ -169,10 +163,11 @@
 	<join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsarch.log</cyclestr></join>
 
 	<envar><name>RUN_ENVIR</name><value>emc</value></envar>
-	<envar><name>HOMEgfs</name><value>/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite</value></envar>
-	<envar><name>EXPDIR</name><value>/home/role.rtfim/UFS-CAMsuite//FV3GFSwfm/rt_v17p8_ugwpv1_mynn</value></envar>
-	<envar><name>ROTDIR</name><value>/home/role.rtfim/UFS-CAMsuite//FV3GFSrun/rt_v17p8_ugwpv1_mynn</value></envar>
+	<envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
+	<envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
+	<envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
 	<envar><name>NET</name><value>gfs</value></envar>
+        <envar><name>ATCFNAME</name><value>&ATCFNAME;</value></envar>
 	<envar><name>CDUMP</name><value>gfs</value></envar>
 	<envar><name>RUN</name><value>gfs</value></envar>
 	<envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
@@ -182,15 +177,11 @@
 	<envar><name>DATAROOT</name><value>&ROTDIR;/../RUNDIRS/&PSLOT;</value></envar>
 
 	<dependency>
-		<and>
-			<taskdep task="gfstracker"/>
-			<taskdep task="gfsgenesis"/>
-			<metataskdep metatask="gfsatmprod"/>
-		</and>
+		<metataskdep metatask="gfsatmprod"/>
 	</dependency>
 
 </task>
-
+<!--
 <task name="gfscleanup" cycledefs="gfs" maxtries="&MAXTRIES;">
 
 	<command>&JOBS_DIR;/cleanup.sh</command>
@@ -207,9 +198,9 @@
 	<join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfscleanup.log</cyclestr></join>
 
 	<envar><name>RUN_ENVIR</name><value>emc</value></envar>
-	<envar><name>HOMEgfs</name><value>/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite</value></envar>
-	<envar><name>EXPDIR</name><value>/home/role.rtfim/UFS-CAMsuite//FV3GFSwfm/rt_v17p8_ugwpv1_mynn</value></envar>
-	<envar><name>ROTDIR</name><value>/home/role.rtfim/UFS-CAMsuite//FV3GFSrun/rt_v17p8_ugwpv1_mynn</value></envar>
+	<envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
+	<envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
+	<envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
 	<envar><name>NET</name><value>gfs</value></envar>
 	<envar><name>CDUMP</name><value>gfs</value></envar>
 	<envar><name>RUN</name><value>gfs</value></envar>
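
For reference, the reworked gfsatmprod metatask above drives one task per forecast hour instead of the old FHRLST groups; a minimal sketch of the hour list it enumerates (000 through 120 every 3 hours):

    # Sketch only: the #fhr# values listed in the metatask <var> above.
    fhrs = list(range(0, 121, 3))
    print(' '.join(f"{fhr:03d}" for fhr in fhrs))
    # 000 003 006 ... 117 120
    # Rocoto expands this into tasks gfsatmprod_000 ... gfsatmprod_120, each
    # receiving FHR3=<fhr> instead of the old FHRLST group string.
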
diff --git a/workflow/hosts.py b/workflow/hosts.py
index a17cd3f4a8..cd0cfe0083 100644
--- a/workflow/hosts.py
+++ b/workflow/hosts.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 import os
+import socket
 from pathlib import Path
 
 from wxflow import YAMLFile
@@ -15,7 +16,7 @@ class Host:
     """
 
     SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', 'HERCULES',
-                       'WCOSS2', 'S4', 'CONTAINER', 'AWSPW']
+                       'WCOSS2', 'S4', 'CONTAINER', 'AWSPW', 'GAEA']
 
     def __init__(self, host=None):
 
@@ -39,16 +40,15 @@ def detect(cls):
         if os.path.exists('/scratch1/NCEPDEV'):
             machine = 'HERA'
         elif os.path.exists('/work/noaa'):
-            if os.path.exists('/apps/other'):
-                machine = 'HERCULES'
-            else:
-                machine = 'ORION'
+            machine = socket.gethostname().split("-", 1)[0].upper()
         elif os.path.exists('/lfs4/HFIP'):
             machine = 'JET'
         elif os.path.exists('/lfs/f1'):
             machine = 'WCOSS2'
         elif os.path.exists('/data/prod'):
             machine = 'S4'
+        elif os.path.exists('/gpfs/f5'):
+            machine = 'GAEA'
         elif container is not None:
             machine = 'CONTAINER'
         elif pw_csp is not None:
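
For reference, a minimal sketch of the hostname-prefix detection added above for the MSU systems that share /work/noaa; the hostnames shown are hypothetical examples.

    # Sketch only: the patch feeds socket.gethostname() into this expression.
    def msu_machine(hostname: str) -> str:
        return hostname.split("-", 1)[0].upper()

    print(msu_machine("hercules-login-1.hpc.msstate.edu"))  # HERCULES
    print(msu_machine("orion-login-3"))                     # ORION
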
diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml
index c683010e0e..046dafcfa7 100644
--- a/workflow/hosts/awspw.yaml
+++ b/workflow/hosts/awspw.yaml
@@ -12,6 +12,8 @@ QUEUE: batch
 QUEUE_SERVICE: batch
 PARTITION_BATCH: compute
 PARTITION_SERVICE: compute
+RESERVATION: ''
+CLUSTERS: ''
 CHGRP_RSTPROD: 'YES'
 CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported.
 HPSSARCH: 'YES'
diff --git a/workflow/hosts/container.yaml b/workflow/hosts/container.yaml
index 3fd3856679..d7924724ae 100644
--- a/workflow/hosts/container.yaml
+++ b/workflow/hosts/container.yaml
@@ -12,6 +12,8 @@ QUEUE: ''
 QUEUE_SERVICE: ''
 PARTITION_BATCH: ''
 PARTITION_SERVICE: ''
+RESERVATION: ''
+CLUSTERS: ''
 CHGRP_RSTPROD: 'YES'
 CHGRP_CMD: 'chgrp rstprod'
 HPSSARCH: 'NO'
diff --git a/workflow/hosts/gaea.yaml b/workflow/hosts/gaea.yaml
new file mode 100644
index 0000000000..619a86f2e5
--- /dev/null
+++ b/workflow/hosts/gaea.yaml
@@ -0,0 +1,27 @@
+BASE_GIT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git'
+DMPDIR: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/dump'
+BASE_CPLIC: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/ICSDIR/prototype_ICs'
+PACKAGEROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/nwpara'
+COMROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/com'
+COMINsyn: '${COMROOT}/gfs/prod/syndat'
+HOMEDIR: '/gpfs/f5/ufs-ard/scratch/${USER}'
+STMP: '/gpfs/f5/ufs-ard/scratch/${USER}'
+PTMP: '/gpfs/f5/ufs-ard/scratch/${USER}'
+NOSCRUB: $HOMEDIR
+ACCOUNT: ufs-ard
+SCHEDULER: slurm
+QUEUE: normal
+QUEUE_SERVICE: normal
+PARTITION_BATCH: batch
+PARTITION_SERVICE: batch
+RESERVATION: ''
+CLUSTERS: 'c5'
+CHGRP_RSTPROD: 'NO'
+CHGRP_CMD: 'chgrp rstprod'
+HPSSARCH: 'NO'
+HPSS_PROJECT: emc-global
+LOCALARCH: 'NO'
+ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}'
+MAKE_NSSTBUFR: 'NO'
+MAKE_ACFTBUFR: 'NO'
+SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48']
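
For reference, the new RESERVATION and CLUSTERS host keys (here CLUSTERS: 'c5' for Gaea) read like inputs to Slurm's --reservation/--clusters options; the sketch below is only a plausible mapping, since how global-workflow actually consumes these keys is defined elsewhere in the repository.

    # Sketch only: plausible translation of the host keys into sbatch flags.
    def slurm_extras(host: dict) -> list:
        flags = []
        if host.get('RESERVATION'):
            flags.append(f"--reservation={host['RESERVATION']}")
        if host.get('CLUSTERS'):
            flags.append(f"--clusters={host['CLUSTERS']}")
        return flags

    print(slurm_extras({'RESERVATION': '', 'CLUSTERS': 'c5'}))  # ['--clusters=c5']
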
diff --git a/workflow/hosts/hera_gsl.yaml b/workflow/hosts/hera_gsl.yaml
index c12cac1559..31274dc707 100644
--- a/workflow/hosts/hera_gsl.yaml
+++ b/workflow/hosts/hera_gsl.yaml
@@ -2,8 +2,7 @@ BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git'
 DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump'
 BASE_CPLIC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs'
 PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara'
-COMROOT: '/scratch1/NCEPDEV/global/glopara/com'
-COMINsyn: '${COMROOT}/gfs/prod/syndat'
+COMINsyn: '/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat'
 HOMEDIR: '/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}'
 STMP: '${HOMEgfs}/FV3GFSrun/'
 PTMP: '${HOMEgfs}/FV3GFSrun/'
@@ -14,7 +13,9 @@ QUEUE: batch
 QUEUE_SERVICE: batch
 PARTITION_BATCH: hera
 PARTITION_SERVICE: service
+RESERVATION: ''
 CHGRP_RSTPROD: 'YES'
+CLUSTERS: ''
 CHGRP_CMD: 'chgrp rstprod'
 HPSSARCH: 'YES'
 HPSS_PROJECT: fim
@@ -23,3 +24,6 @@ ATARDIR: '/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}'
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48']
+COMINecmwf: /scratch1/NCEPDEV/global/glopara/data/external_gempak/ecmwf
+COMINnam: /scratch1/NCEPDEV/global/glopara/data/external_gempak/nam
+COMINukmet: /scratch1/NCEPDEV/global/glopara/data/external_gempak/ukmet
diff --git a/workflow/hosts/hercules.yaml b/workflow/hosts/hercules.yaml
index 58a9589f2f..b513bfd57a 100644
--- a/workflow/hosts/hercules.yaml
+++ b/workflow/hosts/hercules.yaml
@@ -1,11 +1,11 @@
-BASE_GIT: '/work/noaa/global/glopara/git'
+BASE_GIT: '/work/noaa/global/glopara/git_rocky9'
 DMPDIR: '/work/noaa/rstprod/dump'
 BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs'
 PACKAGEROOT: '/work/noaa/global/glopara/nwpara'
 COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat'
 HOMEDIR: '/work/noaa/global/${USER}'
-STMP: '/work/noaa/stmp/${USER}'
-PTMP: '/work/noaa/stmp/${USER}'
+STMP: '/work/noaa/stmp/${USER}/HERCULES'
+PTMP: '/work/noaa/stmp/${USER}/HERCULES'
 NOSCRUB: $HOMEDIR
 SCHEDULER: slurm
 ACCOUNT: fv3-cpu
@@ -13,7 +13,9 @@ QUEUE: batch
 QUEUE_SERVICE: batch
 PARTITION_BATCH: hercules
 PARTITION_SERVICE: service
+RESERVATION: ''
 CHGRP_RSTPROD: 'YES'
+CLUSTERS: ''
 CHGRP_CMD: 'chgrp rstprod'
 HPSSARCH: 'NO'
 HPSS_PROJECT: emc-global
@@ -22,3 +24,6 @@ ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}'
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48']
+COMINecmwf: /work/noaa/global/glopara/data/external_gempak/ecmwf
+COMINnam: /work/noaa/global/glopara/data/external_gempak/nam
+COMINukmet: /work/noaa/global/glopara/data/external_gempak/ukmet
diff --git a/workflow/hosts/jet_gsl.yaml b/workflow/hosts/jet_gsl.yaml
index 9da6f2b2aa..e556ca4663 100644
--- a/workflow/hosts/jet_gsl.yaml
+++ b/workflow/hosts/jet_gsl.yaml
@@ -23,3 +23,6 @@ ATARDIR: '/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}'
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48']
+COMINecmwf: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/external_gempak/ecmwf
+COMINnam: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/external_gempak/nam
+COMINukmet: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/external_gempak/ukmet
diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml
index 4c08a878dc..f0f807aacf 100644
--- a/workflow/hosts/orion.yaml
+++ b/workflow/hosts/orion.yaml
@@ -4,8 +4,8 @@ BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs'
 PACKAGEROOT: '/work/noaa/global/glopara/nwpara'
 COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat'
 HOMEDIR: '/work/noaa/global/${USER}'
-STMP: '/work/noaa/stmp/${USER}'
-PTMP: '/work/noaa/stmp/${USER}'
+STMP: '/work/noaa/stmp/${USER}/ORION'
+PTMP: '/work/noaa/stmp/${USER}/ORION'
 NOSCRUB: $HOMEDIR
 SCHEDULER: slurm
 ACCOUNT: fv3-cpu
@@ -13,7 +13,9 @@ QUEUE: batch
 QUEUE_SERVICE: batch
 PARTITION_BATCH: orion
 PARTITION_SERVICE: service
+RESERVATION: ''
 CHGRP_RSTPROD: 'YES'
+CLUSTERS: ''
 CHGRP_CMD: 'chgrp rstprod'
 HPSSARCH: 'NO'
 HPSS_PROJECT: emc-global
@@ -22,3 +24,6 @@ ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}'
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48']
+COMINecmwf: /work/noaa/global/glopara/data/external_gempak/ecmwf
+COMINnam: /work/noaa/global/glopara/data/external_gempak/nam
+COMINukmet: /work/noaa/global/glopara/data/external_gempak/ukmet
diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml
index 52a9f7a365..aea807da63 100644
--- a/workflow/hosts/s4.yaml
+++ b/workflow/hosts/s4.yaml
@@ -13,7 +13,9 @@ QUEUE: s4
 QUEUE_SERVICE: serial
 PARTITION_BATCH: s4
 PARTITION_SERVICE: serial
+RESERVATION: ''
 CHGRP_RSTPROD: 'NO'
+CLUSTERS: ''
 CHGRP_CMD: 'ls'
 HPSSARCH: 'NO'
 HPSS_PROJECT: emc-global
diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml
index cfb141061c..7ae2be1424 100644
--- a/workflow/hosts/wcoss2.yaml
+++ b/workflow/hosts/wcoss2.yaml
@@ -13,7 +13,9 @@ QUEUE: 'dev'
 QUEUE_SERVICE: 'dev_transfer'
 PARTITION_BATCH: ''
 PARTITION_SERVICE: ''
+RESERVATION: ''
 CHGRP_RSTPROD: 'YES'
+CLUSTERS: ''
 CHGRP_CMD: 'chgrp rstprod'
 HPSSARCH: 'NO'
 HPSS_PROJECT: emc-global
@@ -22,3 +24,6 @@ ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}'
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48']
+COMINecmwf: /lfs/h2/emc/global/noscrub/emc.global/data/external_gempak/ecmwf
+COMINnam: /lfs/h2/emc/global/noscrub/emc.global/data/external_gempak/nam
+COMINukmet: /lfs/h2/emc/global/noscrub/emc.global/data/external_gempak/ukmet
diff --git a/workflow/prod.yml b/workflow/prod.yml
index 64783dd611..55717772b5 100644
--- a/workflow/prod.yml
+++ b/workflow/prod.yml
@@ -113,17 +113,6 @@ suites:
                 jgfs_atmos_awips_f( 3,27,6 ):
                   edits:
                     TRDRUN: 'NO'
-            awips_g2:
-              tasks:
-                jgfs_atmos_awips_g2_f( 0,64,6 ):
-                  template: jgfs_atmos_awips_g2_master
-                  triggers:
-                  - task: jgfs_atmos_post_f( )
-                  edits:
-                    FHRGRP: '( )'
-                    FHRLST: 'f( )'
-                    FCSTHR: '( )'
-                    TRDRUN: 'YES'
             gempak:
               tasks:
                 jgfs_atmos_gempak:
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index c46d9ad452..1b357d8ee3 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -1,57 +1,70 @@
 from applications.applications import AppConfig
 from rocoto.tasks import Tasks
 import rocoto.rocoto as rocoto
+from datetime import datetime, timedelta
 
 
 class GEFSTasks(Tasks):
 
-    def __init__(self, app_config: AppConfig, cdump: str) -> None:
-        super().__init__(app_config, cdump)
+    def __init__(self, app_config: AppConfig, run: str) -> None:
+        super().__init__(app_config, run)
 
     def stage_ic(self):
-
         cpl_ic = self._configs['stage_ic']
-
         deps = []
-
+        dtg_prefix = "@Y@m@d.@H0000"
+        offset = str(self._configs['base']['OFFSET_START_HOUR']).zfill(2) + ":00:00"
         # Atm ICs
         if self.app_config.do_atm:
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos"
-            for file in ['gfs_ctrl.nc'] + \
-                        [f'{datatype}_data.tile{tile}.nc'
-                         for datatype in ['gfs', 'sfc']
-                         for tile in range(1, self.n_tiles + 1)]:
-                data = f"{prefix}/{file}"
-                dep_dict = {'type': 'data', 'data': data}
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos/"
+            if self._base['EXP_WARM_START']:
+                for file in ['fv_core.res.nc'] + \
+                            [f'{datatype}.tile{tile}.nc'
+                             for datatype in ['ca_data', 'fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']
+                             for tile in range(1, self.n_tiles + 1)]:
+                    data = [prefix, f"{dtg_prefix}.{file}"]
+                    dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
+                    deps.append(rocoto.add_dependency(dep_dict))
+                prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/med/"
+                data = [prefix, f"{dtg_prefix}.ufs.cpld.cpl.r.nc"]
+                dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
                 deps.append(rocoto.add_dependency(dep_dict))
+            else:
+                for file in ['gfs_ctrl.nc'] + \
+                            [f'{datatype}_data.tile{tile}.nc'
+                             for datatype in ['gfs', 'sfc']
+                             for tile in range(1, self.n_tiles + 1)]:
+                    data = f"{prefix}/{file}"
+                    dep_dict = {'type': 'data', 'data': data}
+                    deps.append(rocoto.add_dependency(dep_dict))
 
         # Ocean ICs
         if self.app_config.do_ocean:
             ocn_res = f"{self._base.get('OCNRES', '025'):03d}"
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean"
-            data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc"
-            dep_dict = {'type': 'data', 'data': data}
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean/"
+            data = [prefix, f"{dtg_prefix}.MOM.res.nc"]
+            dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
             deps.append(rocoto.add_dependency(dep_dict))
             if ocn_res in ['025']:
                 # 0.25 degree ocean model also has these additional restarts
                 for res in [f'res_{res_index}' for res_index in range(1, 4)]:
-                    data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc"
-                    dep_dict = {'type': 'data', 'data': data}
+                    data = [prefix, f"{dtg_prefix}.MOM.{res}.nc"]
+                    dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
                     deps.append(rocoto.add_dependency(dep_dict))
 
         # Ice ICs
         if self.app_config.do_ice:
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice"
-            data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc"
-            dep_dict = {'type': 'data', 'data': data}
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice/"
+            data = [prefix, f"{dtg_prefix}.cice_model.res.nc"]
+            dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
             deps.append(rocoto.add_dependency(dep_dict))
 
         # Wave ICs
         if self.app_config.do_wave:
-            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave"
+            prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave/"
             for wave_grid in self._configs['waveinit']['waveGRD'].split():
-                data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}"
-                dep_dict = {'type': 'data', 'data': data}
+                data = [prefix, f"{dtg_prefix}.restart.{wave_grid}"]
+                dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]}
                 deps.append(rocoto.add_dependency(dep_dict))
 
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
@@ -75,7 +88,7 @@ def stage_ic(self):
     def waveinit(self):
 
         resources = self.get_resource('waveinit')
-        task_name = f'waveinit'
+        task_name = f'wave_init'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'envars': self.envars,
@@ -89,21 +102,44 @@ def waveinit(self):
 
         return task
 
-    def fcst(self):
+    def prep_emissions(self):
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'stage_ic'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
 
-        # TODO: Add real dependencies
+        resources = self.get_resource('prep_emissions')
+        task_name = 'prep_emissions'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'envars': self.envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/prep_emissions.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+        task = rocoto.create_task(task_dict)
+
+        return task
+
+    def fcst(self):
         dependencies = []
         dep_dict = {'type': 'task', 'name': f'stage_ic'}
         dependencies.append(rocoto.add_dependency(dep_dict))
 
         if self.app_config.do_wave:
-            dep_dict = {'type': 'task', 'name': f'waveinit'}
+            dep_dict = {'type': 'task', 'name': f'wave_init'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
+        if self.app_config.do_aero:
+            dep_dict = {'type': 'task', 'name': f'prep_emissions'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
         dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
 
         resources = self.get_resource('fcst')
-        task_name = f'fcst'
+        task_name = f'fcst_mem000'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -124,36 +160,389 @@ def efcs(self):
         dependencies.append(rocoto.add_dependency(dep_dict))
 
         if self.app_config.do_wave:
-            dep_dict = {'type': 'task', 'name': f'waveinit'}
+            dep_dict = {'type': 'task', 'name': f'wave_init'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
+        if self.app_config.do_aero:
+            dep_dict = {'type': 'task', 'name': f'prep_emissions'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
         dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
 
         efcsenvars = self.envars.copy()
-        efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))
-
-        groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP'])
-        var_dict = {'grp': groups}
+        efcsenvars_dict = {'ENSMEM': '#member#',
+                           'MEMDIR': 'mem#member#'
+                           }
+        for key, value in efcsenvars_dict.items():
+            efcsenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         resources = self.get_resource('efcs')
 
-        task_name = f'efcs#grp#'
+        task_name = f'fcst_mem#member#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': efcsenvars,
                      'cycledef': 'gefs',
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/efcs.sh',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': 'efmn',
-                         'var_dict': var_dict,
+        member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])}
+        metatask_dict = {'task_name': 'fcst_ens',
+                         'var_dict': member_var_dict,
                          'task_dict': task_dict
                          }
 
         task = rocoto.create_task(metatask_dict)
 
         return task
+
+    def atmos_prod(self):
+        return self._atmosoceaniceprod('atmos')
+
+    def ocean_prod(self):
+        return self._atmosoceaniceprod('ocean')
+
+    def ice_prod(self):
+        return self._atmosoceaniceprod('ice')
+
+    def _atmosoceaniceprod(self, component: str):
+
+        fhout_ocn_gfs = self._configs['base']['FHOUT_OCN_GFS']
+        fhout_ice_gfs = self._configs['base']['FHOUT_ICE_GFS']
+        products_dict = {'atmos': {'config': 'atmos_products',
+                                   'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+                                   'history_file_tmpl': f'{self.run}.t@Hz.master.grb2f#fhr#'},
+                         'ocean': {'config': 'oceanice_products',
+                                   'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+                                   'history_file_tmpl': f'{self.run}.ocean.t@Hz.{fhout_ocn_gfs}hr_avg.f#fhr_next#.nc'},
+                         'ice': {'config': 'oceanice_products',
+                                 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+                                 'history_file_tmpl': f'{self.run}.ice.t@Hz.{fhout_ice_gfs}hr_avg.f#fhr#.nc'}}
+
+        component_dict = products_dict[component]
+        config = component_dict['config']
+        history_path_tmpl = component_dict['history_path_tmpl']
+        history_file_tmpl = component_dict['history_file_tmpl']
+
+        resources = self.get_resource(config)
+
+        history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl], {'MEMDIR': 'mem#member#'})
+        deps = []
+        data = f'{history_path}/{history_file_tmpl}'
+        if component in ['ocean']:
+            dep_dict = {'type': 'data', 'data': data, 'age': 120}
+            deps.append(rocoto.add_dependency(dep_dict))
+            dep_dict = {'type': 'task', 'name': 'fcst_mem#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+            dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
+        elif component in ['ice']:
+            command = f"{self.HOMEgfs}/ush/check_ice_netcdf.sh @Y @m @d @H #fhr# &ROTDIR; #member# {fhout_ice_gfs}"
+            dep_dict = {'type': 'sh', 'command': command}
+            deps.append(rocoto.add_dependency(dep_dict))
+            dependencies = rocoto.create_dependency(dep=deps)
+        else:
+            dep_dict = {'type': 'data', 'data': data, 'age': 120}
+            deps.append(rocoto.add_dependency(dep_dict))
+            dependencies = rocoto.create_dependency(dep=deps)
+
+        postenvars = self.envars.copy()
+        postenvar_dict = {'ENSMEM': '#member#',
+                          'MEMDIR': 'mem#member#',
+                          'FHR3': '#fhr#',
+                          'COMPONENT': component}
+        for key, value in postenvar_dict.items():
+            postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        task_name = f'{component}_prod_mem#member#_f#fhr#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': postenvars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/{config}.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'}
+
+        fhrs = self._get_forecast_hours('gefs', self._configs[config], component)
+
+        # when replaying, atmos component does not have fhr 0, therefore remove 0 from fhrs
+        is_replay = self._configs[config]['REPLAY_ICS']
+        if is_replay and component in ['atmos'] and 0 in fhrs:
+            fhrs.remove(0)
+
+        # ocean/ice components do not have fhr 0 as they are averaged output
+        if component in ['ocean', 'ice'] and 0 in fhrs:
+            fhrs.remove(0)
+
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+        if component in ['ocean']:
+            fhrs_next = fhrs[1:] + [fhrs[-1] + (fhrs[-1] - fhrs[-2])]
+            fhr_var_dict['fhr_next'] = ' '.join([f"{fhr:03d}" for fhr in fhrs_next])
+
+        fhr_metatask_dict = {'task_name': f'{component}_prod_#member#',
+                             'task_dict': task_dict,
+                             'var_dict': fhr_var_dict}
+
+        member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': f'{component}_prod',
+                                'task_dict': fhr_metatask_dict,
+                                'var_dict': member_var_dict}
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
+
+    def atmos_ensstat(self):
+
+        resources = self.get_resource('atmos_ensstat')
+
+        deps = []
+        for member in range(0, self.nmem + 1):
+            task = f'atmos_prod_mem{member:03d}_f#fhr#'
+            dep_dict = {'type': 'task', 'name': task}
+            deps.append(rocoto.add_dependency(dep_dict))
+
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        postenvars = self.envars.copy()
+        postenvar_dict = {'FHR3': '#fhr#'}
+        for key, value in postenvar_dict.items():
+            postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        task_name = f'atmos_ensstat_f#fhr#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': postenvars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_ensstat.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'}
+
+        fhrs = self._get_forecast_hours('gefs', self._configs['atmos_ensstat'])
+
+        # when replaying, atmos component does not have fhr 0, therefore remove 0 from fhrs
+        is_replay = self._configs['atmos_ensstat']['REPLAY_ICS']
+        if is_replay and 0 in fhrs:
+            fhrs.remove(0)
+
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+        fhr_metatask_dict = {'task_name': f'atmos_ensstat',
+                             'task_dict': task_dict,
+                             'var_dict': fhr_var_dict}
+
+        task = rocoto.create_task(fhr_metatask_dict)
+
+        return task
+
+    def wavepostsbs(self):
+        deps = []
+        for wave_grid in self._configs['wavepostsbs']['waveGRD'].split():
+            wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+            data = f'{wave_hist_path}/gefswave.out_grd.{wave_grid}.@Y@m@d.@H0000'
+            dep_dict = {'type': 'data', 'data': data}
+            deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        wave_post_envars = self.envars.copy()
+        postenvar_dict = {'ENSMEM': '#member#',
+                          'MEMDIR': 'mem#member#',
+                          }
+        for key, value in postenvar_dict.items():
+            wave_post_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        resources = self.get_resource('wavepostsbs')
+
+        task_name = f'wave_post_grid_mem#member#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': wave_post_envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostsbs.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': 'wave_post_grid',
+                                'task_dict': task_dict,
+                                'var_dict': member_var_dict
+                                }
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
+
+    def wavepostbndpnt(self):
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+
+        wave_post_bndpnt_envars = self.envars.copy()
+        postenvar_dict = {'ENSMEM': '#member#',
+                          'MEMDIR': 'mem#member#',
+                          }
+        for key, value in postenvar_dict.items():
+            wave_post_bndpnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        resources = self.get_resource('wavepostbndpnt')
+        task_name = f'wave_post_bndpnt_mem#member#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': wave_post_bndpnt_envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpnt.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': 'wave_post_bndpnt',
+                                'task_dict': task_dict,
+                                'var_dict': member_var_dict
+                                }
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
+
+    def wavepostbndpntbll(self):
+        deps = []
+        atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+
+        # The wavepostbndpntbll job runs on forecast hours up to FHMAX_WAV_IBP
+        last_fhr = self._configs['wave']['FHMAX_WAV_IBP']
+
+        data = f'{atmos_hist_path}/{self.run}.t@Hz.atm.logf{last_fhr:03d}.txt'
+        dep_dict = {'type': 'data', 'data': data}
+        deps.append(rocoto.add_dependency(dep_dict))
+
+        dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+
+        wave_post_bndpnt_bull_envars = self.envars.copy()
+        postenvar_dict = {'ENSMEM': '#member#',
+                          'MEMDIR': 'mem#member#',
+                          }
+        for key, value in postenvar_dict.items():
+            wave_post_bndpnt_bull_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        resources = self.get_resource('wavepostbndpntbll')
+        task_name = f'wave_post_bndpnt_bull_mem#member#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': wave_post_bndpnt_bull_envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpntbll.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': 'wave_post_bndpnt_bull',
+                                'task_dict': task_dict,
+                                'var_dict': member_var_dict
+                                }
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
+
+    def wavepostpnt(self):
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_wave_bnd:
+            dep_dict = {'type': 'task', 'name': f'wave_post_bndpnt_bull_mem#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        wave_post_pnt_envars = self.envars.copy()
+        postenvar_dict = {'ENSMEM': '#member#',
+                          'MEMDIR': 'mem#member#',
+                          }
+        for key, value in postenvar_dict.items():
+            wave_post_pnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        resources = self.get_resource('wavepostpnt')
+        task_name = f'wave_post_pnt_mem#member#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': wave_post_pnt_envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostpnt.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': 'wave_post_pnt',
+                                'task_dict': task_dict,
+                                'var_dict': member_var_dict
+                                }
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
+
+    def extractvars(self):
+        deps = []
+        if self.app_config.do_wave:
+            dep_dict = {'type': 'task', 'name': 'wave_post_grid_mem#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_ocean:
+            dep_dict = {'type': 'metatask', 'name': 'ocean_prod_#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_ice:
+            dep_dict = {'type': 'metatask', 'name': 'ice_prod_#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_atm:
+            dep_dict = {'type': 'metatask', 'name': 'atmos_prod_#member#'}
+            deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+        extractvars_envars = self.envars.copy()
+        extractvars_dict = {'ENSMEM': '#member#',
+                            'MEMDIR': 'mem#member#',
+                            }
+        for key, value in extractvars_dict.items():
+            extractvars_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+        resources = self.get_resource('extractvars')
+        task_name = f'extractvars_mem#member#'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': extractvars_envars,
+                     'cycledef': 'gefs',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/extractvars.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+        member_metatask_dict = {'task_name': 'extractvars',
+                                'task_dict': task_dict,
+                                'var_dict': member_var_dict
+                                }
+
+        task = rocoto.create_task(member_metatask_dict)
+
+        return task
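
For reference, a minimal sketch of how the nested member/forecast-hour metatasks introduced above expand into concrete task names; the member count and hours below are hypothetical.

    # Sketch only: mirrors the {component}_prod_mem#member#_f#fhr# naming above.
    members = [f"{m:03d}" for m in range(0, 3)]     # mem000..mem002
    fhrs = [f"{h:03d}" for h in range(3, 13, 3)]    # f003..f012
    names = [f"atmos_prod_mem{m}_f{h}" for m in members for h in fhrs]
    print(names[0], names[-1])
    # atmos_prod_mem000_f003 atmos_prod_mem002_f012
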
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 0f5e184192..960a7548ab 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -7,13 +7,13 @@
 
 class GFSTasks(Tasks):
 
-    def __init__(self, app_config: AppConfig, cdump: str) -> None:
-        super().__init__(app_config, cdump)
+    def __init__(self, app_config: AppConfig, run: str) -> None:
+        super().__init__(app_config, run)
 
     @staticmethod
-    def _is_this_a_gdas_task(cdump, task_name):
-        if cdump != 'enkfgdas':
-            raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {cdump}')
+    def _is_this_a_gdas_task(run, task_name):
+        if run != 'enkfgdas':
+            raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {run}')
 
     # Specific Tasks begin here
     def stage_ic(self):
@@ -71,12 +71,12 @@ def stage_ic(self):
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('stage_ic')
-        task_name = f'{self.cdump}stage_ic'
+        task_name = f'{self.run}stage_ic'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump,
+                     'cycledef': self.run,
                      'command': f'{self.HOMEgfs}/jobs/rocoto/stage_ic.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -96,25 +96,25 @@ def prep(self):
         dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"],
                                                        {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix})
 
-        gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False
+        gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_runs else False
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': 'gdasatmprod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+        dep_dict = {'type': 'metatask', 'name': 'gdasatmos_prod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
         data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc'
         dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{dump_path}/{self.cdump}.t@Hz.updated.status.tm00.bufr_d'
+        data = f'{dump_path}/{self.run}.t@Hz.updated.status.tm00.bufr_d'
         dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
-        cycledef = self.cdump
-        if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4:
+        cycledef = self.run
+        if self.run in ['gfs'] and gfs_enkf and gfs_cyc != 4:
             cycledef = 'gdas'
 
         resources = self.get_resource('prep')
-        task_name = f'{self.cdump}prep'
+        task_name = f'{self.run}prep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -134,17 +134,17 @@ def waveinit(self):
 
         resources = self.get_resource('waveinit')
         dependencies = None
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
+        cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
         if self.app_config.mode in ['cycled']:
             deps = []
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
             deps.append(rocoto.add_dependency(dep_dict))
-            if self.cdump in ['gdas']:
+            if self.run in ['gdas']:
                 dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
                 deps.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
 
-        task_name = f'{self.cdump}waveinit'
+        task_name = f'{self.run}waveinit'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -163,12 +163,12 @@ def waveinit(self):
     def waveprep(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}waveinit'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
+        cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
         resources = self.get_resource('waveprep')
-        task_name = f'{self.cdump}waveprep'
+        task_name = f'{self.run}waveprep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -197,11 +197,11 @@ def aerosol_init(self):
             dep_dict = {'type': 'data', 'data': data}
             deps.append(rocoto.add_dependency(dep_dict))
 
-        # Calculate offset based on CDUMP = gfs | gdas
+        # Calculate offset based on RUN = gfs | gdas
         interval = None
-        if self.cdump in ['gfs']:
+        if self.run in ['gfs']:
             interval = self._base['INTERVAL_GFS']
-        elif self.cdump in ['gdas']:
+        elif self.run in ['gdas']:
             interval = self._base['INTERVAL']
         offset = timedelta_to_HMS(-interval)
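# Minimal sketch of how the negative cycle-offset strings used in the dependency
# dicts above could be formed; this is an approximation for illustration, not the
# wxflow timedelta_to_HMS implementation, whose edge-case behaviour may differ.
from datetime import timedelta

def to_hms(td: timedelta) -> str:
    sign = '-' if td.total_seconds() < 0 else ''
    seconds = abs(int(td.total_seconds()))
    hours, remainder = divmod(seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f'{sign}{hours:02d}:{minutes:02d}:{seconds:02d}'

print(to_hms(-timedelta(hours=6)))   # -06:00:00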
 
@@ -219,7 +219,7 @@ def aerosol_init(self):
 
         cycledef = 'gfs_seq'
         resources = self.get_resource('aerosol_init')
-        task_name = f'{self.cdump}aerosol_init'
+        task_name = f'{self.run}aerosol_init'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -237,7 +237,7 @@ def aerosol_init(self):
 
     def anal(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_hybvar:
             dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
@@ -247,12 +247,12 @@ def anal(self):
             dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('anal')
-        task_name = f'{self.cdump}anal'
+        task_name = f'{self.run}anal'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/anal.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -267,24 +267,24 @@ def sfcanl(self):
 
         deps = []
         if self.app_config.do_jediatmvar:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'}
         else:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}anal'}
         deps.append(rocoto.add_dependency(dep_dict))
-        if self.app_config.do_jedilandda:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'}
+        if self.app_config.do_jedisnowda:
+            dep_dict = {'type': 'task', 'name': f'{self.run}snowanl'}
             deps.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
         else:
             dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('sfcanl')
-        task_name = f'{self.cdump}sfcanl'
+        task_name = f'{self.run}sfcanl'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/sfcanl.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -299,24 +299,24 @@ def analcalc(self):
 
         deps = []
         if self.app_config.do_jediatmvar:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'}
         else:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}anal'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'}
         deps.append(rocoto.add_dependency(dep_dict))
-        if self.app_config.do_hybvar and self.cdump in ['gdas']:
+        if self.app_config.do_hybvar and self.run in ['gdas']:
             dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
             deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('analcalc')
-        task_name = f'{self.cdump}analcalc'
+        task_name = f'{self.run}analcalc'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -330,17 +330,17 @@ def analcalc(self):
     def analdiag(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}anal'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('analdiag')
-        task_name = f'{self.cdump}analdiag'
+        task_name = f'{self.run}analdiag'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/analdiag.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -354,17 +354,17 @@ def analdiag(self):
     def prepatmiodaobs(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('prepatmiodaobs')
-        task_name = f'{self.cdump}prepatmiodaobs'
+        task_name = f'{self.run}prepatmiodaobs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/prepatmiodaobs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -378,7 +378,7 @@ def prepatmiodaobs(self):
     def atmanlinit(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prepatmiodaobs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prepatmiodaobs'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_hybvar:
             dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
@@ -388,14 +388,14 @@ def atmanlinit(self):
             dependencies = rocoto.create_dependency(dep=deps)
 
         gfs_cyc = self._base["gfs_cyc"]
-        gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False
+        gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_runs else False
 
-        cycledef = self.cdump
-        if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4:
+        cycledef = self.run
+        if self.run in ['gfs'] and gfs_enkf and gfs_cyc != 4:
             cycledef = 'gdas'
 
         resources = self.get_resource('atmanlinit')
-        task_name = f'{self.cdump}atmanlinit'
+        task_name = f'{self.run}atmanlinit'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -411,21 +411,45 @@ def atmanlinit(self):
 
         return task
 
-    def atmanlrun(self):
+    def atmanlvar(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmanlinit'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
-        resources = self.get_resource('atmanlrun')
-        task_name = f'{self.cdump}atmanlrun'
+        resources = self.get_resource('atmanlvar')
+        task_name = f'{self.run}atmanlvar'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlrun.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlvar.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
+    def atmanlfv3inc(self):
+
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmanlvar'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+
+        resources = self.get_resource('atmanlfv3inc')
+        task_name = f'{self.run}atmanlfv3inc'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlfv3inc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -438,17 +462,17 @@ def atmanlrun(self):
     def atmanlfinal(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfv3inc'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('atmanlfinal')
-        task_name = f'{self.cdump}atmanlfinal'
+        task_name = f'{self.run}atmanlfinal'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlfinal.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
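# For reference, a hedged illustration of the reworked variational atmospheric
# analysis chain introduced above (the old atmanlrun task is split into atmanlvar
# followed by atmanlfv3inc, and atmanlfinal now waits on atmanlfv3inc):
chain = ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal']
for upstream, downstream in zip(chain, chain[1:]):
    print(f'gdas{downstream} depends on gdas{upstream}')   # task names for RUN=gdas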
@@ -459,20 +483,45 @@ def atmanlfinal(self):
 
         return task
 
+    def prepobsaero(self):
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        resources = self.get_resource('prepobsaero')
+        task_name = f'{self.run}prepobsaero'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/prepobsaero.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
     def aeroanlinit(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
+        if self.app_config.do_prep_obs_aero:
+            dep_dict = {'type': 'task', 'name': f'{self.run}prepobsaero'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('aeroanlinit')
-        task_name = f'{self.cdump}aeroanlinit'
+        task_name = f'{self.run}aeroanlinit'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlinit.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
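# Sketch of the aeroanlinit dependency selection above: when do_prep_obs_aero is set,
# the new prepobsaero task replaces (rather than supplements) the prep dependency.
def aeroanlinit_upstream(run: str, do_prep_obs_aero: bool) -> str:
    return f'{run}prepobsaero' if do_prep_obs_aero else f'{run}prep'

print(aeroanlinit_upstream('gdas', True))    # gdasprepobsaero
print(aeroanlinit_upstream('gdas', False))   # gdasprep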
@@ -486,17 +535,17 @@ def aeroanlinit(self):
     def aeroanlrun(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlinit'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlinit'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('aeroanlrun')
-        task_name = f'{self.cdump}aeroanlrun'
+        task_name = f'{self.run}aeroanlrun'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlrun.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -510,17 +559,17 @@ def aeroanlrun(self):
     def aeroanlfinal(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlrun'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlrun'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('aeroanlfinal')
-        task_name = f'{self.cdump}aeroanlfinal'
+        task_name = f'{self.run}aeroanlfinal'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/aeroanlfinal.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -531,21 +580,21 @@ def aeroanlfinal(self):
 
         return task
 
-    def preplandobs(self):
+    def prepsnowobs(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
-        resources = self.get_resource('preplandobs')
-        task_name = f'{self.cdump}preplandobs'
+        resources = self.get_resource('prepsnowobs')
+        task_name = f'{self.run}prepsnowobs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/preplandobs.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowobs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -555,21 +604,21 @@ def preplandobs(self):
 
         return task
 
-    def landanl(self):
+    def snowanl(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}preplandobs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowobs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
-        resources = self.get_resource('landanl')
-        task_name = f'{self.cdump}landanl'
+        resources = self.get_resource('snowanl')
+        task_name = f'{self.run}snowanl'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/landanl.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/snowanl.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -583,18 +632,18 @@ def prepoceanobs(self):
         ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})
 
         deps = []
-        data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc'
+        data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc'
         dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('prepoceanobs')
-        task_name = f'{self.cdump}prepoceanobs'
+        task_name = f'{self.run}prepoceanobs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/prepoceanobs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -605,21 +654,24 @@ def prepoceanobs(self):
 
         return task
 
-    def ocnanalprep(self):
+    def marinebmat(self):
+
+        ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}prepoceanobs'}
+        data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc'
+        dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
-        resources = self.get_resource('ocnanalprep')
-        task_name = f'{self.cdump}ocnanalprep'
+        resources = self.get_resource('marinebmat')
+        task_name = f'{self.run}marinebmat'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalprep.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/marinebmat.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -629,21 +681,25 @@ def ocnanalprep(self):
 
         return task
 
-    def ocnanalbmat(self):
+    def ocnanalprep(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}prepoceanobs'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep=deps)
+        dep_dict = {'type': 'task', 'name': f'{self.run}marinebmat'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'task', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
-        resources = self.get_resource('ocnanalbmat')
-        task_name = f'{self.cdump}ocnanalbmat'
+        resources = self.get_resource('ocnanalprep')
+        task_name = f'{self.run}ocnanalprep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalbmat.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalprep.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
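# Hedged summary (illustration only) of the reworked marine-analysis prep dependencies
# above: ocnanalprep now requires all three of these, joined with dep_condition='and'.
ocnanalprep_upstream = {
    'prepoceanobs': 'same cycle',
    'marinebmat': 'same cycle (replaces the former ocnanalbmat step)',
    'gdasfcst': 'previous cycle (offset by -cycle_interval)',
}
for task, when in ocnanalprep_upstream.items():
    print(f'{task}: {when}')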
@@ -656,17 +712,17 @@ def ocnanalbmat(self):
     def ocnanalrun(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalbmat'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalprep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('ocnanalrun')
-        task_name = f'{self.cdump}ocnanalrun'
+        task_name = f'{self.run}ocnanalrun'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalrun.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -677,24 +733,51 @@ def ocnanalrun(self):
 
         return task
 
+    def ocnanalecen(self):
+
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalrun'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+
+        resources = self.get_resource('ocnanalecen')
+        task_name = f'{self.run}ocnanalecen'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalecen.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
     def ocnanalchkpt(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalrun'}
+        if self.app_config.do_hybvar:
+            dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalecen'}
+        else:
+            dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalrun'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_mergensst:
-            data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc'
+            data = f'&ROTDIR;/{self.run}.@Y@m@d/@H/atmos/{self.run}.t@Hz.sfcanl.nc'
             dep_dict = {'type': 'data', 'data': data}
             deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('ocnanalchkpt')
-        task_name = f'{self.cdump}ocnanalchkpt'
+        task_name = f'{self.run}ocnanalchkpt'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalchkpt.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -708,17 +791,17 @@ def ocnanalchkpt(self):
     def ocnanalpost(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalchkpt'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('ocnanalpost')
-        task_name = f'{self.cdump}ocnanalpost'
+        task_name = f'{self.run}ocnanalpost'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalpost.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -732,17 +815,17 @@ def ocnanalpost(self):
     def ocnanalvrfy(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalpost'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('ocnanalvrfy')
-        task_name = f'{self.cdump}ocnanalvrfy'
+        task_name = f'{self.run}ocnanalvrfy'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalvrfy.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -770,24 +853,26 @@ def fcst(self):
     def _fcst_forecast_only(self):
         dependencies = []
 
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}stage_ic'}
         dependencies.append(rocoto.add_dependency(dep_dict))
 
-        if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps:
+        if self.app_config.do_wave and self.run in self.app_config.wave_runs:
             wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit'
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}{wave_job}'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}{wave_job}'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
-        if self.app_config.do_aero:
-            # Calculate offset based on CDUMP = gfs | gdas
+        if self.app_config.do_aero and \
+           self.run in self.app_config.aero_fcst_runs and \
+           not self._base['EXP_WARM_START']:
+            # Calculate offset based on RUN = gfs | gdas
             interval = None
-            if self.cdump in ['gfs']:
+            if self.run in ['gfs']:
                 interval = self._base['INTERVAL_GFS']
-            elif self.cdump in ['gdas']:
+            elif self.run in ['gdas']:
                 interval = self._base['INTERVAL']
             offset = timedelta_to_HMS(-interval)
             deps = []
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}aerosol_init'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}aerosol_init'}
             deps.append(rocoto.add_dependency(dep_dict))
             dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': offset}
             deps.append(rocoto.add_dependency(dep_dict))
@@ -796,12 +881,12 @@ def _fcst_forecast_only(self):
         dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
 
         resources = self.get_resource('fcst')
-        task_name = f'{self.cdump}fcst'
+        task_name = f'{self.run}fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -814,38 +899,38 @@ def _fcst_forecast_only(self):
 
     def _fcst_cycled(self):
 
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'}
         dep = rocoto.add_dependency(dep_dict)
         dependencies = rocoto.create_dependency(dep=dep)
 
         if self.app_config.do_jediocnvar:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}ocnanalpost'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
-        if self.app_config.do_aero:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'}
+        if self.app_config.do_aero and self.run in self.app_config.aero_anl_runs:
+            dep_dict = {'type': 'task', 'name': f'{self.run}aeroanlfinal'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
-        if self.app_config.do_jedilandda:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'}
+        if self.app_config.do_jedisnowda:
+            dep_dict = {'type': 'task', 'name': f'{self.run}snowanl'}
             dependencies.append(rocoto.add_dependency(dep_dict))
 
         dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
 
-        if self.cdump in ['gdas']:
+        if self.run in ['gdas']:
             dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
             dependencies.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies)
 
-        if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}waveprep'}
+        if self.app_config.do_wave and self.run in self.app_config.wave_runs:
+            dep_dict = {'type': 'task', 'name': f'{self.run}waveprep'}
             dependencies.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
 
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
+        cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
 
         resources = self.get_resource('fcst')
-        task_name = f'{self.cdump}fcst'
+        task_name = f'{self.run}fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -863,30 +948,30 @@ def _fcst_cycled(self):
 
     def atmanlupp(self):
         postenvars = self.envars.copy()
-        postenvar_dict = {'FHRLST': 'f000',
+        postenvar_dict = {'FHR3': '000',
                           'UPP_RUN': 'analysis'}
         for key, value in postenvar_dict.items():
             postenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         atm_anl_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_ANALYSIS_TMPL"])
         deps = []
-        data = f'{atm_anl_path}/{self.cdump}.t@Hz.atmanl.nc'
+        data = f'{atm_anl_path}/{self.run}.t@Hz.atmanl.nc'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atm_anl_path}/{self.cdump}.t@Hz.sfcanl.nc'
+        data = f'{atm_anl_path}/{self.run}.t@Hz.sfcanl.nc'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atm_anl_path}/{self.cdump}.t@Hz.loganl.txt'
+        data = f'{atm_anl_path}/{self.run}.t@Hz.loganl.txt'
         dep_dict = {'type': 'data', 'data': data, 'age': 60}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
         resources = self.get_resource('upp')
-        task_name = f'{self.cdump}atmanlupp'
+        task_name = f'{self.run}atmanlupp'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': postenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/upp.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -899,23 +984,23 @@ def atmanlupp(self):
 
     def atmanlprod(self):
         postenvars = self.envars.copy()
-        postenvar_dict = {'FHRLST': '-f001'}
+        postenvar_dict = {'FHR3': '-001'}
         for key, value in postenvar_dict.items():
             postenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"])
         deps = []
-        data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2anl'
+        data = f'{atm_master_path}/{self.run}.t@Hz.master.grb2anl'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
         resources = self.get_resource('atmos_products')
-        task_name = f'{self.cdump}atmanlprod'
+        task_name = f'{self.run}atmanlprod'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': postenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -926,39 +1011,6 @@ def atmanlprod(self):
 
         return task
 
-    @staticmethod
-    def _get_ufs_postproc_grps(cdump, config):
-
-        fhmin = config['FHMIN']
-        fhmax = config['FHMAX']
-        fhout = config['FHOUT']
-
-        # Get a list of all forecast hours
-        fhrs = []
-        if cdump in ['gdas']:
-            fhrs = range(fhmin, fhmax + fhout, fhout)
-        elif cdump in ['gfs']:
-            fhmax = np.max(
-                [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']])
-            fhout = config['FHOUT_GFS']
-            fhmax_hf = config['FHMAX_HF_GFS']
-            fhout_hf = config['FHOUT_HF_GFS']
-            fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf)
-            fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout))
-
-        nfhrs_per_grp = config.get('NFHRS_PER_GROUP', 1)
-        ngrps = len(fhrs) // nfhrs_per_grp if len(fhrs) % nfhrs_per_grp == 0 else len(fhrs) // nfhrs_per_grp + 1
-
-        fhrs = [f'f{fhr:03d}' for fhr in fhrs]
-        fhrs = np.array_split(fhrs, ngrps)
-        fhrs = [fhr.tolist() for fhr in fhrs]
-
-        grp = ' '.join(f'_{fhr[0]}-{fhr[-1]}' if len(fhr) > 1 else f'_{fhr[0]}' for fhr in fhrs)
-        dep = ' '.join([fhr[-1] for fhr in fhrs])
-        lst = ' '.join(['_'.join(fhr) for fhr in fhrs])
-
-        return grp, dep, lst
-
     def atmupp(self):
         return self._upptask(upp_run='forecast', task_id='atmupp')
 
@@ -971,32 +1023,28 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"):
         if upp_run not in VALID_UPP_RUN:
             raise KeyError(f"{upp_run} is invalid; UPP_RUN options are: {('|').join(VALID_UPP_RUN)}")
 
-        varname1, varname2, varname3 = 'grp', 'dep', 'lst'
-        varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['upp'])
-        var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
-
         postenvars = self.envars.copy()
-        postenvar_dict = {'FHRLST': '#lst#',
+        postenvar_dict = {'FHR3': '#fhr#',
                           'UPP_RUN': upp_run}
         for key, value in postenvar_dict.items():
             postenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"])
         deps = []
-        data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm#dep#.nc'
+        data = f'{atm_hist_path}/{self.run}.t@Hz.atmf#fhr#.nc'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atm_hist_path}/{self.cdump}.t@Hz.sfc#dep#.nc'
+        data = f'{atm_hist_path}/{self.run}.t@Hz.sfcf#fhr#.nc'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt'
+        data = f'{atm_hist_path}/{self.run}.t@Hz.atm.logf#fhr#.txt'
         dep_dict = {'type': 'data', 'data': data, 'age': 60}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
+        cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
         resources = self.get_resource('upp')
 
-        task_name = f'{self.cdump}{task_id}#{varname1}#'
+        task_name = f'{self.run}{task_id}_f#fhr#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -1008,95 +1056,86 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"):
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}{task_id}',
+        fhrs = self._get_forecast_hours(self.run, self._configs['upp'])
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+        metatask_dict = {'task_name': f'{self.run}{task_id}',
                          'task_dict': task_dict,
-                         'var_dict': var_dict
+                         'var_dict': fhr_var_dict
                          }
 
         task = rocoto.create_task(metatask_dict)
 
         return task
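# Worked example (hypothetical forecast hours) of the metatask variable built above.
# Each whitespace-separated value of #fhr# yields one expanded task, which is standard
# Rocoto metatask behaviour; the real hours come from _get_forecast_hours.
fhrs = list(range(0, 10, 3))                         # e.g. [0, 3, 6, 9]
fhr_var = ' '.join(f'{fhr:03d}' for fhr in fhrs)     # '000 003 006 009'
print([f'gdasatmupp_f{fhr}' for fhr in fhr_var.split()])
# ['gdasatmupp_f000', 'gdasatmupp_f003', 'gdasatmupp_f006', 'gdasatmupp_f009']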
 
-    def atmprod(self):
+    def atmos_prod(self):
+        return self._atmosoceaniceprod('atmos')
 
-        varname1, varname2, varname3 = 'grp', 'dep', 'lst'
-        varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['atmos_products'])
-        var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
+    def ocean_prod(self):
+        return self._atmosoceaniceprod('ocean')
 
-        postenvars = self.envars.copy()
-        postenvar_dict = {'FHRLST': '#lst#'}
-        for key, value in postenvar_dict.items():
-            postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+    def ice_prod(self):
+        return self._atmosoceaniceprod('ice')
 
-        atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"])
-        deps = []
-        data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2#dep#'
-        dep_dict = {'type': 'data', 'data': data, 'age': 120}
-        deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep=deps)
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
-        resources = self.get_resource('atmos_products')
+    def _atmosoceaniceprod(self, component: str):
 
-        task_name = f'{self.cdump}atmprod#{varname1}#'
-        task_dict = {'task_name': task_name,
-                     'resources': resources,
-                     'dependency': dependencies,
-                     'envars': postenvars,
-                     'cycledef': cycledef,
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
-                     'job_name': f'{self.pslot}_{task_name}_@H',
-                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
-                     'maxtries': '&MAXTRIES;'
-                     }
+        products_dict = {'atmos': {'config': 'atmos_products',
+                                   'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+                                   'history_file_tmpl': f'{self.run}.t@Hz.master.grb2f#fhr#'},
+                         'ocean': {'config': 'oceanice_products',
+                                   'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+                                   'history_file_tmpl': f'{self.run}.ocean.t@Hz.6hr_avg.f#fhr_next#.nc'},
+                         'ice': {'config': 'oceanice_products',
+                                 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+                                 'history_file_tmpl': f'{self.run}.ice.t@Hz.6hr_avg.f#fhr#.nc'}}
 
-        metatask_dict = {'task_name': f'{self.cdump}atmprod',
-                         'task_dict': task_dict,
-                         'var_dict': var_dict
-                         }
-
-        task = rocoto.create_task(metatask_dict)
-
-        return task
-
-    def ocnpost(self):
-
-        varname1, varname2, varname3 = 'grp', 'dep', 'lst'
-        varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['ocnpost'])
-        var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
+        component_dict = products_dict[component]
+        config = component_dict['config']
+        history_path_tmpl = component_dict['history_path_tmpl']
+        history_file_tmpl = component_dict['history_file_tmpl']
 
         postenvars = self.envars.copy()
-        postenvar_dict = {'FHRLST': '#lst#',
-                          'ROTDIR': self.rotdir}
+        postenvar_dict = {'FHR3': '#fhr#', 'COMPONENT': component}
         for key, value in postenvar_dict.items():
             postenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
+        history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl])
         deps = []
-        atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"])
-        data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt'
-        dep_dict = {'type': 'data', 'data': data}
+        data = f'{history_path}/{history_file_tmpl}'
+        dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
-        resources = self.get_resource('ocnpost')
+        dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
+
+        cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
+        resources = self.get_resource(component_dict['config'])
 
-        task_name = f'{self.cdump}ocnpost#{varname1}#'
+        task_name = f'{self.run}{component}_prod_f#fhr#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': postenvars,
                      'cycledef': cycledef,
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/ocnpost.sh',
+                     'command': f"{self.HOMEgfs}/jobs/rocoto/{config}.sh",
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}ocnpost',
+        fhrs = self._get_forecast_hours(self.run, self._configs[config], component)
+
+        # ocean/ice components do not have an fhr-0 product because their output is time-averaged
+        if component in ['ocean', 'ice'] and 0 in fhrs:
+            fhrs.remove(0)
+
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+        if component in ['ocean']:
+            fhrs_next = fhrs[1:] + [fhrs[-1] + (fhrs[-1] - fhrs[-2])]
+            fhr_var_dict['fhr_next'] = ' '.join([f"{fhr:03d}" for fhr in fhrs_next])
+        metatask_dict = {'task_name': f'{self.run}{component}_prod',
                          'task_dict': task_dict,
-                         'var_dict': var_dict
-                         }
+                         'var_dict': fhr_var_dict}
 
         task = rocoto.create_task(metatask_dict)
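# Worked example of the fhr/fhr_next pairing built above for the ocean metatask: the
# ocean data dependency is templated on #fhr_next#, so each forecast hour is paired
# with the following output hour, and the last pair extrapolates the output spacing.
fhrs = [6, 12, 18, 24]                                    # hypothetical 6-hourly output, fhr 0 already removed
fhrs_next = fhrs[1:] + [fhrs[-1] + (fhrs[-1] - fhrs[-2])]
print(list(zip(fhrs, fhrs_next)))                         # [(6, 12), (12, 18), (18, 24), (24, 30)]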
 
@@ -1106,18 +1145,18 @@ def wavepostsbs(self):
         deps = []
         for wave_grid in self._configs['wavepostsbs']['waveGRD'].split():
             wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"])
-            data = f'{wave_hist_path}/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000'
+            data = f'{wave_hist_path}/{self.run}wave.out_grd.{wave_grid}.@Y@m@d.@H0000'
             dep_dict = {'type': 'data', 'data': data}
             deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('wavepostsbs')
-        task_name = f'{self.cdump}wavepostsbs'
+        task_name = f'{self.run}wavepostsbs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostsbs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1130,17 +1169,17 @@ def wavepostsbs(self):
 
     def wavepostbndpnt(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('wavepostbndpnt')
-        task_name = f'{self.cdump}wavepostbndpnt'
+        task_name = f'{self.run}wavepostbndpnt'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpnt.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1152,20 +1191,24 @@ def wavepostbndpnt(self):
         return task
 
     def wavepostbndpntbll(self):
+
+        # The wavepostbndpntbll job runs on forecast hours up to FHMAX_WAV_IBP
+        last_fhr = self._configs['wave']['FHMAX_WAV_IBP']
+
         deps = []
         atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"])
-        data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt'
+        data = f'{atmos_hist_path}/{self.run}.t@Hz.atm.logf{last_fhr:03d}.txt'
         dep_dict = {'type': 'data', 'data': data}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('wavepostbndpntbll')
-        task_name = f'{self.cdump}wavepostbndpntbll'
+        task_name = f'{self.run}wavepostbndpntbll'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpntbll.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
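# Quick check of the boundary-point log dependency built above: the previously
# hard-coded 'logf180' file is now keyed on FHMAX_WAV_IBP (180 is only an example).
run, last_fhr = 'gfs', 180
print(f'{run}.t@Hz.atm.logf{last_fhr:03d}.txt')   # gfs.t@Hz.atm.logf180.txt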
@@ -1178,20 +1221,20 @@ def wavepostbndpntbll(self):
 
     def wavepostpnt(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_wave_bnd:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpntbll'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}wavepostbndpntbll'}
             deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('wavepostpnt')
-        task_name = f'{self.cdump}wavepostpnt'
+        task_name = f'{self.run}wavepostpnt'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostpnt.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1204,17 +1247,17 @@ def wavepostpnt(self):
 
     def wavegempak(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}wavepostsbs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('wavegempak')
-        task_name = f'{self.cdump}wavegempak'
+        task_name = f'{self.run}wavegempak'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/wavegempak.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1227,19 +1270,19 @@ def wavegempak(self):
 
     def waveawipsbulls(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}wavepostsbs'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}wavepostpnt'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('waveawipsbulls')
-        task_name = f'{self.cdump}waveawipsbulls'
+        task_name = f'{self.run}waveawipsbulls'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/waveawipsbulls.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1252,17 +1295,17 @@ def waveawipsbulls(self):
 
     def waveawipsgridded(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}wavepostsbs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('waveawipsgridded')
-        task_name = f'{self.cdump}waveawipsgridded'
+        task_name = f'{self.run}waveawipsgridded'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/waveawipsgridded.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1275,17 +1318,17 @@ def waveawipsgridded(self):
 
     def postsnd(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('postsnd')
-        task_name = f'{self.cdump}postsnd'
+        task_name = f'{self.run}postsnd'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/postsnd.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1298,15 +1341,15 @@ def postsnd(self):
 
     def fbwind(self):
 
-        atmos_prod_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_GRIB_GRID_TMPL"], {'RUN': self.cdump, 'GRID': '0p25'})
+        atmos_prod_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_GRIB_GRID_TMPL"], {'RUN': self.run, 'GRID': '0p25'})
         deps = []
-        data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f006'
+        data = f'{atmos_prod_path}/{self.run}.t@Hz.pgrb2.0p25.f006'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f012'
+        data = f'{atmos_prod_path}/{self.run}.t@Hz.pgrb2.0p25.f012'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
-        data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f024'
+        data = f'{atmos_prod_path}/{self.run}.t@Hz.pgrb2.0p25.f024'
         dep_dict = {'type': 'data', 'data': data, 'age': 120}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
@@ -1317,12 +1360,12 @@ def fbwind(self):
         # prematurely starting with partial files. Unfortunately, the
         # ability to "group" post would make this more convoluted than
         # it should be and not worth the complexity.
-        task_name = f'{self.cdump}fbwind'
+        task_name = f'{self.run}fbwind'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/fbwind.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1334,7 +1377,7 @@ def fbwind(self):
         return task
 
     @staticmethod
-    def _get_awipsgroups(cdump, config):
+    def _get_awipsgroups(run, config):
 
         fhmin = config['FHMIN']
         fhmax = config['FHMAX']
@@ -1342,11 +1385,10 @@ def _get_awipsgroups(cdump, config):
 
         # Get a list of all forecast hours
         fhrs = []
-        if cdump in ['gdas']:
+        if run in ['gdas']:
             fhrs = range(fhmin, fhmax + fhout, fhout)
-        elif cdump in ['gfs']:
-            fhmax = np.max(
-                [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']])
+        elif run in ['gfs']:
+            fhmax = config['FHMAX_GFS']
             fhout = config['FHOUT_GFS']
             fhmax_hf = config['FHMAX_HF_GFS']
             fhout_hf = config['FHOUT_HF_GFS']
@@ -1373,7 +1415,7 @@ def _get_awipsgroups(cdump, config):
     def awips_20km_1p0deg(self):
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
@@ -1385,65 +1427,24 @@ def awips_20km_1p0deg(self):
             awipsenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         varname1, varname2, varname3 = 'grp', 'dep', 'lst'
-        varval1, varval2, varval3 = self._get_awipsgroups(self.cdump, self._configs['awips'])
+        varval1, varval2, varval3 = self._get_awipsgroups(self.run, self._configs['awips'])
         var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
 
         resources = self.get_resource('awips')
 
-        task_name = f'{self.cdump}awips_20km_1p0deg#{varname1}#'
+        task_name = f'{self.run}awips_20km_1p0deg#{varname1}#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': awipsenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/awips_20km_1p0deg.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}awips_20km_1p0deg',
-                         'task_dict': task_dict,
-                         'var_dict': var_dict
-                         }
-
-        task = rocoto.create_task(metatask_dict)
-
-        return task
-
-    def awips_g2(self):
-
-        deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
-        deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep=deps)
-
-        awipsenvars = self.envars.copy()
-        awipsenvar_dict = {'FHRGRP': '#grp#',
-                           'FHRLST': '#lst#',
-                           'ROTDIR': self.rotdir}
-        for key, value in awipsenvar_dict.items():
-            awipsenvars.append(rocoto.create_envar(name=key, value=str(value)))
-
-        varname1, varname2, varname3 = 'grp', 'dep', 'lst'
-        varval1, varval2, varval3 = self._get_awipsgroups(self.cdump, self._configs['awips'])
-        var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
-
-        resources = self.get_resource('awips')
-
-        task_name = f'{self.cdump}awips_g2#{varname1}#'
-        task_dict = {'task_name': task_name,
-                     'resources': resources,
-                     'dependency': dependencies,
-                     'envars': awipsenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/awips_g2.sh',
-                     'job_name': f'{self.pslot}_{task_name}_@H',
-                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
-                     'maxtries': '&MAXTRIES;'
-                     }
-
-        metatask_dict = {'task_name': f'{self.cdump}awips_g2',
+        metatask_dict = {'task_name': f'{self.run}awips_20km_1p0deg',
                          'task_dict': task_dict,
                          'var_dict': var_dict
                          }
@@ -1455,40 +1456,52 @@ def awips_g2(self):
     def gempak(self):
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmos_prod_f#fhr#'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
+        gempak_vars = self.envars.copy()
+        gempak_dict = {'FHR3': '#fhr#'}
+        for key, value in gempak_dict.items():
+            gempak_vars.append(rocoto.create_envar(name=key, value=str(value)))
+
         resources = self.get_resource('gempak')
-        task_name = f'{self.cdump}gempak'
+        task_name = f'{self.run}gempak_f#fhr#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
-                     'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'envars': gempak_vars,
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/gempak.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        task = rocoto.create_task(task_dict)
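+        # Build one gempak task per forecast hour; hours are zero-padded to three digits to match the FHR3 variable above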
+        fhrs = self._get_forecast_hours(self.run, self._configs['gempak'])
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+        fhr_metatask_dict = {'task_name': f'{self.run}gempak',
+                             'task_dict': task_dict,
+                             'var_dict': fhr_var_dict}
+
+        task = rocoto.create_task(fhr_metatask_dict)
 
         return task
 
     def gempakmeta(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}gempak'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('gempak')
-        task_name = f'{self.cdump}gempakmeta'
+        task_name = f'{self.run}gempakmeta'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/gempakmeta.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1501,17 +1514,17 @@ def gempakmeta(self):
 
     def gempakmetancdc(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}gempak'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('gempak')
-        task_name = f'{self.cdump}gempakmetancdc'
+        task_name = f'{self.run}gempakmetancdc'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/gempakmetancdc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1524,17 +1537,17 @@ def gempakmetancdc(self):
 
     def gempakncdcupapgif(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}gempak'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('gempak')
-        task_name = f'{self.cdump}gempakncdcupapgif'
+        task_name = f'{self.run}gempakncdcupapgif'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/gempakncdcupapgif.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1547,42 +1560,65 @@ def gempakncdcupapgif(self):
 
     def gempakpgrb2spec(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}npoess_pgrb2_0p5deg'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}npoess_pgrb2_0p5deg'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
+        gempak_vars = self.envars.copy()
+        gempak_dict = {'FHR3': '#fhr#'}
+        for key, value in gempak_dict.items():
+            gempak_vars.append(rocoto.create_envar(name=key, value=str(value)))
+
         resources = self.get_resource('gempak')
-        task_name = f'{self.cdump}gempakgrb2spec'
+        task_name = f'{self.run}gempakgrb2spec_f#fhr#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
-                     'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'envars': gempak_vars,
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/gempakgrb2spec.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        task = rocoto.create_task(task_dict)
+        # Override forecast lengths locally with those of the gempak GOES job
+        local_config = self._configs['gempak']
+        goes_times = {
+            'FHMAX_HF_GFS': 0,
+            'FHMAX_GFS': local_config['FHMAX_GOES'],
+            'FHOUT_GFS': local_config['FHOUT_GOES'],
+        }
+        local_config.update(goes_times)
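+        # Note: local_config is a reference to self._configs['gempak'], so this update changes the stored config as well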
+
+        fhrs = self._get_forecast_hours(self.run, local_config)
+        fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+        fhr_metatask_dict = {'task_name': f'{self.run}gempakgrb2spec',
+                             'task_dict': task_dict,
+                             'var_dict': fhr_var_dict}
+
+        task = rocoto.create_task(fhr_metatask_dict)
 
         return task
 
     def npoess_pgrb2_0p5deg(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmanlprod'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep=deps)
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}goesupp'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
 
         resources = self.get_resource('npoess')
-        task_name = f'{self.cdump}npoess_pgrb2_0p5deg'
+        task_name = f'{self.run}npoess_pgrb2_0p5deg'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/npoess_pgrb2_0p5deg.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/npoess.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -1594,17 +1630,17 @@ def npoess_pgrb2_0p5deg(self):
 
     def verfozn(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}analdiag'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('verfozn')
-        task_name = f'{self.cdump}verfozn'
+        task_name = f'{self.run}verfozn'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/verfozn.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1617,17 +1653,17 @@ def verfozn(self):
 
     def verfrad(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}analdiag'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('verfrad')
-        task_name = f'{self.cdump}verfrad'
+        task_name = f'{self.run}verfrad'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/verfrad.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1640,17 +1676,17 @@ def verfrad(self):
 
     def vminmon(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}anal'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('vminmon')
-        task_name = f'{self.cdump}vminmon'
+        task_name = f'{self.run}vminmon'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/vminmon.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1663,17 +1699,17 @@ def vminmon(self):
 
     def tracker(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('tracker')
-        task_name = f'{self.cdump}tracker'
+        task_name = f'{self.run}tracker'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/tracker.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1686,17 +1722,17 @@ def tracker(self):
 
     def genesis(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('genesis')
-        task_name = f'{self.cdump}genesis'
+        task_name = f'{self.run}genesis'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/genesis.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1709,17 +1745,17 @@ def genesis(self):
 
     def genesis_fsu(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('genesis_fsu')
-        task_name = f'{self.cdump}genesis_fsu'
+        task_name = f'{self.run}genesis_fsu'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/genesis_fsu.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1732,17 +1768,17 @@ def genesis_fsu(self):
 
     def fit2obs(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('fit2obs')
-        task_name = f'{self.cdump}fit2obs'
+        task_name = f'{self.run}fit2obs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/fit2obs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1755,13 +1791,14 @@ def fit2obs(self):
 
     def metp(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}arch'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         metpenvars = self.envars.copy()
         if self.app_config.mode in ['cycled']:
-            metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE_GFS').strftime("%Y%m%d%H")}
+            metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE_GFS').strftime("%Y%m%d%H"),
+                              'EDATE_GFS': self._base.get('EDATE_GFS').strftime("%Y%m%d%H")}
         elif self.app_config.mode in ['forecast-only']:
             metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE').strftime("%Y%m%d%H")}
         metpenvar_dict['METPCASE'] = '#metpcase#'
@@ -1774,19 +1811,19 @@ def metp(self):
 
         resources = self.get_resource('metp')
 
-        task_name = f'{self.cdump}metp#{varname1}#'
+        task_name = f'{self.run}metp#{varname1}#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': metpenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/metp.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}metp',
+        metatask_dict = {'task_name': f'{self.run}metp',
                          'task_dict': task_dict,
                          'var_dict': var_dict
                          }
@@ -1797,17 +1834,17 @@ def metp(self):
 
     def mos_stn_prep(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_stn_prep')
-        task_name = f'{self.cdump}mos_stn_prep'
+        task_name = f'{self.run}mos_stn_prep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_stn_prep.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1820,17 +1857,17 @@ def mos_stn_prep(self):
 
     def mos_grd_prep(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_grd_prep')
-        task_name = f'{self.cdump}mos_grd_prep'
+        task_name = f'{self.run}mos_grd_prep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_grd_prep.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1843,17 +1880,17 @@ def mos_grd_prep(self):
 
     def mos_ext_stn_prep(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_ext_stn_prep')
-        task_name = f'{self.cdump}mos_ext_stn_prep'
+        task_name = f'{self.run}mos_ext_stn_prep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_stn_prep.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1866,17 +1903,17 @@ def mos_ext_stn_prep(self):
 
     def mos_ext_grd_prep(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_ext_grd_prep')
-        task_name = f'{self.cdump}mos_ext_grd_prep'
+        task_name = f'{self.run}mos_ext_grd_prep'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_grd_prep.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1889,17 +1926,17 @@ def mos_ext_grd_prep(self):
 
     def mos_stn_fcst(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_stn_fcst')
-        task_name = f'{self.cdump}mos_stn_fcst'
+        task_name = f'{self.run}mos_stn_fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_stn_fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1912,20 +1949,20 @@ def mos_stn_fcst(self):
 
     def mos_grd_fcst(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_grd_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_grd_fcst')
-        task_name = f'{self.cdump}mos_grd_fcst'
+        task_name = f'{self.run}mos_grd_fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_grd_fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1938,20 +1975,20 @@ def mos_grd_fcst(self):
 
     def mos_ext_stn_fcst(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_stn_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_ext_stn_fcst')
-        task_name = f'{self.cdump}mos_ext_stn_fcst'
+        task_name = f'{self.run}mos_ext_stn_fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_stn_fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1964,23 +2001,23 @@ def mos_ext_stn_fcst(self):
 
     def mos_ext_grd_fcst(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_stn_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_grd_prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_grd_fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_ext_grd_fcst')
-        task_name = f'{self.cdump}mos_ext_grd_fcst'
+        task_name = f'{self.run}mos_ext_grd_fcst'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_grd_fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -1993,17 +2030,17 @@ def mos_ext_grd_fcst(self):
 
     def mos_stn_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_stn_prdgen')
-        task_name = f'{self.cdump}mos_stn_prdgen'
+        task_name = f'{self.run}mos_stn_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_stn_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2016,20 +2053,20 @@ def mos_stn_prdgen(self):
 
     def mos_grd_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_grd_fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_grd_prdgen')
-        task_name = f'{self.cdump}mos_grd_prdgen'
+        task_name = f'{self.run}mos_grd_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_grd_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2042,20 +2079,20 @@ def mos_grd_prdgen(self):
 
     def mos_ext_stn_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_stn_fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_stn_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_ext_stn_prdgen')
-        task_name = f'{self.cdump}mos_ext_stn_prdgen'
+        task_name = f'{self.run}mos_ext_stn_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_stn_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2068,23 +2105,23 @@ def mos_ext_stn_prdgen(self):
 
     def mos_ext_grd_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_grd_fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_grd_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_stn_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_ext_grd_prdgen')
-        task_name = f'{self.cdump}mos_ext_grd_prdgen'
+        task_name = f'{self.run}mos_ext_grd_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_ext_grd_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2097,17 +2134,17 @@ def mos_ext_grd_prdgen(self):
 
     def mos_wx_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_grd_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('mos_wx_prdgen')
-        task_name = f'{self.cdump}mos_wx_prdgen'
+        task_name = f'{self.run}mos_wx_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_wx_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2120,20 +2157,20 @@ def mos_wx_prdgen(self):
 
     def mos_wx_ext_prdgen(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_ext_grd_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_wx_prdgen'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}mos_wx_prdgen'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('mos_wx_ext_prdgen')
-        task_name = f'{self.cdump}mos_wx_ext_prdgen'
+        task_name = f'{self.run}mos_wx_ext_prdgen'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/mos_wx_ext_prdgen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2146,79 +2183,75 @@ def mos_wx_ext_prdgen(self):
 
     def arch(self):
         deps = []
-        dependencies = []
         if self.app_config.mode in ['cycled']:
-            if self.cdump in ['gfs']:
-                dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'}
+            if self.run in ['gfs']:
+                dep_dict = {'type': 'task', 'name': f'{self.run}atmanlprod'}
                 deps.append(rocoto.add_dependency(dep_dict))
                 if self.app_config.do_vminmon:
-                    dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'}
+                    dep_dict = {'type': 'task', 'name': f'{self.run}vminmon'}
                     deps.append(rocoto.add_dependency(dep_dict))
-            elif self.cdump in ['gdas']:  # Block for handling half cycle dependencies
-                deps2 = []
-                dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'}
-                deps2.append(rocoto.add_dependency(dep_dict))
+            elif self.run in ['gdas']:
+                dep_dict = {'type': 'task', 'name': f'{self.run}atmanlprod'}
+                deps.append(rocoto.add_dependency(dep_dict))
                 if self.app_config.do_fit2obs:
-                    dep_dict = {'type': 'task', 'name': f'{self.cdump}fit2obs'}
-                    deps2.append(rocoto.add_dependency(dep_dict))
+                    dep_dict = {'type': 'task', 'name': f'{self.run}fit2obs'}
+                    deps.append(rocoto.add_dependency(dep_dict))
                 if self.app_config.do_verfozn:
-                    dep_dict = {'type': 'task', 'name': f'{self.cdump}verfozn'}
-                    deps2.append(rocoto.add_dependency(dep_dict))
+                    dep_dict = {'type': 'task', 'name': f'{self.run}verfozn'}
+                    deps.append(rocoto.add_dependency(dep_dict))
                 if self.app_config.do_verfrad:
-                    dep_dict = {'type': 'task', 'name': f'{self.cdump}verfrad'}
-                    deps2.append(rocoto.add_dependency(dep_dict))
+                    dep_dict = {'type': 'task', 'name': f'{self.run}verfrad'}
+                    deps.append(rocoto.add_dependency(dep_dict))
                 if self.app_config.do_vminmon:
-                    dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'}
-                    deps2.append(rocoto.add_dependency(dep_dict))
-                dependencies = rocoto.create_dependency(dep_condition='and', dep=deps2)
-                dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
-                dependencies.append(rocoto.add_dependency(dep_dict))
-                dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies)
-        if self.cdump in ['gfs'] and self.app_config.do_tracker:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}tracker'}
+                    dep_dict = {'type': 'task', 'name': f'{self.run}vminmon'}
+                    deps.append(rocoto.add_dependency(dep_dict))
+        if self.run in ['gfs'] and self.app_config.do_tracker:
+            dep_dict = {'type': 'task', 'name': f'{self.run}tracker'}
             deps.append(rocoto.add_dependency(dep_dict))
-        if self.cdump in ['gfs'] and self.app_config.do_genesis:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis'}
+        if self.run in ['gfs'] and self.app_config.do_genesis:
+            dep_dict = {'type': 'task', 'name': f'{self.run}genesis'}
             deps.append(rocoto.add_dependency(dep_dict))
-        if self.cdump in ['gfs'] and self.app_config.do_genesis_fsu:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis_fsu'}
+        if self.run in ['gfs'] and self.app_config.do_genesis_fsu:
+            dep_dict = {'type': 'task', 'name': f'{self.run}genesis_fsu'}
             deps.append(rocoto.add_dependency(dep_dict))
         # Post job dependencies
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}atmos_prod'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_wave:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}wavepostsbs'}
             deps.append(rocoto.add_dependency(dep_dict))
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}wavepostpnt'}
             deps.append(rocoto.add_dependency(dep_dict))
             if self.app_config.do_wave_bnd:
-                dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'}
+                dep_dict = {'type': 'task', 'name': f'{self.run}wavepostbndpnt'}
                 deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_ocean:
-            if self.app_config.mode in ['forecast-only']:  # TODO: fix ocnpost to run in cycled mode
-                dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'}
+            if self.run in ['gfs']:
+                dep_dict = {'type': 'metatask', 'name': f'{self.run}ocean_prod'}
+                deps.append(rocoto.add_dependency(dep_dict))
+        if self.app_config.do_ice:
+            if self.run in ['gfs']:
+                dep_dict = {'type': 'metatask', 'name': f'{self.run}ice_prod'}
                 deps.append(rocoto.add_dependency(dep_dict))
         # MOS job dependencies
-        if self.cdump in ['gfs'] and self.app_config.do_mos:
+        if self.run in ['gfs'] and self.app_config.do_mos:
             mos_jobs = ["stn_prep", "grd_prep", "ext_stn_prep", "ext_grd_prep",
                         "stn_fcst", "grd_fcst", "ext_stn_fcst", "ext_grd_fcst",
                         "stn_prdgen", "grd_prdgen", "ext_stn_prdgen", "ext_grd_prdgen",
                         "wx_prdgen", "wx_ext_prdgen"]
             for job in mos_jobs:
-                dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_{job}'}
+                dep_dict = {'type': 'task', 'name': f'{self.run}mos_{job}'}
                 deps.append(rocoto.add_dependency(dep_dict))
 
-        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps + dependencies)
-
-        cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('arch')
-        task_name = f'{self.cdump}arch'
+        task_name = f'{self.run}arch'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': cycledef,
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/arch.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2232,22 +2265,37 @@ def arch(self):
     # Cleanup
     def cleanup(self):
         deps = []
-        if 'enkf' in self.cdump:
-            dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eamn'}
+        if 'enkf' in self.run:
+            dep_dict = {'type': 'metatask', 'name': f'{self.run}eamn'}
             deps.append(rocoto.add_dependency(dep_dict))
         else:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}arch'}
             deps.append(rocoto.add_dependency(dep_dict))
 
+        if self.app_config.do_gempak:
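+            # When GEMPAK is on, wait for its downstream products before cleaning up the cycle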
+            if self.run in ['gdas']:
+                dep_dict = {'type': 'task', 'name': f'{self.run}gempakmetancdc'}
+                deps.append(rocoto.add_dependency(dep_dict))
+            elif self.run in ['gfs']:
+                dep_dict = {'type': 'task', 'name': f'{self.run}gempakmeta'}
+                deps.append(rocoto.add_dependency(dep_dict))
+                dep_dict = {'type': 'task', 'name': f'{self.run}gempakncdcupapgif'}
+                deps.append(rocoto.add_dependency(dep_dict))
+                if self.app_config.do_goes:
+                    dep_dict = {'type': 'metatask', 'name': f'{self.run}gempakgrb2spec'}
+                    deps.append(rocoto.add_dependency(dep_dict))
+                    dep_dict = {'type': 'task', 'name': f'{self.run}npoess_pgrb2_0p5deg'}
+                    deps.append(rocoto.add_dependency(dep_dict))
+
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('cleanup')
-        task_name = f'{self.cdump}cleanup'
+        task_name = f'{self.run}cleanup'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/cleanup.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2261,19 +2309,19 @@ def cleanup(self):
     # Start of ensemble tasks
     def eobs(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prep'}
         deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('eobs')
-        task_name = f'{self.cdump}eobs'
+        task_name = f'{self.run}eobs'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/eobs.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2286,32 +2334,33 @@ def eobs(self):
 
     def eomg(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}eobs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         eomgenvars = self.envars.copy()
-        eomgenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))
-
-        groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['eobs']['NMEM_EOMGGRP'])
-
-        var_dict = {'grp': groups}
+        eomgenvars_dict = {'ENSMEM': '#member#',
+                           'MEMDIR': 'mem#member#'
+                           }
+        for key, value in eomgenvars_dict.items():
+            eomgenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
         resources = self.get_resource('eomg')
-        task_name = f'{self.cdump}eomg#grp#'
+        task_name = f'{self.run}eomg_mem#member#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': eomgenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/eomg.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}eomn',
-                         'var_dict': var_dict,
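+        # Run eomg once per ensemble member; member IDs are zero-padded to three digits (001 ... self.nmem)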
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(1, self.nmem + 1)])}
+        metatask_dict = {'task_name': f'{self.run}eomg',
+                         'var_dict': member_var_dict,
                          'task_dict': task_dict,
                          }
 
@@ -2321,17 +2370,17 @@ def eomg(self):
 
     def ediag(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}eobs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('ediag')
-        task_name = f'{self.cdump}ediag'
+        task_name = f'{self.run}ediag'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ediag.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2345,19 +2394,19 @@ def ediag(self):
     def eupd(self):
         deps = []
         if self.app_config.lobsdiag_forenkf:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}ediag'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}ediag'}
         else:
-            dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eomn'}
+            dep_dict = {'type': 'metatask', 'name': f'{self.run}eomg'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('eupd')
-        task_name = f'{self.cdump}eupd'
+        task_name = f'{self.run}eupd'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/eupd.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2370,7 +2419,7 @@ def eupd(self):
 
     def atmensanlinit(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prepatmiodaobs'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepatmiodaobs'}
         deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
@@ -2378,7 +2427,7 @@ def atmensanlinit(self):
 
         cycledef = "gdas"
         resources = self.get_resource('atmensanlinit')
-        task_name = f'{self.cdump}atmensanlinit'
+        task_name = f'{self.run}atmensanlinit'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -2394,23 +2443,49 @@ def atmensanlinit(self):
 
         return task
 
-    def atmensanlrun(self):
+    def atmensanlletkf(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlinit'}
         deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
-        resources = self.get_resource('atmensanlrun')
-        task_name = f'{self.cdump}atmensanlrun'
+        resources = self.get_resource('atmensanlletkf')
+        task_name = f'{self.run}atmensanlletkf'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlrun.sh',
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlletkf.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
+    def atmensanlfv3inc(self):
+
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlletkf'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        resources = self.get_resource('atmensanlfv3inc')
+        task_name = f'{self.run}atmensanlfv3inc'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlfv3inc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
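
The `-{timedelta_to_HMS(...)}` offsets on the `enkfgdasepmn` dependencies are negative cycle offsets in Rocoto's `HH:MM:SS` form. A minimal sketch of that formatting, assuming wxflow's `timedelta_to_HMS` behaves like the local helper below (its implementation is not part of this diff):

```python
from datetime import timedelta

def timedelta_to_hms(td: timedelta) -> str:
    """Format a timedelta as HH:MM:SS (sketch of the assumed wxflow behavior)."""
    total_seconds = int(td.total_seconds())
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}"

# With assim_freq=6 the cycle_interval is 6 hours, so the dependency offset
# rendered into the Rocoto XML is "-06:00:00" (the previous cycle's epmn metatask).
cycle_interval = timedelta(hours=6)
print(f"-{timedelta_to_hms(cycle_interval)}")   # -06:00:00
```
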
@@ -2423,17 +2498,17 @@ def atmensanlrun(self):
     def atmensanlfinal(self):
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfv3inc'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         resources = self.get_resource('atmensanlfinal')
-        task_name = f'{self.cdump}atmensanlfinal'
+        task_name = f'{self.run}atmensanlfinal'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlfinal.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2470,12 +2545,12 @@ def _get_ecengroups():
             return grp, dep, lst
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_jediatmens:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'}
         else:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}eupd'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
@@ -2491,19 +2566,19 @@ def _get_ecengroups():
 
         resources = self.get_resource('ecen')
 
-        task_name = f'{self.cdump}ecen#{varname1}#'
+        task_name = f'{self.run}ecen#{varname1}#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': ecenenvars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/ecen.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}ecmn',
+        metatask_dict = {'task_name': f'{self.run}ecmn',
                          'var_dict': var_dict,
                          'task_dict': task_dict
                          }
@@ -2513,25 +2588,25 @@ def _get_ecengroups():
 
     def esfc(self):
 
-        # eupd_cdump = 'gdas' if 'gdas' in self.app_config.eupd_cdumps else 'gfs'
+        # eupd_run = 'gdas' if 'gdas' in self.app_config.eupd_runs else 'gfs'
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_jediatmens:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'}
         else:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'}
+            dep_dict = {'type': 'task', 'name': f'{self.run}eupd'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
         resources = self.get_resource('esfc')
-        task_name = f'{self.cdump}esfc'
+        task_name = f'{self.run}esfc'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
-                     'cycledef': self.cdump.replace('enkf', ''),
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/esfc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
@@ -2545,9 +2620,9 @@ def esfc(self):
     def efcs(self):
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ecmn'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}ecmn'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}esfc'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}esfc'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
         dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
@@ -2555,31 +2630,30 @@ def efcs(self):
         dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies)
 
         efcsenvars = self.envars.copy()
-        efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))
+        efcsenvars_dict = {'ENSMEM': '#member#',
+                           'MEMDIR': 'mem#member#'
+                           }
+        for key, value in efcsenvars_dict.items():
+            efcsenvars.append(rocoto.create_envar(name=key, value=str(value)))
 
-        groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP'])
-
-        if self.cdump == "enkfgfs":
-            groups = self._get_hybgroups(self._base['NMEM_ENS_GFS'], self._configs['efcs']['NMEM_EFCSGRP_GFS'])
-        cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '')
+        cycledef = 'gdas_half,gdas' if self.run in ['enkfgdas'] else self.run.replace('enkf', '')
         resources = self.get_resource('efcs')
 
-        var_dict = {'grp': groups}
-
-        task_name = f'{self.cdump}efcs#grp#'
+        task_name = f'{self.run}fcst_mem#member#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': efcsenvars,
                      'cycledef': cycledef,
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/efcs.sh',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}efmn',
-                         'var_dict': var_dict,
+        member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(1, self.nmem + 1)])}
+        metatask_dict = {'task_name': f'{self.run}fcst',
+                         'var_dict': member_var_dict,
                          'task_dict': task_dict
                          }
 
@@ -2589,19 +2663,19 @@ def efcs(self):
 
     def echgres(self):
 
-        self._is_this_a_gdas_task(self.cdump, 'echgres')
+        self._is_this_a_gdas_task(self.run, 'echgres')
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}fcst'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}efcs01'}
+        dep_dict = {'type': 'task', 'name': f'{self.run}fcst_mem001'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
 
-        cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump
+        cycledef = 'gdas_half,gdas' if self.run in ['enkfgdas'] else self.run
 
         resources = self.get_resource('echgres')
-        task_name = f'{self.cdump}echgres'
+        task_name = f'{self.run}echgres'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -2623,7 +2697,7 @@ def _get_eposgroups(epos):
             fhmin = epos['FHMIN_ENKF']
             fhmax = epos['FHMAX_ENKF']
             fhout = epos['FHOUT_ENKF']
-            if self.cdump == "enkfgfs":
+            if self.run == "enkfgfs":
                 fhmax = epos['FHMAX_ENKF_GFS']
                 fhout = epos['FHOUT_ENKF_GFS']
             fhrs = range(fhmin, fhmax + fhout, fhout)
@@ -2642,7 +2716,7 @@ def _get_eposgroups(epos):
             return grp, dep, lst
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}efmn'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
@@ -2656,11 +2730,11 @@ def _get_eposgroups(epos):
         varval1, varval2, varval3 = _get_eposgroups(self._configs['epos'])
         var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
 
-        cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '')
+        cycledef = 'gdas_half,gdas' if self.run in ['enkfgdas'] else self.run.replace('enkf', '')
 
         resources = self.get_resource('epos')
 
-        task_name = f'{self.cdump}epos#{varname1}#'
+        task_name = f'{self.run}epos#{varname1}#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -2672,7 +2746,7 @@ def _get_eposgroups(epos):
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}epmn',
+        metatask_dict = {'task_name': f'{self.run}epmn',
                          'var_dict': var_dict,
                          'task_dict': task_dict
                          }
@@ -2684,34 +2758,34 @@ def _get_eposgroups(epos):
     def earc(self):
 
         deps = []
-        dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}epmn'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
         earcenvars = self.envars.copy()
         earcenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))
 
-        groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['earc']['NMEM_EARCGRP'], start_index=0)
-
-        cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '')
+        # Integer division is floor division, but we need ceiling division
+        n_groups = -(self.nmem // -self._configs['earc']['NMEM_EARCGRP'])
+        groups = ' '.join([f'{grp:02d}' for grp in range(0, n_groups + 1)])
 
         resources = self.get_resource('earc')
 
         var_dict = {'grp': groups}
 
-        task_name = f'{self.cdump}earc#grp#'
+        task_name = f'{self.run}earc#grp#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': earcenvars,
-                     'cycledef': cycledef,
+                     'cycledef': self.run.replace('enkf', ''),
                      'command': f'{self.HOMEgfs}/jobs/rocoto/earc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
                      }
 
-        metatask_dict = {'task_name': f'{self.cdump}eamn',
+        metatask_dict = {'task_name': f'{self.run}eamn',
                          'var_dict': var_dict,
                          'task_dict': task_dict
                          }
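
The `earc` grouping now uses an inline ceiling division in place of the removed `_get_hybgroups` helper: `-(a // -b)` computes `ceil(a / b)` with integers only. A quick check with hypothetical member and group sizes:

```python
import math

# -(a // -b) is integer ceiling division: negate, floor-divide, negate again.
nmem = 80                 # hypothetical ensemble size
nmem_per_group = 10       # hypothetical NMEM_EARCGRP
n_groups = -(nmem // -nmem_per_group)
assert n_groups == math.ceil(nmem / nmem_per_group) == 8

# Works when the members do not divide evenly into groups as well.
assert -(81 // -10) == math.ceil(81 / 10) == 9
```
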
diff --git a/workflow/rocoto/rocoto.py b/workflow/rocoto/rocoto.py
index 679c0952ed..0abb56cafb 100644
--- a/workflow/rocoto/rocoto.py
+++ b/workflow/rocoto/rocoto.py
@@ -8,6 +8,7 @@
 
     ABOUT:
         Helper module to create tasks, metatasks, and dependencies for Rocoto
+        Rocoto documentation is available at https://christopherwharrop.github.io/rocoto
 '''
 
 __all__ = ['create_task',
@@ -182,7 +183,8 @@ def add_dependency(dep_dict: Dict[str, Any]) -> str:
                'data': _add_data_tag,
                'cycleexist': _add_cycle_tag,
                'streq': _add_streq_tag,
-               'strneq': _add_streq_tag}
+               'strneq': _add_streq_tag,
+               'sh': _add_sh_tag}
 
     dep_condition = dep_dict.get('condition', None)
     dep_type = dep_dict.get('type', None)
@@ -333,6 +335,31 @@ def _add_streq_tag(dep_dict: Dict[str, Any]) -> str:
     return string
 
 
+def _add_sh_tag(dep_dict: Dict[str, Any]) -> str:
+    """
+    create a simple shell execution tag
+    :param: dep_dict: shell command to execute
+    :type dep_dict: dict
+    :return: Rocoto simple shell execution dependency
+    :rtype: str
+    """
+
+    shell = dep_dict.get('shell', '/bin/sh')
+    command = dep_dict.get('command', 'echo "Hello World"')
+
+    if '@' in command:
+        offset_string_b = '<cyclestr>'
+        offset_string_e = '</cyclestr>'
+    else:
+        offset_string_b = ''
+        offset_string_e = ''
+    cmd = f'{offset_string_b}{command}{offset_string_e}'
+
+    string = f'<sh shell="{shell}">{cmd}</sh>'
+
+    return string
+
+
 def _traverse(o, tree_types=(list, tuple)):
     """
     Traverse through a list of lists or tuples and yield the value
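
The new `sh` dependency type lets a workflow express a shell-command dependency; commands containing `@` cycle codes are wrapped in `<cyclestr>` so Rocoto expands them per cycle. A standalone restatement of the helper with example inputs (the commands shown are illustrative):

```python
# Standalone restatement of the _add_sh_tag logic above, with example inputs.
def add_sh_dependency(dep_dict):
    shell = dep_dict.get('shell', '/bin/sh')
    command = dep_dict.get('command', 'echo "Hello World"')
    if '@' in command:
        command = f'<cyclestr>{command}</cyclestr>'
    return f'<sh shell="{shell}">{command}</sh>'

print(add_sh_dependency({'command': 'ls /some/static/path'}))
# <sh shell="/bin/sh">ls /some/static/path</sh>
print(add_sh_dependency({'shell': '/bin/bash', 'command': 'test -s /logs/@Y@m@d@H/prep.log'}))
# <sh shell="/bin/bash"><cyclestr>test -s /logs/@Y@m@d@H/prep.log</cyclestr></sh>
```
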
diff --git a/workflow/rocoto/tasks_emc.py b/workflow/rocoto/tasks_emc.py
index 1c79de0c19..353d2aa943 100644
--- a/workflow/rocoto/tasks_emc.py
+++ b/workflow/rocoto/tasks_emc.py
@@ -1,9 +1,11 @@
 #!/usr/bin/env python3
 
+import copy
 import numpy as np
 from applications.applications import AppConfig
 import rocoto.rocoto as rocoto
 from wxflow import Template, TemplateConstants, to_timedelta
+from typing import List
 
 __all__ = ['Tasks']
 
@@ -12,21 +14,21 @@ class Tasks:
     SERVICE_TASKS = ['arch', 'earc']
     VALID_TASKS = ['aerosol_init', 'stage_ic',
                    'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup",
-                   'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal',
+                   'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal',
                    'prepoceanobs',
-                   'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
+                   'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
                    'earc', 'ecen', 'echgres', 'ediag', 'efcs',
                    'eobs', 'eomg', 'epos', 'esfc', 'eupd',
-                   'atmensanlinit', 'atmensanlrun', 'atmensanlfinal',
+                   'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal',
                    'aeroanlinit', 'aeroanlrun', 'aeroanlfinal',
-                   'preplandobs', 'landanl',
+                   'prepsnowobs', 'snowanl',
                    'fcst',
-                   'atmanlupp', 'atmanlprod', 'atmupp', 'atmprod', 'goesupp',
-                   'ocnpost',
+                   'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp',
+                   'atmos_prod', 'ocean_prod', 'ice_prod',
                    'verfozn', 'verfrad', 'vminmon',
                    'metp',
                    'tracker', 'genesis', 'genesis_fsu',
-                   'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwind',
+                   'postsnd', 'awips_20km_1p0deg', 'fbwind',
                    'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 'gempakpgrb2spec', 'npoess_pgrb2_0p5deg',
                    'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit',
                    'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep',
@@ -35,32 +37,43 @@ class Tasks:
                    'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst',
                    'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', 'mos_wx_prdgen', 'mos_wx_ext_prdgen']
 
-    def __init__(self, app_config: AppConfig, cdump: str) -> None:
+    def __init__(self, app_config: AppConfig, run: str) -> None:
 
-        self.app_config = app_config
-        self.cdump = cdump
+        self.app_config = copy.deepcopy(app_config)
+        self.run = run
+        # Re-source the configs with RUN specified
+        print(f"Source configs with RUN={run}")
+        self._configs = self.app_config.source_configs(run=run, log=False)
 
+        # Update the base config for the application
+        self._configs['base'] = self.app_config.update_base(self._configs['base'])
         # Save dict_configs and base in the internal state (never know where it may be needed)
-        self._configs = self.app_config.configs
         self._base = self._configs['base']
+
         self.HOMEgfs = self._base['HOMEgfs']
         self.rotdir = self._base['ROTDIR']
         self.pslot = self._base['PSLOT']
+        if self.run == "enkfgfs":
+            self.nmem = int(self._base['NMEM_ENS_GFS'])
+        else:
+            self.nmem = int(self._base['NMEM_ENS'])
         self._base['cycle_interval'] = to_timedelta(f'{self._base["assim_freq"]}H')
 
         self.n_tiles = 6  # TODO - this needs to be elsewhere
 
+        # DATAROOT is set by prod_envir in ops.  Here, we use `STMP` to construct DATAROOT
+        dataroot_str = f"{self._base.get('STMP')}/RUNDIRS/{self._base.get('PSLOT')}/{self.run}.<cyclestr>@Y@m@d@H</cyclestr>"
         envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'),
                       'HOMEgfs': self.HOMEgfs,
                       'EXPDIR': self._base.get('EXPDIR'),
                       'NET': self._base.get('NET'),
-                      'CDUMP': self.cdump,
-                      'RUN': self.cdump,
+                      'RUN': self.run,
                       'CDATE': '<cyclestr>@Y@m@d@H</cyclestr>',
                       'PDY': '<cyclestr>@Y@m@d</cyclestr>',
                       'cyc': '<cyclestr>@H</cyclestr>',
                       'COMROOT': self._base.get('COMROOT'),
-                      'DATAROOT': self._base.get('DATAROOT')}
+                      'DATAROOT': dataroot_str}
+
         self.envars = self._set_envars(envar_dict)
 
     @staticmethod
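
`DATAROOT` is now assembled from `STMP`, `PSLOT`, and the RUN rather than read from the base config, with a `<cyclestr>` placeholder so each cycle gets its own run directory. A small sketch with hypothetical `STMP`/`PSLOT` values showing the string that lands in the Rocoto `<envar>`:

```python
# Hypothetical base-config values; in the workflow these come from config.base.
base = {'STMP': '/scratch/stmp/user', 'PSLOT': 'testexp'}
run = 'enkfgdas'

dataroot_str = f"{base.get('STMP')}/RUNDIRS/{base.get('PSLOT')}/{run}.<cyclestr>@Y@m@d@H</cyclestr>"
print(dataroot_str)
# /scratch/stmp/user/RUNDIRS/testexp/enkfgdas.<cyclestr>@Y@m@d@H</cyclestr>
# Rocoto expands the <cyclestr> tag, e.g. to .../enkfgdas.2024060100 for the
# 2024-06-01 00Z cycle.
```
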
@@ -72,12 +85,6 @@ def _set_envars(envar_dict) -> list:
 
         return envars
 
-    @staticmethod
-    def _get_hybgroups(nens: int, nmem_per_group: int, start_index: int = 1):
-        ngrps = nens / nmem_per_group
-        groups = ' '.join([f'{x:02d}' for x in range(start_index, int(ngrps) + 1)])
-        return groups
-
     def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str:
         '''
         Takes a string templated with ${ } and converts it into a string suitable
@@ -87,8 +94,8 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
 
           Variables substitued by default:
             ${ROTDIR} -> '&ROTDIR;'
-            ${RUN}    -> self.cdump
-            ${DUMP}   -> self.cdump
+            ${RUN}    -> self.run
+            ${DUMP}   -> self.run
             ${MEMDIR} -> ''
             ${YMD}    -> '@Y@m@d'
             ${HH}     -> '@H'
@@ -110,8 +117,8 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
         # Defaults
         rocoto_conversion_dict = {
             'ROTDIR': '&ROTDIR;',
-            'RUN': self.cdump,
-            'DUMP': self.cdump,
+            'RUN': self.run,
+            'DUMP': self.run,
             'MEMDIR': '',
             'YMD': '@Y@m@d',
             'HH': '@H'
@@ -123,12 +130,49 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
                                              TemplateConstants.DOLLAR_CURLY_BRACE,
                                              rocoto_conversion_dict.get)
 
+    @staticmethod
+    def _get_forecast_hours(run, config, component='atmos') -> List[int]:
+        # Make a local copy of the config to avoid modifying the original
+        local_config = config.copy()
+
+        # Ocean/Ice components do not have a HF output option like the atmosphere
+        if component in ['ocean', 'ice']:
+            local_config['FHMAX_HF_GFS'] = 0
+
+        if component in ['ocean']:
+            local_config['FHOUT_HF_GFS'] = config['FHOUT_OCN_GFS']
+            local_config['FHOUT_GFS'] = config['FHOUT_OCN_GFS']
+            local_config['FHOUT'] = config['FHOUT_OCN']
+
+        if component in ['ice']:
+            local_config['FHOUT_HF_GFS'] = config['FHOUT_ICE_GFS']
+            local_config['FHOUT_GFS'] = config['FHOUT_ICE_GFS']
+            local_config['FHOUT'] = config['FHOUT_ICE']
+
+        fhmin = local_config['FHMIN']
+
+        # Get a list of all forecast hours
+        fhrs = []
+        if run in ['gdas']:
+            fhmax = local_config['FHMAX']
+            fhout = local_config['FHOUT']
+            fhrs = list(range(fhmin, fhmax + fhout, fhout))
+        elif run in ['gfs', 'gefs']:
+            fhmax = local_config['FHMAX_GFS']
+            fhout = local_config['FHOUT_GFS']
+            fhmax_hf = local_config['FHMAX_HF_GFS']
+            fhout_hf = local_config['FHOUT_HF_GFS']
+            fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf)
+            fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout))
+
+        return fhrs
+
     def get_resource(self, task_name):
         """
         Given a task name (task_name) and its configuration (task_names),
         return a dictionary of resources (task_resource) used by the task.
         Task resource dictionary includes:
-        account, walltime, cores, nodes, ppn, threads, memory, queue, partition, native
+        account, walltime, ntasks, nodes, ppn, threads, memory, queue, partition, native
         """
 
         scheduler = self.app_config.scheduler
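
`_get_forecast_hours` concatenates the high-frequency and regular output windows for `gfs`/`gefs` and falls back to a single `FHMIN..FHMAX` range for `gdas`; ocean and ice override the output intervals and disable high-frequency output. A worked example with hypothetical output settings:

```python
# Worked example of the hour list _get_forecast_hours builds (the values are
# illustrative; the real ones come from the task configs).
fhmin, fhout_hf, fhmax_hf, fhout, fhmax = 0, 3, 24, 6, 72

fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf)            # 0, 3, ..., 24
fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout))
print(fhrs)
# [0, 3, 6, 9, 12, 15, 18, 21, 24, 30, 36, 42, 48, 54, 60, 66, 72]

# For ocean/ice, FHMAX_HF_GFS is forced to 0, so the high-frequency range
# collapses to [FHMIN] and the list is effectively FHMIN..FHMAX_GFS in steps
# of FHOUT_OCN_GFS/FHOUT_ICE_GFS.
```
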
@@ -137,32 +181,28 @@ def get_resource(self, task_name):
 
         account = task_config['ACCOUNT']
 
-        walltime = task_config[f'wtime_{task_name}']
-        if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys():
-            walltime = task_config[f'wtime_{task_name}_gfs']
+        walltime = task_config['walltime']
+        ntasks = task_config['ntasks']
+        ppn = task_config['tasks_per_node']
 
-        cores = task_config[f'npe_{task_name}']
-        if self.cdump in ['gfs'] and f'npe_{task_name}_gfs' in task_config.keys():
-            cores = task_config[f'npe_{task_name}_gfs']
+        nodes = int(np.ceil(float(ntasks) / float(ppn)))
 
-        ppn = task_config[f'npe_node_{task_name}']
-        if self.cdump in ['gfs'] and f'npe_node_{task_name}_gfs' in task_config.keys():
-            ppn = task_config[f'npe_node_{task_name}_gfs']
+        threads = task_config['threads_per_task']
 
-        nodes = int(np.ceil(float(cores) / float(ppn)))
+        # Memory is not required
+        memory = task_config.get('memory', None)
 
-        threads = task_config[f'nth_{task_name}']
-        if self.cdump in ['gfs'] and f'nth_{task_name}_gfs' in task_config.keys():
-            threads = task_config[f'nth_{task_name}_gfs']
-
-        memory = task_config.get(f'memory_{task_name}', None)
         if scheduler in ['pbspro']:
             if task_config.get('prepost', False):
                 memory += ':prepost=true'
 
         native = None
         if scheduler in ['pbspro']:
-            native = '-l debug=true,place=vscatter'
+            # Set place=vscatter by default and debug=true if DEBUG_POSTSCRIPT="YES"
+            if self._base['DEBUG_POSTSCRIPT']:
+                native = '-l debug=true,place=vscatter'
+            else:
+                native = '-l place=vscatter'
             # Set either exclusive or shared - default on WCOSS2 is exclusive when not set
             if task_config.get('is_exclusive', False):
                 native += ':exclhost'
@@ -170,6 +210,10 @@ def get_resource(self, task_name):
                 native += ':shared'
         elif scheduler in ['slurm']:
             native = '--export=NONE'
+            if task_config['RESERVATION'] != "":
+                native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION']
+            if task_config.get('CLUSTERS', "") not in ["", '@CLUSTERS@']:
+                native += ' --clusters=' + task_config['CLUSTERS']
 
         queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE']
 
@@ -181,7 +225,7 @@ def get_resource(self, task_name):
         task_resource = {'account': account,
                          'walltime': walltime,
                          'nodes': nodes,
-                         'cores': cores,
+                         'ntasks': ntasks,
                          'ppn': ppn,
                          'threads': threads,
                          'memory': memory,
diff --git a/workflow/rocoto/tasks_gsl.py b/workflow/rocoto/tasks_gsl.py
index 371721bbb9..3ebd2d9437 100644
--- a/workflow/rocoto/tasks_gsl.py
+++ b/workflow/rocoto/tasks_gsl.py
@@ -1,9 +1,11 @@
 #!/usr/bin/env python3
 
+import copy
 import numpy as np
 from applications.applications import AppConfig
 import rocoto.rocoto as rocoto
 from wxflow import Template, TemplateConstants, to_timedelta
+from typing import List
 
 __all__ = ['Tasks']
 
@@ -12,21 +14,21 @@ class Tasks:
     SERVICE_TASKS = ['arch', 'earc']
     VALID_TASKS = ['aerosol_init', 'stage_ic',
                    'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup",
-                   'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal',
+                   'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal',
                    'prepoceanobs',
-                   'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
+                   'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
                    'earc', 'ecen', 'echgres', 'ediag', 'efcs',
                    'eobs', 'eomg', 'epos', 'esfc', 'eupd',
-                   'atmensanlinit', 'atmensanlrun', 'atmensanlfinal',
+                   'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal',
                    'aeroanlinit', 'aeroanlrun', 'aeroanlfinal',
-                   'preplandobs', 'landanl',
+                   'prepsnowobs', 'snowanl',
                    'fcst',
-                   'atmanlupp', 'atmanlprod', 'atmupp', 'atmprod', 'goesupp',
-                   'ocnpost',
+                   'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp',
+                   'atmos_prod', 'ocean_prod', 'ice_prod',
                    'verfozn', 'verfrad', 'vminmon',
                    'metp',
                    'tracker', 'genesis', 'genesis_fsu',
-                   'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwind',
+                   'postsnd', 'awips_20km_1p0deg', 'fbwind',
                    'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 'gempakpgrb2spec', 'npoess_pgrb2_0p5deg',
                    'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit',
                    'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep',
@@ -35,33 +37,45 @@ class Tasks:
                    'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst',
                    'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', 'mos_wx_prdgen', 'mos_wx_ext_prdgen']
 
-    def __init__(self, app_config: AppConfig, cdump: str) -> None:
+    def __init__(self, app_config: AppConfig, run: str) -> None:
 
-        self.app_config = app_config
-        self.cdump = cdump
+        self.app_config = copy.deepcopy(app_config)
+        self.run = run
+        # Re-source the configs with RUN specified
+        print(f"Source configs with RUN={run}")
+        self._configs = self.app_config.source_configs(run=run, log=False)
 
+        # Update the base config for the application
+        self._configs['base'] = self.app_config.update_base(self._configs['base'])
         # Save dict_configs and base in the internal state (never know where it may be needed)
-        self._configs = self.app_config.configs
         self._base = self._configs['base']
+
         self.HOMEgfs = self._base['HOMEgfs']
         self.rotdir = self._base['ROTDIR']
         self.pslot = self._base['PSLOT']
+        if self.run == "enkfgfs":
+            self.nmem = int(self._base['NMEM_ENS_GFS'])
+        else:
+            self.nmem = int(self._base['NMEM_ENS'])
         self._base['cycle_interval'] = to_timedelta(f'{self._base["assim_freq"]}H')
 
         self.n_tiles = 6  # TODO - this needs to be elsewhere
 
+        # DATAROOT is set by prod_envir in ops.  Here, we use `STMP` to construct DATAROOT
+        dataroot_str = f"{self._base.get('STMP')}/RUNDIRS/{self._base.get('PSLOT')}/{self.run}.<cyclestr>@Y@m@d@H</cyclestr>"
         envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'),
                       'HOMEgfs': self.HOMEgfs,
                       'EXPDIR': self._base.get('EXPDIR'),
+#JKH
                       'ROTDIR': self._base.get('ROTDIR'),
                       'NET': self._base.get('NET'),
-                      'CDUMP': self.cdump,
-                      'RUN': self.cdump,
+                      'RUN': self.run,
                       'CDATE': '<cyclestr>@Y@m@d@H</cyclestr>',
                       'PDY': '<cyclestr>@Y@m@d</cyclestr>',
                       'cyc': '<cyclestr>@H</cyclestr>',
                       'COMROOT': self._base.get('COMROOT'),
-                      'DATAROOT': self._base.get('DATAROOT')}
+                      'DATAROOT': dataroot_str}
+
         self.envars = self._set_envars(envar_dict)
 
     @staticmethod
@@ -73,12 +87,6 @@ def _set_envars(envar_dict) -> list:
 
         return envars
 
-    @staticmethod
-    def _get_hybgroups(nens: int, nmem_per_group: int, start_index: int = 1):
-        ngrps = nens / nmem_per_group
-        groups = ' '.join([f'{x:02d}' for x in range(start_index, int(ngrps) + 1)])
-        return groups
-
     def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str:
         '''
         Takes a string templated with ${ } and converts it into a string suitable
@@ -88,8 +96,8 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
 
           Variables substitued by default:
             ${ROTDIR} -> '&ROTDIR;'
-            ${RUN}    -> self.cdump
-            ${DUMP}   -> self.cdump
+            ${RUN}    -> self.run
+            ${DUMP}   -> self.run
             ${MEMDIR} -> ''
             ${YMD}    -> '@Y@m@d'
             ${HH}     -> '@H'
@@ -111,8 +119,8 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
         # Defaults
         rocoto_conversion_dict = {
             'ROTDIR': '&ROTDIR;',
-            'RUN': self.cdump,
-            'DUMP': self.cdump,
+            'RUN': self.run,
+            'DUMP': self.run,
             'MEMDIR': '',
             'YMD': '@Y@m@d',
             'HH': '@H'
@@ -124,12 +132,49 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) ->
                                              TemplateConstants.DOLLAR_CURLY_BRACE,
                                              rocoto_conversion_dict.get)
 
+    @staticmethod
+    def _get_forecast_hours(run, config, component='atmos') -> List[int]:
+        # Make a local copy of the config to avoid modifying the original
+        local_config = config.copy()
+
+        # Ocean/Ice components do not have a HF output option like the atmosphere
+        if component in ['ocean', 'ice']:
+            local_config['FHMAX_HF_GFS'] = 0
+
+        if component in ['ocean']:
+            local_config['FHOUT_HF_GFS'] = config['FHOUT_OCN_GFS']
+            local_config['FHOUT_GFS'] = config['FHOUT_OCN_GFS']
+            local_config['FHOUT'] = config['FHOUT_OCN']
+
+        if component in ['ice']:
+            local_config['FHOUT_HF_GFS'] = config['FHOUT_ICE_GFS']
+            local_config['FHOUT_GFS'] = config['FHOUT_ICE_GFS']
+            local_config['FHOUT'] = config['FHOUT_ICE']
+
+        fhmin = local_config['FHMIN']
+
+        # Get a list of all forecast hours
+        fhrs = []
+        if run in ['gdas']:
+            fhmax = local_config['FHMAX']
+            fhout = local_config['FHOUT']
+            fhrs = list(range(fhmin, fhmax + fhout, fhout))
+        elif run in ['gfs', 'gefs']:
+            fhmax = local_config['FHMAX_GFS']
+            fhout = local_config['FHOUT_GFS']
+            fhmax_hf = local_config['FHMAX_HF_GFS']
+            fhout_hf = local_config['FHOUT_HF_GFS']
+            fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf)
+            fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout))
+
+        return fhrs
+
     def get_resource(self, task_name):
         """
         Given a task name (task_name) and its configuration (task_names),
         return a dictionary of resources (task_resource) used by the task.
         Task resource dictionary includes:
-        account, walltime, cores, nodes, ppn, threads, memory, queue, partition, native
+        account, walltime, ntasks, nodes, ppn, threads, memory, queue, partition, native
         """
 
         scheduler = self.app_config.scheduler
@@ -138,39 +183,40 @@ def get_resource(self, task_name):
 
         account = task_config['ACCOUNT']
 
-        walltime = task_config[f'wtime_{task_name}']
-        if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys():
-            walltime = task_config[f'wtime_{task_name}_gfs']
+        walltime = task_config['walltime']
+        ntasks = task_config['ntasks']
+        ppn = task_config['tasks_per_node']
 
-        cores = task_config[f'npe_{task_name}']
-        if self.cdump in ['gfs'] and f'npe_{task_name}_gfs' in task_config.keys():
-            cores = task_config[f'npe_{task_name}_gfs']
+        nodes = int(np.ceil(float(ntasks) / float(ppn)))
 
-        ppn = task_config[f'npe_node_{task_name}']
-        if self.cdump in ['gfs'] and f'npe_node_{task_name}_gfs' in task_config.keys():
-            ppn = task_config[f'npe_node_{task_name}_gfs']
+        threads = task_config['threads_per_task']
 
-        nodes = int(np.ceil(float(cores) / float(ppn)))
+        # Memory is not required
+        memory = task_config.get('memory', None)
 
-        threads = task_config[f'nth_{task_name}']
-        if self.cdump in ['gfs'] and f'nth_{task_name}_gfs' in task_config.keys():
-            threads = task_config[f'nth_{task_name}_gfs']
-
-        memory = task_config.get(f'memory_{task_name}', None)
         if scheduler in ['pbspro']:
             if task_config.get('prepost', False):
                 memory += ':prepost=true'
 
         native = None
         if scheduler in ['pbspro']:
-            native = '-l debug=true,place=vscatter'
+            # Set place=vscatter by default and debug=true if DEBUG_POSTSCRIPT="YES"
+            if self._base['DEBUG_POSTSCRIPT']:
+                native = '-l debug=true,place=vscatter'
+            else:
+                native = '-l place=vscatter'
             # Set either exclusive or shared - default on WCOSS2 is exclusive when not set
             if task_config.get('is_exclusive', False):
                 native += ':exclhost'
             else:
                 native += ':shared'
         elif scheduler in ['slurm']:
+#JKH            native = '--export=NONE'
             native = '&NATIVE_STR;'
+            if task_config['RESERVATION'] != "":
+                native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION']
+            if task_config.get('CLUSTERS', "") not in ["", '@CLUSTERS@']:
+                native += ' --clusters=' + task_config['CLUSTERS']
 
         queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE']
 
@@ -182,7 +228,7 @@ def get_resource(self, task_name):
         task_resource = {'account': account,
                          'walltime': walltime,
                          'nodes': nodes,
-                         'cores': cores,
+                         'ntasks': ntasks,
                          'ppn': ppn,
                          'threads': threads,
                          'memory': memory,
diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py
index 84af898d36..78c31dba1b 100644
--- a/workflow/rocoto/workflow_tasks.py
+++ b/workflow/rocoto/workflow_tasks.py
@@ -14,10 +14,10 @@ def get_wf_tasks(app_config: AppConfig) -> List:
     """
 
     tasks = []
-    # Loop over all keys of cycles (CDUMP)
-    for cdump, cdump_tasks in app_config.task_names.items():
-        task_obj = tasks_factory.create(app_config.net, app_config, cdump)  # create Task object based on cdump
-        for task_name in cdump_tasks:
+    # Loop over all keys of cycles (RUN)
+    for run, run_tasks in app_config.task_names.items():
+        task_obj = tasks_factory.create(app_config.net, app_config, run)  # create Task object based on run
+        for task_name in run_tasks:
             tasks.append(task_obj.get_task(task_name))
 
     return tasks
diff --git a/workflow/rocoto_viewer.py b/workflow/rocoto_viewer.py
index 95dd9e76dd..459381f601 100755
--- a/workflow/rocoto_viewer.py
+++ b/workflow/rocoto_viewer.py
@@ -1360,7 +1360,7 @@ def main(screen):
         screen.refresh()
         curses.mousemask(1)
         curses.noecho()
-        for i in range(0, curses.COLORS):
+        for i in range(0, curses.COLORS - 1):
             curses.init_pair(i + 1, i, curses.COLOR_BLACK)
             if i == 4:
                 curses.init_pair(i + 1, i, curses.COLOR_WHITE)
diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py
index 7d7ac84aad..3e70df0f02 100755
--- a/workflow/setup_expt.py
+++ b/workflow/setup_expt.py
@@ -73,10 +73,10 @@ def fill_ROTDIR_cycled(host, inputs):
 
     # Test if we are using the new COM structure or the old flat one for ICs
     if inputs.start in ['warm']:
-        pathstr = os.path.join(inputs.icsdir, f'{inputs.cdump}.{rdatestr[:8]}',
+        pathstr = os.path.join(inputs.icsdir, f'{inputs.run}.{rdatestr[:8]}',
                                rdatestr[8:], 'model_data', 'atmos')
     else:
-        pathstr = os.path.join(inputs.icsdir, f'{inputs.cdump}.{idatestr[:8]}',
+        pathstr = os.path.join(inputs.icsdir, f'{inputs.run}.{idatestr[:8]}',
                                idatestr[8:], 'model_data', 'atmos')
 
     if os.path.isdir(pathstr):
@@ -96,6 +96,7 @@ def fill_ROTDIR_cycled(host, inputs):
     dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart')
     dst_ocn_anl_dir = os.path.join('analysis', 'ocean')
     dst_ice_rst_dir = os.path.join('model_data', 'ice', 'restart')
+    dst_ice_anl_dir = os.path.join('analysis', 'ice')
     dst_atm_anl_dir = os.path.join('analysis', 'atmos')
 
     if flat_structure:
@@ -111,6 +112,7 @@ def fill_ROTDIR_cycled(host, inputs):
         src_ocn_rst_dir = os.path.join('ocean', 'RESTART')
         src_ocn_anl_dir = 'ocean'
         src_ice_rst_dir = os.path.join('ice', 'RESTART')
+        src_ice_anl_dir = dst_ice_anl_dir
         src_atm_anl_dir = 'atmos'
     else:
         src_atm_dir = dst_atm_dir
@@ -118,6 +120,7 @@ def fill_ROTDIR_cycled(host, inputs):
         src_ocn_rst_dir = dst_ocn_rst_dir
         src_ocn_anl_dir = dst_ocn_anl_dir
         src_ice_rst_dir = dst_ice_rst_dir
+        src_ice_anl_dir = dst_ice_anl_dir
         src_atm_anl_dir = dst_atm_anl_dir
 
     def link_files_from_src_to_dst(src_dir, dst_dir):
@@ -129,8 +132,8 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
 
     # Link ensemble member initial conditions
     if inputs.nens > 0:
-        previous_cycle_dir = f'enkf{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}'
-        current_cycle_dir = f'enkf{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}'
+        previous_cycle_dir = f'enkf{inputs.run}.{rdatestr[:8]}/{rdatestr[8:]}'
+        current_cycle_dir = f'enkf{inputs.run}.{idatestr[:8]}/{idatestr[8:]}'
 
         for ii in range(1, inputs.nens + 1):
             memdir = f'mem{ii:03d}'
@@ -152,7 +155,7 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
                 link_files_from_src_to_dst(src_dir, dst_dir)
 
                 # First 1/2 cycle needs a MOM6 increment
-                incfile = f'enkf{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc'
+                incfile = f'enkf{inputs.run}.t{idatestr[8:]}z.ocninc.nc'
                 src_file = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_ocn_anl_dir, incfile)
                 dst_file = os.path.join(rotdir, current_cycle_dir, memdir, dst_ocn_anl_dir, incfile)
                 makedirs_if_missing(os.path.join(rotdir, current_cycle_dir, memdir, dst_ocn_anl_dir))
@@ -173,8 +176,8 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
                 link_files_from_src_to_dst(src_dir, dst_dir)
 
     # Link deterministic initial conditions
-    previous_cycle_dir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}'
-    current_cycle_dir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}'
+    previous_cycle_dir = f'{inputs.run}.{rdatestr[:8]}/{rdatestr[8:]}'
+    current_cycle_dir = f'{inputs.run}.{idatestr[:8]}/{idatestr[8:]}'
 
     # Link atmospheric files
     if inputs.start in ['warm']:
@@ -195,7 +198,7 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
         link_files_from_src_to_dst(src_dir, dst_dir)
 
         # First 1/2 cycle needs a MOM6 increment
-        incfile = f'{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc'
+        incfile = f'{inputs.run}.t{idatestr[8:]}z.ocninc.nc'
         src_file = os.path.join(inputs.icsdir, current_cycle_dir, src_ocn_anl_dir, incfile)
         dst_file = os.path.join(rotdir, current_cycle_dir, dst_ocn_anl_dir, incfile)
         makedirs_if_missing(os.path.join(rotdir, current_cycle_dir, dst_ocn_anl_dir))
@@ -203,8 +206,9 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
 
     # Link ice files
     if do_ice:
-        dst_dir = os.path.join(rotdir, previous_cycle_dir, dst_ice_rst_dir)
-        src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_ice_rst_dir)
+        # First 1/2 cycle needs a CICE6 analysis restart
+        src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_ice_anl_dir)
+        dst_dir = os.path.join(rotdir, current_cycle_dir, dst_ice_anl_dir)
         makedirs_if_missing(dst_dir)
         link_files_from_src_to_dst(src_dir, dst_dir)
 
@@ -220,10 +224,29 @@ def link_files_from_src_to_dst(src_dir, dst_dir):
     dst_dir = os.path.join(rotdir, current_cycle_dir, dst_atm_anl_dir)
     makedirs_if_missing(dst_dir)
     for ftype in ['abias', 'abias_pc', 'abias_air', 'radstat']:
-        fname = f'{inputs.cdump}.t{idatestr[8:]}z.{ftype}'
+        fname = f'{inputs.run}.t{idatestr[8:]}z.{ftype}'
         src_file = os.path.join(src_dir, fname)
         if os.path.exists(src_file):
             os.symlink(src_file, os.path.join(dst_dir, fname))
+    # First 1/2 cycle also needs an atmos increment if doing warm start
+    if inputs.start in ['warm']:
+        for ftype in ['atmi003.nc', 'atminc.nc', 'atmi009.nc']:
+            fname = f'{inputs.run}.t{idatestr[8:]}z.{ftype}'
+            src_file = os.path.join(src_dir, fname)
+            if os.path.exists(src_file):
+                os.symlink(src_file, os.path.join(dst_dir, fname))
+        if inputs.nens > 0:
+            current_cycle_dir = f'enkf{inputs.run}.{idatestr[:8]}/{idatestr[8:]}'
+            for ii in range(1, inputs.nens + 1):
+                memdir = f'mem{ii:03d}'
+                src_dir = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_atm_anl_dir)
+                dst_dir = os.path.join(rotdir, current_cycle_dir, memdir, dst_atm_anl_dir)
+                makedirs_if_missing(dst_dir)
+                for ftype in ['ratmi003.nc', 'ratminc.nc', 'ratmi009.nc']:
+                    fname = f'enkf{inputs.run}.t{idatestr[8:]}z.{ftype}'
+                    src_file = os.path.join(src_dir, fname)
+                    if os.path.exists(src_file):
+                        os.symlink(src_file, os.path.join(dst_dir, fname))
 
     return
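
The warm-start additions link the deterministic and EnKF atmosphere increment files into the first half-cycle only when they exist under the ICs directory. A small sketch of that link-if-present pattern with placeholder paths (not the real ROTDIR/ICs layout):

```python
import os

def link_if_present(src_dir, dst_dir, filenames):
    """Symlink each file from src_dir into dst_dir, skipping ones that do not exist.

    Sketch of the pattern used above; the directories and file names are
    placeholders, not the real ROTDIR/ICs layout.
    """
    os.makedirs(dst_dir, exist_ok=True)
    for fname in filenames:
        src_file = os.path.join(src_dir, fname)
        if os.path.exists(src_file):
            os.symlink(src_file, os.path.join(dst_dir, fname))

# Example: link the warm-start increments for a hypothetical 00Z cycle.
link_if_present('/tmp/ics/gdas.20240601/00/analysis/atmos',
                '/tmp/rotdir/gdas.20240601/00/analysis/atmos',
                ['gdas.t00z.atmi003.nc', 'gdas.t00z.atminc.nc', 'gdas.t00z.atmi009.nc'])
```
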
 
@@ -245,12 +268,6 @@ def fill_EXPDIR(inputs):
     expdir = os.path.join(inputs.expdir, inputs.pslot)
 
     configs = glob.glob(f'{configdir}/config.*')
-    exclude_configs = ['base', 'base.emc.dyn', 'base.nco.static', 'fv3.nco.static']
-    for exclude in exclude_configs:
-        try:
-            configs.remove(f'{configdir}/config.{exclude}')
-        except ValueError:
-            pass
     if len(configs) == 0:
         raise IOError(f'no config files found in {configdir}')
     for config in configs:
@@ -270,6 +287,8 @@ def _update_defaults(dict_in: dict) -> dict:
     data = AttrDict(host.info, **inputs.__dict__)
     data.HOMEgfs = _top
     yaml_path = inputs.yaml
+    if not os.path.exists(yaml_path):
+        raise IOError(f'YAML file does not exist, check path: {yaml_path}')
     yaml_dict = _update_defaults(AttrDict(parse_j2yaml(yaml_path, data)))
 
     # First update config.base
@@ -288,7 +307,8 @@ def _update_defaults(dict_in: dict) -> dict:
 
 def edit_baseconfig(host, inputs, yaml_dict):
     """
-    Parses and populates the templated `config.base.emc.dyn` to `config.base`
+    Parses and populates the templated `HOMEgfs/parm/config/<gfs|gefs>/config.base`
+    to `EXPDIR/pslot/config.base`
     """
 
     tmpl_dict = {
@@ -316,7 +336,8 @@ def edit_baseconfig(host, inputs, yaml_dict):
         "@EXP_WARM_START@": is_warm_start,
         "@MODE@": inputs.mode,
         "@gfs_cyc@": inputs.gfs_cyc,
-        "@APP@": inputs.app
+        "@APP@": inputs.app,
+        "@NMEM_ENS@": getattr(inputs, 'nens', 0)
     }
     tmpl_dict = dict(tmpl_dict, **extend_dict)
 
@@ -324,7 +345,6 @@ def edit_baseconfig(host, inputs, yaml_dict):
     if getattr(inputs, 'nens', 0) > 0:
         extend_dict = {
             "@CASEENS@": f'C{inputs.resensatmos}',
-            "@NMEM_ENS@": inputs.nens,
         }
         tmpl_dict = dict(tmpl_dict, **extend_dict)
 
@@ -340,7 +360,7 @@ def edit_baseconfig(host, inputs, yaml_dict):
     except KeyError:
         pass
 
-    base_input = f'{inputs.configdir}/config.base.emc.dyn'
+    base_input = f'{inputs.configdir}/config.base'
     base_output = f'{inputs.expdir}/{inputs.pslot}/config.base'
     edit_config(base_input, base_output, tmpl_dict)
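
`edit_baseconfig` fills `@KEY@` tokens in the templated `config.base`, including the new `@NMEM_ENS@` entry that now defaults to 0 when no ensemble is requested. A minimal sketch of that substitution, assuming a simple string-replacement model for `edit_config` (whose implementation is not shown here):

```python
# Minimal sketch of the @KEY@ substitution performed when writing config.base.
# The template text and values are illustrative; edit_config itself is not
# shown in this diff, so this only models the assumed replacement behavior.
tmpl_dict = {'@PSLOT@': 'testexp', '@APP@': 'S2SWA', '@NMEM_ENS@': 0}

template = 'export PSLOT="@PSLOT@"\nexport APP=@APP@\nexport NMEM_ENS=@NMEM_ENS@\n'
filled = template
for key, value in tmpl_dict.items():
    filled = filled.replace(key, str(value))
print(filled)
# export PSLOT="testexp"
# export APP=S2SWA
# export NMEM_ENS=0
```
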
 
@@ -383,7 +403,7 @@ def input_args(*argv):
     Method to collect user arguments for `setup_expt.py`
     """
 
-    ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW']
+    ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW', 'S2SWA']
 
     def _common_args(parser):
         parser.add_argument('--pslot', help='parallel experiment name',
@@ -399,12 +419,14 @@ def _common_args(parser):
         parser.add_argument('--idate', help='starting date of experiment, initial conditions must exist!',
                             required=True, type=lambda dd: to_datetime(dd))
         parser.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd))
+        parser.add_argument('--overwrite', help='overwrite previously created experiment (if it exists)',
+                            action='store_true', required=False)
         return parser
 
     def _gfs_args(parser):
         parser.add_argument('--start', help='restart mode: warm or cold', type=str,
                             choices=['warm', 'cold'], required=False, default='cold')
-        parser.add_argument('--cdump', help='CDUMP to start the experiment',
+        parser.add_argument('--run', help='RUN to start the experiment',
                             type=str, required=False, default='gdas')
         # --configdir is hidden from help
         parser.add_argument('--configdir', help=SUPPRESS, type=str, required=False, default=os.path.join(_top, 'parm/config/gfs'))
@@ -429,7 +451,7 @@ def _gfs_or_gefs_ensemble_args(parser):
 
     def _gfs_or_gefs_forecast_args(parser):
         parser.add_argument('--app', help='UFS application', type=str,
-                            choices=ufs_apps + ['S2SWA'], required=False, default='ATM')
+                            choices=ufs_apps, required=False, default='ATM')
         parser.add_argument('--gfs_cyc', help='Number of forecasts per day', type=int,
                             choices=[1, 2, 4], default=1, required=False)
         return parser
@@ -493,17 +515,19 @@ def _gefs_args(parser):
     return parser.parse_args(list(*argv) if len(argv) else None)
 
 
-def query_and_clean(dirname):
+def query_and_clean(dirname, force_clean=False):
     """
     Method to query if a directory exists and gather user input for further action
     """
 
     create_dir = True
     if os.path.exists(dirname):
-        print()
-        print(f'directory already exists in {dirname}')
-        print()
-        overwrite = input('Do you wish to over-write [y/N]: ')
+        print(f'\ndirectory already exists in {dirname}')
+        if force_clean:
+            overwrite = 'yes'
+            print(f'removing directory ........ {dirname}\n')
+        else:
+            overwrite = input('Do you wish to over-write [y/N]: ')
         create_dir = True if overwrite in [
             'y', 'yes', 'Y', 'YES'] else False
         if create_dir:
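
With the new `--overwrite` flag, `query_and_clean` can skip the interactive prompt and treat an existing directory as approved for removal. A condensed sketch of the decision logic (removal of the directory itself is omitted):

```python
# Condensed sketch of the overwrite decision above; shutil.rmtree handling of
# the existing directory is omitted.
import os

def should_create(dirname, force_clean=False):
    if not os.path.exists(dirname):
        return True
    if force_clean:
        answer = 'yes'
    else:
        answer = input('Do you wish to over-write [y/N]: ')
    return answer in ['y', 'yes', 'Y', 'YES']

print(should_create('/tmp', force_clean=True))   # True: existing dir approved for overwrite
```
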
@@ -553,8 +577,8 @@ def main(*argv):
     rotdir = os.path.join(user_inputs.comroot, user_inputs.pslot)
     expdir = os.path.join(user_inputs.expdir, user_inputs.pslot)
 
-    create_rotdir = query_and_clean(rotdir)
-    create_expdir = query_and_clean(expdir)
+    create_rotdir = query_and_clean(rotdir, force_clean=user_inputs.overwrite)
+    create_expdir = query_and_clean(expdir, force_clean=user_inputs.overwrite)
 
     if create_rotdir:
         makedirs_if_missing(rotdir)
@@ -565,6 +589,11 @@ def main(*argv):
         fill_EXPDIR(user_inputs)
         update_configs(host, user_inputs)
 
+    print(f"*" * 100)
+    print(f'EXPDIR: {expdir}')
+    print(f'ROTDIR: {rotdir}')
+    print(f"*" * 100)
+
 
 if __name__ == '__main__':