diff --git a/Makefile b/Makefile index 714b86f105..0b2ebc77e8 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,8 @@ -MODEL_FORMULATION = +MODEL_FORMULATION = +ifneq "${MPAS_SHELL}" "" + SHELL = ${MPAS_SHELL} +endif dummy: ( $(MAKE) error ) @@ -30,7 +33,37 @@ xlf: "USE_PAPI = $(USE_PAPI)" \ "OPENMP = $(OPENMP)" \ "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI" ) - + +xlf-summit-omp-offload: + ( $(MAKE) all \ + "FC_PARALLEL = mpif90" \ + "CC_PARALLEL = mpicc" \ + "CXX_PARALLEL = mpiCC" \ + "FC_SERIAL = xlf90_r" \ + "CC_SERIAL = xlc_r" \ + "CXX_SERIAL = xlc++_r" \ + "FFLAGS_PROMOTION = -qrealsize=8" \ + "FFLAGS_OPT = -g -qfullpath -qmaxmem=-1 -qphsinfo -qzerosize -qfree=f90 -qxlf2003=polymorphic -qspillsize=2500 -qextname=flush -O2 -qstrict -Q" \ + "CFLAGS_OPT = -g -qfullpath -qmaxmem=-1 -qphsinfo -O3" \ + "CXXFLAGS_OPT = -g -qfullpath -qmaxmem=-1 -qphsinfo -O3" \ + "LDFLAGS_OPT = -Wl,--relax -Wl,--allow-multiple-definition -qsmp -qoffload -lcudart -L$(CUDA_DIR)/lib64" \ + "FFLAGS_GPU = -qsmp -qoffload" \ + "LDFLAGS_GPU = -qsmp -qoffload -lcudart -L$(CUDA_DIR)/lib64" \ + "FFLAGS_DEBUG = -O0 -g -qinitauto=7FF7FFFF -qflttrap=ov:zero:inv:en" \ + "CFLAGS_DEBUG = -O0 -g" \ + "CXXFLAGS_DEBUG = -O0 -g" \ + "LDFLAGS_DEBUG = -O0 -g" \ + "FFLAGS_OMP = -qsmp=omp" \ + "CFLAGS_OMP = -qsmp=omp" \ + "PICFLAG = -qpic" \ + "BUILD_TARGET = $(@)" \ + "CORE = $(CORE)" \ + "DEBUG = $(DEBUG)" \ + "USE_PAPI = $(USE_PAPI)" \ + "OPENMP = $(OPENMP)" \ + "OPENMP_OFFLOAD = $(OPENMP_OFFLOAD)" \ + "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI -DFORTRAN_SAME -DCPRIBM -DLINUX" ) + ftn: ( $(MAKE) all \ "FC_PARALLEL = ftn" \ @@ -97,7 +130,37 @@ pgi: "DEBUG = $(DEBUG)" \ "USE_PAPI = $(USE_PAPI)" \ "OPENMP = $(OPENMP)" \ - "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI" ) + "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI -DCPRPGI" ) + +pgi-summit: + ( $(MAKE) all \ + "FC_PARALLEL = mpif90" \ + "CC_PARALLEL = mpicc" \ + "CXX_PARALLEL = mpicxx" \ + "FC_SERIAL = pgf90" \ + "CC_SERIAL = pgcc" \ + "CXX_SERIAL = pgc++" \ + "FFLAGS_PROMOTION = -r8" \ + "FFLAGS_OPT = -g -O3 -byteswapio -Mfree" \ + "CFLAGS_OPT = -O3 " \ + "CXXFLAGS_OPT = -O3 " \ + "LDFLAGS_OPT = -O3 " \ + "FFLAGS_ACC = -acc -Minfo=accel -ta=tesla:cc70,cc60,deepcopy,nollvm " \ + "CFLAGS_ACC = -acc -Minfo=accel -ta=tesla:cc70,cc60,deepcopy,nollvm " \ + "FFLAGS_DEBUG = -O0 -g -Mbounds -Mchkptr -byteswapio -Mfree -Ktrap=divz,fp,inv,ovf -traceback" \ + "CFLAGS_DEBUG = -O0 -g -traceback" \ + "CXXFLAGS_DEBUG = -O0 -g -traceback" \ + "LDFLAGS_DEBUG = -O0 -g -Mbounds -Mchkptr -Ktrap=divz,fp,inv,ovf -traceback" \ + "FFLAGS_OMP = -mp" \ + "CFLAGS_OMP = -mp" \ + "PICFLAG = -fpic" \ + "BUILD_TARGET = $(@)" \ + "CORE = $(CORE)" \ + "DEBUG = $(DEBUG)" \ + "USE_PAPI = $(USE_PAPI)" \ + "OPENMP = $(OPENMP)" \ + "OPENACC = $(OPENACC)" \ + "CPPFLAGS = -DpgiFortran -D_MPI -DUNDERSCORE" ) pgi-nersc: ( $(MAKE) all \ @@ -119,7 +182,7 @@ pgi-nersc: "DEBUG = $(DEBUG)" \ "USE_PAPI = $(USE_PAPI)" \ "OPENMP = $(OPENMP)" \ - "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI" ) + "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI -DCPRPGI" ) pgi-llnl: ( $(MAKE) all \ @@ -141,7 +204,7 @@ pgi-llnl: "DEBUG = $(DEBUG)" \ "USE_PAPI = $(USE_PAPI)" \ "OPENMP = $(OPENMP)" \ - "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI" ) + "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI -DCPRPGI" ) ifort: ( $(MAKE) all \ @@ -444,9 +507,34 @@ llvm: "OPENMP = $(OPENMP)" \ "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI" ) -CPPINCLUDES = -FCINCLUDES = -LIBS = +nag: + ( $(MAKE) all \ + "FC_PARALLEL = mpifort" \ + "CC_PARALLEL = mpicc" \ + "CXX_PARALLEL = mpic++" \ + "FC_SERIAL = nagfor" \ + 
"CC_SERIAL = gcc" \ + "CXX_SERIAL = g++" \ + "FFLAGS_PROMOTION = -r8" \ + "FFLAGS_OPT = -free -mismatch -O3 -convert=big_ieee" \ + "CFLAGS_OPT = -O3" \ + "CXXFLAGS_OPT = -O3" \ + "LDFLAGS_OPT = -O3" \ + "FFLAGS_DEBUG = -free -mismatch -O0 -g -C -convert=big_ieee" \ + "CFLAGS_DEBUG = -O0 -g -Wall -pedantic" \ + "CXXFLAGS_DEBUG = -O0 -g -Wall -pedantic" \ + "LDFLAGS_DEBUG = -O0 -g -C" \ + "FFLAGS_OMP = -qsmp=omp" \ + "CFLAGS_OMP = -qsmp=omp" \ + "CORE = $(CORE)" \ + "DEBUG = $(DEBUG)" \ + "USE_PAPI = $(USE_PAPI)" \ + "OPENMP = $(OPENMP)" \ + "CPPFLAGS = $(MODEL_FORMULATION) -D_MPI -DUNDERSCORE -DNAG_COMPILER" ) + +CPPINCLUDES = +FCINCLUDES = +LIBS = # # If user has indicated a PIO2 library, define USE_PIO2 pre-processor macro @@ -481,9 +569,15 @@ endif # Depending on PIO version, libraries may be libpio.a, or libpiof.a and libpioc.a # Keep open the possibility of shared libraries in future with, e.g., .so suffix # +# Check if libpio.* exists and link -lpio if so, but we make an exception for +# libpio.settings (a file added in PIO2), which is not a library to link ifneq ($(wildcard $(PIO_LIB)/libpio\.*), ) - LIBS += -lpio + # Makefiles don't support "and" operators so we have nested "if" instead + ifneq "$(wildcard $(PIO_LIB)/libpio\.*)" "$(PIO_LIB)/libpio.settings" + LIBS += -lpio + endif endif + ifneq ($(wildcard $(PIO_LIB)/libpiof\.*), ) LIBS += -lpiof endif @@ -495,17 +589,23 @@ ifneq ($(wildcard $(PIO_LIB)/libgptl\.*), ) endif ifneq "$(NETCDF)" "" +ifneq ($(wildcard $(NETCDF)/lib), ) + NETCDFLIBLOC = lib +endif +ifneq ($(wildcard $(NETCDF)/lib64), ) + NETCDFLIBLOC = lib64 +endif CPPINCLUDES += -I$(NETCDF)/include FCINCLUDES += -I$(NETCDF)/include - LIBS += -L$(NETCDF)/lib + LIBS += -L$(NETCDF)/$(NETCDFLIBLOC) NCLIB = -lnetcdf NCLIBF = -lnetcdff - ifneq ($(wildcard $(NETCDF)/lib/libnetcdff.*), ) # CHECK FOR NETCDF4 + ifneq ($(wildcard $(NETCDF)/$(NETCDFLIBLOC)/libnetcdff.*), ) # CHECK FOR NETCDF4 LIBS += $(NCLIBF) endif # CHECK FOR NETCDF4 ifneq "$(NETCDFF)" "" FCINCLUDES += -I$(NETCDFF)/include - LIBS += -L$(NETCDFF)/lib + LIBS += -L$(NETCDFF)/$(NETCDFLIBLOC) LIBS += $(NCLIBF) endif LIBS += $(NCLIB) @@ -513,9 +613,21 @@ endif ifneq "$(PNETCDF)" "" +ifneq ($(wildcard $(PNETCDF)/lib), ) + PNETCDFLIBLOC = lib +endif +ifneq ($(wildcard $(PNETCDF)/lib64), ) + PNETCDFLIBLOC = lib64 +endif CPPINCLUDES += -I$(PNETCDF)/include FCINCLUDES += -I$(PNETCDF)/include - LIBS += -L$(PNETCDF)/lib -lpnetcdf + LIBS += -L$(PNETCDF)/$(PNETCDFLIBLOC) -lpnetcdf +endif + +ifneq "$(LAPACK)" "" + LIBS += -L$(LAPACK) + LIBS += -llapack + LIBS += -lblas endif RM = rm -f @@ -582,6 +694,22 @@ ifeq "$(OPENMP)" "true" LDFLAGS += $(FFLAGS_OMP) endif #OPENMP IF +ifeq "$(OPENACC)" "true" + FFLAGS += $(FFLAGS_ACC) + CFLAGS += $(CFLAGS_ACC) + CXXFLAGS += $(CFLAGS_ACC) + override CPPFLAGS += "-DMPAS_OPENACC" + LDFLAGS += $(FFLAGS_ACC) +endif #OPENACC IF + +ifeq "$(OPENMP_OFFLOAD)" "true" + FFLAGS += $(FFLAGS_GPU) + CFLAGS += $(FFLAGS_GPU) + CXXFLAGS += $(FFLAGS_GPU) + override CPPFLAGS += "-DMPAS_OPENMP_OFFLOAD" + LDFLAGS += $(LDFLAGS_GPU) +endif #OPENMP_OFFLOAD IF + ifeq "$(PRECISION)" "single" CFLAGS += "-DSINGLE_PRECISION" CXXFLAGS += "-DSINGLE_PRECISION" @@ -672,6 +800,18 @@ else OPENMP_MESSAGE="MPAS was built without OpenMP support." endif +ifeq "$(OPENMP_OFFLOAD)" "true" + OPENMP_OFFLOAD_MESSAGE="MPAS was built with OpenMP-offload GPU support enabled." +else + OPENMP_OFFLOAD_MESSAGE="MPAS was built without OpenMP-offload GPU support." 
+endif + +ifeq "$(OPENACC)" "true" + OPENACC_MESSAGE="MPAS was built with OpenACC accelerator support enabled." +else + OPENACC_MESSAGE="MPAS was built without OpenACC accelerator support." +endif + ifneq ($(wildcard .mpas_core_*), ) # CHECK FOR BUILT CORE ifneq ($(wildcard .mpas_core_$(CORE)), ) # CHECK FOR SAME CORE AS ATTEMPTED BUILD. @@ -695,14 +835,6 @@ else CONTINUE=true endif # END IF BUILT CORE CHECK -ifeq "$(SHARELIB)" "true" - FFLAGS += -fPIC - CFLAGS += -fPIC - CXXFLAGS += -fPIC - override CPPFLAGS += -fPIC - LDFLAGS += -fPIC -endif #SHARELIB IF - ifneq ($(wildcard namelist.$(NAMELIST_SUFFIX)), ) # Check for generated namelist file. NAMELIST_MESSAGE="A default namelist file (namelist.$(NAMELIST_SUFFIX).defaults) has been generated, but namelist.$(NAMELIST_SUFFIX) has not been modified." else @@ -857,6 +989,8 @@ endif @echo $(PAPI_MESSAGE) @echo $(TAU_MESSAGE) @echo $(OPENMP_MESSAGE) + @echo $(OPENMP_OFFLOAD_MESSAGE) + @echo $(OPENACC_MESSAGE) @echo $(SHAREDLIB_MESSAGE) ifeq "$(AUTOCLEAN)" "true" @echo $(AUTOCLEAN_MESSAGE) diff --git a/README.md b/README.md index c29fec6914..6344d5c57c 100644 --- a/README.md +++ b/README.md @@ -43,14 +43,17 @@ only described below the src directory. MPAS-Model ├── src - │ ├── registry -- Code for building Registry.xml parser (Shared) │ ├── driver -- Main driver for MPAS in stand-alone mode (Shared) │ ├── external -- External software for MPAS (Shared) │ ├── framework -- MPAS Framework (Includes DDT Descriptions, and shared routines. Shared) │ ├── operators -- MPAS Opeartors (Includes Operators for MPAS meshes. Shared) - │ ├── inc -- Empty directory for include files that Registry generates (Shared) + │ ├── tools -- Empty directory for include files that Registry generates (Shared) + │ │ ├── registry -- Code for building Registry.xml parser (Shared) + │ │ └── input_gen -- Code for generating streams and namelist files (Shared) │ └── core_* -- Individual model cores. - └────── testing_and_setup -- tools for setting up configurations and tests cases (Shared) + │ └── inc -- Empty directory for include files that Registry generates + ├── testing_and_setup -- Tools for setting up configurations and test cases (Shared) + └── default_inputs -- Copies of default stream and namelists files (Shared) Model cores are typically developed independently. 
For information about building and running a particular core, please refer to that core's user's diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000000..d8f2c4ba98 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,230 @@ +trigger: + branches: + include: + - master + - develop + - ocean/develop + - lanice/develop + - ocean/coastal + tags: + include: + - '*' +pr: + branches: + include: + - master + - develop + - ocean/develop + - lanice/develop + - ocean/coastal + +jobs: +- job: + displayName: docs + pool: + vmImage: 'ubuntu-16.04' + strategy: + matrix: + Python38: + python.version: '3.8' + + steps: + - bash: echo "##vso[task.prependpath]$CONDA/bin" + displayName: Add conda to PATH + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda config --add channels conda-forge + conda config --set channel_priority strict + displayName: Configure conda + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda create -y -n docs python=$PYTHON_VERSION sphinx mock sphinx_rtd_theme m2r + displayName: Create docs environment + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda activate docs + + echo "source branch: $(Build.SourceBranch)" + echo "repository: $(Build.Repository.Name)" + + tag=$(git describe --tags $(git rev-list --tags --max-count=1)) + echo "tag: $tag" + + REPO_PATH=$PWD + + if [[ "$(Build.SourceBranch)" == refs/tags/* ]]; then + echo "this is a tag build" + export DOCS_VERSION="$tag" + deploy=True + run=True + elif [[ "$(Build.SourceBranch)" == refs/heads/* ]]; then + branch="$(Build.SourceBranchName)" + echo "this is a merge build of $branch" + deploy=True + elif [[ "$(Build.SourceBranch)" == refs/pull/*/merge ]]; then + branch="$(System.PullRequest.TargetBranch)" + echo "this is a pull request into $branch" + deploy=False + fi + + if [ -n ${branch} ]; then + echo "This build is for branch $branch" + if [[ ${branch} == "master" ]]; then + export DOCS_VERSION="stable" + run=True + elif [[ ${branch} == "develop" ]]; then + export DOCS_VERSION="latest" + run=True + elif [[ ${branch} == "ocean/develop" ]]; then + export DOCS_VERSION="latest ocean" + run=True + elif [[ ${branch} == "ocean/coastal" ]]; then + export DOCS_VERSION="latest coastal" + run=True + elif [[ ${branch} == "landice/develop" ]]; then + export DOCS_VERSION="latest landice" + run=True + else + echo "We don't build docs for $branch" + deploy=False + run=False + fi + fi + + if [[ "${run}" == "False" ]]; then + echo "Not building docs for branch ${branch}" + exit 0 + fi + + echo "Docs version: $DOCS_VERSION" + echo "Deploy to gh-pages? $deploy" + cd docs || exit 1 + make html + + cd "$REPO_PATH" || exit 1 + + if [[ "$deploy" == "False" ]]; then + exit 0 + fi + + PUBLICATION_BRANCH=gh-pages + DOCS_PATH="${DOCS_VERSION// /_}" + # Checkout the branch + pushd $HOME || exit 1 + git clone --branch=$PUBLICATION_BRANCH https://$(GitHubToken)@github.com/$(Build.Repository.Name) publish + cd publish || exit 1 + + # Update pages + if [[ -d "$DOCS_PATH" ]]; then + git rm -rf "$DOCS_PATH" > /dev/null + fi + mkdir "$DOCS_PATH" + cp -r "$REPO_PATH"/docs/_build/html/* "$DOCS_PATH" + # Commit and push latest version + git add . + if git diff-index --quiet HEAD; then + echo "No changes in the docs." 
+ else + git config --local user.name "Azure Pipelines" + git config --local user.email "azuredevops@microsoft.com" + git commit -m "[skip ci] Update $DOCS_VERSION" + git push -fq origin $PUBLICATION_BRANCH + fi + popd || exit 1 + displayName: build and deploy docs + +- job: + displayName: compass-linux + pool: + vmImage: 'ubuntu-16.04' + strategy: + matrix: + Python36: + python.version: '3.6' + Python37: + python.version: '3.7' + Python38: + python.version: '3.8' + + steps: + - bash: echo "##vso[task.prependpath]$CONDA/bin" + displayName: Add conda to PATH + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda config --add channels conda-forge + conda config --set channel_priority strict + displayName: Configure conda + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda create -y -n compass --override-channels -c conda-forge -c e3sm -c defaults \ + python=$PYTHON_VERSION "compass=*=nompi*" + displayName: Create compass conda environment + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda activate compass + + cd testing_and_setup/compass || exit 1 + ./list_testcases.py -h + ./setup_testcase.py -h + ./clean_testcase.py -h + ./manage_regression_suite.py -h + cd ../.. + displayName: Test compass + + +- job: + displayName: compass-osx + pool: + vmImage: 'macOS-10.14' + strategy: + matrix: + Python36: + python.version: '3.6' + Python37: + python.version: '3.7' + Python38: + python.version: '3.8' + + steps: + - bash: echo "##vso[task.prependpath]$CONDA/bin" + displayName: Add conda to PATH + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda config --add channels conda-forge + conda config --set channel_priority strict + displayName: Configure conda + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda create -y -n compass --override-channels -c conda-forge -c e3sm -c defaults \ + python=$PYTHON_VERSION "compass=*=nompi*" + displayName: Create compass conda environment + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda activate compass + + cd testing_and_setup/compass || exit 1 + ./list_testcases.py -h + ./setup_testcase.py -h + ./clean_testcase.py -h + ./manage_regression_suite.py -h + cd ../.. + displayName: Test compass + diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000..19e1d4f711 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = mpas_model +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/compass/clean_testcase.rst b/docs/compass/clean_testcase.rst new file mode 100644 index 0000000000..3dbbe5f06b --- /dev/null +++ b/docs/compass/clean_testcase.rst @@ -0,0 +1,37 @@ +.. _compass_clean_testcase: + +clean\_testcase.py +================== + +This script is used to clean one or more test cases that have already been +setup. + +It will remove directories and driver scripts that were generated as part of +setting up a test case. 
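As a concrete illustration, the invocation below cleans a single test case out of a work directory. The core/configuration/resolution/test values and the work directory shown are only an example (they match the ISOMIP+ Ocean0 case used elsewhere in these docs); substitute the values reported by ``list_testcases.py`` for your own case::

    # remove the generated directories and driver scripts for one test case
    ./clean_testcase.py -o ocean -c isomip_plus -r 2km -t Ocean0 \
        --work_dir /lustre/scratch4/turquoise/username/isomip_plus_Ocean0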
+ +Command-line options:: + + $ ./clean_testcase.py -h + usage: clean_testcase.py [-h] [-o CORE] [-c CONFIG] [-r RES] [-t TEST] + [-n NUM] [-q] [-a] [--work_dir PATH] + + This script is used to clean one or more test cases that have already been + setup. + + It will remove directories / driver scripts that were generated as part of + setting up a test case. + + optional arguments: + -h, --help show this help message and exit + -o CORE, --core CORE Core that contains configurations to clean + -c CONFIG, --configuration CONFIG + Configuration to clean + -r RES, --resolution RES + Resolution of configuration to clean + -t TEST, --test TEST Test name within a resolution to clean + -n NUM, --case_number NUM + Case number to clean, as listed from list_testcases.py. Can be a comma delimited list of case numbers. + -q, --quiet If set, script will not write a command_history file + -a, --all Is set, the script will clean all test cases in the work_dir. + --work_dir PATH If set, script will clean case directories in work_dir rather than the current directory. + diff --git a/docs/compass/config.rst b/docs/compass/config.rst new file mode 100644 index 0000000000..ab4e624f21 --- /dev/null +++ b/docs/compass/config.rst @@ -0,0 +1,303 @@ +.. _compass_config: + +config +====== + +A config file is used to setup a case directory. +This file contains information describing how to configure a case +directory, including files that the case depends on, executables that are +required for the case, namelists and streams files the case requires, and run +scripts which can be used to automate running a case. + +How to use pre-defined paths +---------------------------- + +This testing infrastructure has several predefined paths available as +attributes to several XML tags. Attributes that can use these will have the +line "Can use pre-defined paths" in their description. + +In order to help you make use of these pre-defined paths, this section will +describe what they are, and how to use them. + +To begin, there are two standard paths. These are referred to as ``<work_dir>`` +and ``<script_path>``. + + - ``<work_dir>`` is the location where the test cases are setup to run. + - ``<script_path>`` is the location where the testing infrastructure scripts live. + +Additionally, there are 4 sub-paths: + + - ``<core_dir>`` - This is the core directory that contains the test case + - ``<configuration_dir>`` - This is the configuration directory that contains the test case + - ``<resolution_dir>`` - This is the resolution directory that contains the test case + - ``<test_dir>`` - This is the test directory that contains the test case + - ``<case_dir>`` - This is the case directory that is generated from an XML config file + +Now, all attributes that can use pre-defined paths can build a path using the +following syntax:: + + {base}_{sub} + +Where ``{base}`` can be either ``work`` or ``script``, and ``{sub}`` can be any of +``core_dir``, ``configuration_dir``, ``resolution_dir``, ``test_dir``, and ``case_dir``. + +Note however, ``case_dir`` isn't valid when {base} is ``script`` as a case +directory isn't typically generated in the script path if it's different from +the work path. + +As an example: + + - ``script_test_dir`` would point to the location that the XML files exist to + setup a testcase + - ``work_test_dir`` would point to the location that the testcase will be setup, + and will not include the case directory created from an XML file. 
+ + +Description of XML file +----------------------- + +Below, you will see text describing the various XML tags available in a config +file. Each will describe the tag itself, any attributes the tag can have, and +what children can be placed below the tag. + +``<config>`` - This is the overarching parent tag of a config file that describes the setup for a case. + + - Attributes: + * ``case``: The name of the case directory that will be created from this + config tag. + + - Children: + * ``<get_file>`` + + * ``<add_executable>`` + + * ``<add_link>`` + + * ``<namelist>`` + + * ``<streams>`` + + * ``<run_script>`` + +``<get_file>`` - This tag defines the need for ensuring a required file is available, and the +appropriate ways of acquiring the file. + + - Attributes: + * ``hash``: (Optional) The expected hash of the mesh file. The acquired + mesh file will be validated using this. If this attribute is omitted, + the resulting file will not be validated. + + * ``dest_path``: The path the resulting file should be placed in. Should be + the name of a path defined in the config file, or optionally 'case' + which is expanded to be the case directory generated from the XML + file containing the get_file tag. Can additionally take the values of + pre-defined paths + + * ``file_name``: The name of the file that will be downloaded and placed in dest_path. + + - Children: + * ``<mirror>`` + +``<mirror>`` - This tag defined the different methods of acquiring a required file. + + - Attributes: + * ``protocol``: A description of how the mesh should be retrieved. + Currently supports ``wget``. + + * ``url``: Only used if ``protocol == wget``. The url (pre-filename) portion of + the ``wget`` command. + +``<add_executable>`` - This tag defined the need to link an executable defined in a +configuration file (e.g. general.config) into a case directory. + + - Attributes: + * ``source``: The name of the executable, defined in the configuration file + (e.g. ``general.config``). This name is a short name, and will be + expanded to executables.source + + * ``dest``: The name of the link that will be generated from the executable. + +``<add_link>`` - This tag defined the need to link a file into a case directory. + + - Attributes: + * ``source_path``: The path variable from a configure file to find the + source file in. If it is empty, source is assumed to + have the full path to the file. Additionally, it can + take the values of: + + - Can use pre-defined paths + + * ``source``: The source to generate a symlink from. Relative to the case + directory that will be generated from the parent ``<config>`` tag. + + * ``dest``: The name of the resulting symlink. + +``<namelist>`` - This tag defines a namelist that should be generated from a template. + + - Attributes: + * ``name``: The name of the namelist file that will be generated from the + template namelist pointed to by its mode attribute. + + * ``mode``: The name of the mode to use from the template input files + Each core can define these arbitrarily + + - Children: + * ``<template>`` + + * ``<option>`` + +``<streams>`` - This tag defines a streams file that should be generated from a template. + + - Attributes: + * ``name``: The name of the streams file that will be generated from the + template streams file pointed to by its mode. + + * ``mode``: The name of the mode to use from the template input files + Each core can define these arbitrarily + + * ``keep``: A definition of which streams to keep from the template. 
Values are: + + - ``all``: keep all streams from the template + + - ``immutable``: keep all immutable streams, and discard any mutable + streams from the template + + - ``mutable``: keep all mutable streams, and discard any immutable + streams from the template + + - ``none``: discard all streams from teh template + + - Children: + * ``<template>`` + * ``<stream>`` + +``<template>`` - This tag defines a template that should be applied to a set of configurations. + + - Attributes: + * ``file``: The file that contains the template that should be expanded here. When + used within a ``<namelist>`` tag, the namelist portion of the template + will be applied. When used within a ``<stream>`` tag, the streams portion + of the template will be applied. Additionally, ``<template>`` tags + can be used within ``<compare_fields>`` and ``<compare_timers>`` tags + to define template fields and timers to compare. + + * ``path_base``: The base that the path attribute should be used relative + to. Can have a value of pre-defined paths + + * ``path``: The path that the file lives in, relative to path_base. + +``<option>`` - This tag defines an option that should be modified in the generated +namelist. + + - Attributes: + * ``name``: The name of the option that should be modifed + + - Text: + * The text within <option> and </option> tags will be used to set the + value of the namelist option. + +``<stream>`` - This tag defines a stream that should be modified / created in the +generated streams file. + + - Attributes: + * ``name``: The name of the stream that should be modified / created + + - Children: + * ``<attribute>`` + + * ``<add_contents>`` + + * ``<remove_contents>`` + +``<attribute>`` - This tag defines an attribute that should be created / modified +in a stream definition. + + - Attributes: + * ``name``: The name of the stream attribute to modify / define + + - Text: + * The text in between the <attribute> and </attribute> tags will be + used to set the value of the attribute. + +``<add_contents>`` - This tag defines a list of members to add to a stream definition + + - Children: + * ``<member>`` + +``<remove_contents>`` - This tag defines a list of members to remove from a stream definition + + - Children: + * ``<member>`` + +``<member>`` - This tag defines a member that should be added or removed from a stream definition. + + - Attributes: + * ``name``: The name of the member that will be defined. If this is in an + ``<add_contents>`` tag, it will be added to the stream, if it is in a + ``<remove_contents>`` tag, it will be removed from the stream. + + * ``type``: The type of the member to add (This is ignored if it's within a + ``<remove_contents>`` tag). Example values are var, var_array, + ``var_struct``, and stream. + +``<run_script>`` - This tag defines a new run script that should be generated. + + - Attributes: + * ``name``: The name of the script that will be generated + + - Children: + * ``<step>`` + + * ``<define_env_var>`` + + * ``<model_run>`` + +``<step>`` - This tag defines a step in a run script + + - Attributes: + * ``executable``: The base executable for this step of the run script. e.g. mpirun + + * ``executable_name``: The name of the executable that has been defined in + the configuration file to be used for this step of the run script. + + - Children: + * ``<argument>`` + +``<argument>`` - This tag defines arguments for the executable in a specific step of +a run script. + + - Attributes: + * ``flag``: A flag that will come before the argument. e.g. 
``-n`` + + - Text: + * The text between the ``<argument>`` and ``</argument>`` tags will be used as + the argument after the flag. In the example ``mpirun -n 4`` the flag + would be -n, and the text would be 4. + +``<define_env_var>`` - This tag is used to define an environment variable which +might be needed when running the model. For example, +setting the value of OMP_NUM_THREADS to ensure OpenMP +threading is used. + + - Attributes: + * ``name``: The name of the variable that will be set + + * ``value``: The value that will be given to the variable + +``<model_run>`` - This tag is used to define a run of the model, as configured by +some set of attributes. + + - Attributes: + * This tag is unique, in that it can take a variety of attributes. The + attributes available depend on the batch system. Within the + definition of the batch system, any attribute that has a value with + the given format ``attr_{name}`` represents an attribute that is required + when using this tag. An example of attributes that most batch systems + would require is: + + - ``procs``: The number of MPI tasks to spawn + - ``threads``: The number of OpenMP threads to use in the run + - ``namelist``: The namelist file to use when performing the run + - ``streams``: The streams file to use when performing the run + - ``executable``: (Optional) The name of the executble to use from the + config file. If this is not specified, it defaults to 'model'. diff --git a/docs/compass/details.rst b/docs/compass/details.rst new file mode 100644 index 0000000000..9556a36fa2 --- /dev/null +++ b/docs/compass/details.rst @@ -0,0 +1,12 @@ +.. _compass_details: + +Details +======= + +.. toctree:: + + config + driver_script + template + regression_suite + run_config diff --git a/docs/compass/driver_script.rst b/docs/compass/driver_script.rst new file mode 100644 index 0000000000..9f4b65b49c --- /dev/null +++ b/docs/compass/driver_script.rst @@ -0,0 +1,159 @@ +.. _compass_driver_script: + +driver\_script +============== + +A ``driver_script`` file is used to generate a driver script that can be used to +automate several steps, including running multiple cases, and comparing output. +This file contains information describing how to create a script +that can be used to perform multiple steps in the process of running a test. +This includes running multiple run scripts within case directories, or +performing standard actions, such as comparing multiple output files. + +Below, you will see text describing the various XML tags available in a +``driver_script`` file. Each will describe the tag itself, any attributes the +tag can have, and what children can be placed below the tag. + +``<driver_script>`` - This tag defines a driver script for a set of case directories. + + - Attributes: + * ``name``: The name of the driver script that will be generated + + - Children: + * ``<case>`` + + * ``<step>`` + + * ``<define_env_var>`` + + * ``<validation>`` + +``<case>`` - This tag defines the case that will be used for part of a driver +script. It implies the driver script should ``cd`` into the case +directory before executing the steps and arguments defined within. + + - Attributes: + * ``name``: The name of the case directory that will be used for this + portion of the driver script. + + - Children: + * ``<step>`` + + * ``<define_env_var>`` + +``<step>`` - This tag defines a step in a driver script + + - Attributes: + * ``executable``: The base executable for this step of the script. e.g. 
+ test_model + + * ``executable_name``: The name of the executable that has been defined + in the configuration file to be used for this step of the script. + + - Children: + * ``<argument>`` + +``<argument>`` - This tag defines arguments for the executable in a specific step of +a script. + + - Attributes: + * ``flag``: A flag that will come before the argument. e.g. ``-n`` + + - Text: + * The text between the ``<argument>`` and ``</argument>`` tags will be + used as the argument after the flag. In the example ``mpirun -np 4`` + the flag would be ``-np``, and the text would be ``4``. + +``<define_env_var>`` - This tag is used to define an environment variable which +might be needed when running the model. For example, +setting the value of ``OMP_NUM_THREADS`` to ensure OpenMP +threading is used. + + - Attributes: + * ``name``: The name of the variable that will be set + + * ``value``: The value that will be given to the variable + +``<validation>`` - This tag is used to define a block of validations that should +happen within the driver script. These validations are standard +operations that multiple driver scripts can use easily, such as +comparing fields in multiple files or against baselines. Test +case specific validations might happen in separate steps. + + - Children: + * ``<compare_fields>`` + + * ``<compare_timers>`` + +``<compare_fields>`` - This tag is used to define a comparison of specified +fields in two NetCDF files. + + - Attributes: + * ``file1``: This defines the first file in the comparison. + Relative to the ``<core>/<configuration>/<resolution>`` directory. + Will also cause file1 to be compared against file1 in the baseline directory. + + * ``file2``: This defines the second file in the comparison. + Relative to the ``<core>/<configuration>/<resolution>`` directory. + Will also cause file2 to be compared against file2 in the baseline directory. + + * NOTE: If only one of file1 or file2 is specified, the testcase will + only compare against baselines. + + - Children: + * ``<field>`` + + * ``<template>`` + +``<field>`` - This tag is used to define a field that will be compared within a +``<compare_fields>`` tag. Any norm thresholds that are specified all must pass to +have the comparison pass. + + - Attributes: + * ``name``: This attribute defines the name of the field that will be compared + + * ``l1_norm``: This attribute defines the threshold for an L1 norm, to + define a pass. If not specified, the L1 norm will not be used in + determining if the comparison passes or fails. + + * ``l2_norm``: This attribute defines the threshold for an L2 norm, to + define a pass. If not specified, the L2 norm will not be used in + determining if the comparison passes or fails. + + * ``linf_norm``: This attribute defines the threshold for an L-Infinity norm, to + define a pass. If not specified, the L-Infinity norm will not be used in + determining if the comparison passes or fails. + +``<template>`` - This tag defines a template that should be applied to define a list of field comparisions. + + - Attributes: + * ``file``: The file that contains the template that should be expanded here. + + * ``path_base``: The base that the path attribute should be used relative + to. Can be a pre-defined paths (see :ref:`compass_config`) + + * path: The path that the file lives in, relative to path_base. + +``<compare_timers>`` - This tag is used to define a comparison of timers in two run directories. +The comparison will work with native or gptl timers automatically. 
+ + - Attributes: + * ``rundir1``: This is the first run directory to compare. If it is the + only one specified timers in it will be compared only against it's + baseline. + + * ``rundir2``: This is the second run directory to compare. If it is the + only one specified timers in it will be compared only against it's + baseline. + + - Children: + * ``<timer>`` + + * ``<template>`` + +``<timer>`` - This tag is used to define a timer that should be compared between two run directories. + + - Attributes: + * ``name``: This is the name of the timer to compare. It should be the full + expected name of the timer, not the printed name from the timer + library. diff --git a/docs/compass/index.rst b/docs/compass/index.rst new file mode 100644 index 0000000000..e062617a68 --- /dev/null +++ b/docs/compass/index.rst @@ -0,0 +1,46 @@ +COMPASS +======= + +Overview +-------- + +The COMPASS (Configuration Of Model for Prediction Across Scales Setups) +infrastructure provides a capability for defining simple test-case workflows. +It is intended to house a small number of files which can describe the steps to +setup and configure a test case. + +It provides four utility python scripts: + +* ``clean_testcase.py`` +* ``list_testcases.py`` +* ``setup_testcase.py`` +* ``manage_regression_suite.py`` + +and two configuration file templates: + + * ``general.config.test`` + * ``general.config.ocean`` + +Each of the python scripts can be run with a ``-h`` argument only to get usage +information. + +Additionally, each core has a directory at the top level (e.g. ocean for the +ocean test cases). There is also a templates directory where a core can place +template files that are intended to be available for it's test cases. + +An example test case is placed in ``ocean/baroclinic_channel/10km`` +An example template is placed in ``templates/ocean/global_stats.xml`` + +Test cases are described by XML files. Each test case can have an arbitrary +number of XML files that configure the steps for setting up the test case. + +The various XML files that can be used with this test case infrastructure are +described in the README files contained in the doc directory. + +.. toctree:: + :titlesonly: + + details + scripts + ocean + ocean_testcases/index \ No newline at end of file diff --git a/docs/compass/list_testcases.rst b/docs/compass/list_testcases.rst new file mode 100644 index 0000000000..512cb5883d --- /dev/null +++ b/docs/compass/list_testcases.rst @@ -0,0 +1,44 @@ +.. _compass_list_testcases: + +list\_testcases.py +================== + +This script is used to list available test cases. + +It iterates through the directory structure and prints out configuration +options to setup specific test cases. Additionally, the ``-o``, ``-c``, ``-r``, and ``-t`` +flags can be used to narrow the information that this script prints. If any of +them are passed in, the script will only print test cases that match all +criteria. + +Additionally, if ``-n`` is passed in to get information about a single test case, +it will only print the flags needed to setup that specific test case. + +Command-line options:: + + $ ./list_testcases.py -h + usage: list_testcases.py [-h] [-o CORE] [-c CONFIG] [-r RES] [-t TEST] + [-n NUMBER] + + This script is used to list available test cases. + + It iterates through the directory structure and prints out configuration + options to setup specific test cases. Additionally, the -o, -c, -r, and -t + flags can be used to narrow the information that this script prints. 
If any of + them are passed in, the script will only print test cases that match all + criteria. + + Additionally, if -n is passed in to get information about a single test case, + it will only print the flags needed to setup that specific test case. + + optional arguments: + -h, --help show this help message and exit + -o CORE, --core CORE Core to search for configurations within + -c CONFIG, --configuration CONFIG + Configuration name to search for + -r RES, --resolution RES + Resolution to search for + -t TEST, --test TEST Test name to search for + -n NUMBER, --number NUMBER + If set, script will print the flags to use a the N'th configuration. + diff --git a/docs/compass/manage_regression_suite.rst b/docs/compass/manage_regression_suite.rst new file mode 100644 index 0000000000..b8289d339d --- /dev/null +++ b/docs/compass/manage_regression_suite.rst @@ -0,0 +1,48 @@ +.. _compass_manage_regression_suite: + +manage\_regression\_suite.py +============================ + +This script is used to manage regression suites. A regression suite is a set of +test cases that ensure one or more features in a model meet certain criteria. + +Using this script one can setup or clean a regression suite. + +When setting up a regression suite, this script will generate a script to run +all tests in the suite, and additionally setup each individual test case. + +When cleaning a regression suite, this script will remove any generated files +for each individual test case, and the run script that runs all test cases. + +Command-line options:: + + $ ./manage_regression_suite.py -h + usage: manage_regression_suite.py [-h] -t FILE [-f FILE] [-s] [-c] [-v] + [-m FILE] [-b PATH] [--work_dir PATH] + + This script is used to manage regression suites. A regression suite is a set of + test cases that ensure one or more features in a model meet certain criteria. + + Using this script one can setup or clean a regression suite. + + When setting up a regression suite, this script will generate a script to run + all tests in the suite, and additionally setup each individual test case. + + When cleaning a regression suite, this script will remove any generated files + for each individual test case, and the run script that runs all test cases. + + optional arguments: + -h, --help show this help message and exit + -t FILE, --test_suite FILE + Path to file containing a test suite to setup + -f FILE, --config_file FILE + Configuration file for test case setup + -s, --setup Option to determine if regression suite should be setup or not. + -c, --clean Option to determine if regression suite should be cleaned or not. + -v, --verbose Use verbose output from setup_testcase.py + -m FILE, --model_runtime FILE + Definition of how to build model run commands on this machine + -b PATH, --baseline_dir PATH + Location of baseslines that can be compared to + --work_dir PATH If set, script will setup the test suite in work_dir rather in this script's location. + diff --git a/docs/compass/ocean.rst b/docs/compass/ocean.rst new file mode 100644 index 0000000000..d3bf600bbf --- /dev/null +++ b/docs/compass/ocean.rst @@ -0,0 +1,56 @@ +.. _compass_ocean: + +COMPASS for ocean test case +=========================== + +COMPASS conda environment +------------------------- + +To set up and run ocean test cases from COMPASS, you will need a conda +environment. 
First, install Miniconda3 (if miniconda is not already +installed), then create a new conda environment as follows:: + + conda create -n compass_0.1.2 -c conda-forge -c e3sm python=3.7 compass=0.1.2 + +Each time you want to work with COMPASS, you will need to run:: + + conda activate compass_0.1.2 + +An appropriate conda environment is already available on Los Alamos National +Laboratory's Institutional Computing (LANL IC) machines as well as Anvil, Compy +and Cori. In each case, you will run:: + + source <base_path>/load_latest_compass.sh + +Values of ``<base_path>`` are: + + * grizzly and badger - ``/usr/projects/climate/SHARED_CLIMATE/anaconda_envs`` + * anvil (blues) - ``/lcrc/soft/climate/e3sm-unified/`` + * compy - ``/share/apps/E3SM/conda_envs`` + * cori - ``/global/cfs/cdirs/acme/software/anaconda_envs`` + +Setting config options +---------------------- + +The file ``general.config.ocean`` is a template containing a set of config +options that the COMPASS user must set in order to set up ocean test cases. +Make a copy of this file (e.g. ``config.ocean``) and set the options as follows. +In six places, replace ``FULL_PATH_TO_MPAS_MODEL_REPO`` with the path where you +have checked out (and built) the branch of MPAS-Model you are planning to use. +Five other paths are required, as explained below. + +mesh\_database, initial\_condition\_database and bathymetry\_database +--------------------------------------------------------------------- + +These are directories for storing pre-generated mesh files, data sets for +creating initial conditions, and bathymetry data. These can be empty directories, in which case +meshes and other data sets will be downloaded as required during test-case +setup. (If a test case appears to hang during setup, it is most likely +downloading mesh, initial-condition or bathymetry data.) + +On LANL IC, the shared data bases can be found at:: + + mesh_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/mesh_database + initial_condition_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/initial_condition_database + bathymetry_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/bathymetry_database + diff --git a/docs/compass/ocean_testcases/baroclinic_channel.rst b/docs/compass/ocean_testcases/baroclinic_channel.rst new file mode 100644 index 0000000000..ebfbb2d89a --- /dev/null +++ b/docs/compass/ocean_testcases/baroclinic_channel.rst @@ -0,0 +1,6 @@ +.. _compass_ocean_baroclinic_channel: + +Baroclinic Channel +================== + +Blah diff --git a/docs/compass/ocean_testcases/global_ocean.rst b/docs/compass/ocean_testcases/global_ocean.rst new file mode 100644 index 0000000000..47744f8e0e --- /dev/null +++ b/docs/compass/ocean_testcases/global_ocean.rst @@ -0,0 +1,6 @@ +.. _compass_ocean_global_ocean: + +Global Ocean +============ + +Blah diff --git a/docs/compass/ocean_testcases/index.rst b/docs/compass/ocean_testcases/index.rst new file mode 100644 index 0000000000..d2692e1a7c --- /dev/null +++ b/docs/compass/ocean_testcases/index.rst @@ -0,0 +1,11 @@ +.. _compass_ocean_testcases: + +Ocean Test Cases +================ + +.. toctree:: + :titlesonly: + + global_ocean + baroclinic_channel + isomip_plus diff --git a/docs/compass/ocean_testcases/isomip_plus.rst b/docs/compass/ocean_testcases/isomip_plus.rst new file mode 100644 index 0000000000..1448dc6dd6 --- /dev/null +++ b/docs/compass/ocean_testcases/isomip_plus.rst @@ -0,0 +1,16 @@ +.. 
_compass_ocean_isomip_plus: + +ISOMIP+ +======= + +Blah + + +More Information +---------------- + +.. toctree:: + :titlesonly: + + isomip_plus_at_lanl + diff --git a/docs/compass/ocean_testcases/isomip_plus_at_lanl.rst b/docs/compass/ocean_testcases/isomip_plus_at_lanl.rst new file mode 100644 index 0000000000..bdcdd81787 --- /dev/null +++ b/docs/compass/ocean_testcases/isomip_plus_at_lanl.rst @@ -0,0 +1,658 @@ +.. _compass_ocean_isomip_plus_at_lanl: + +Instructions for setting up and running ISOMIP+ Ocean0 on LANL IC +================================================================= + +In what follows, replace ``username`` with your user name. + +1. SSH tricks +------------- + +A couple of tricks for your laptop if you’re not already using them: +Save your SSH connections: + +.. code-block:: bash + + vim ~/.ssh/config + +Add the following: + +.. code-block:: + + Host * + ControlMaster auto + ControlPath ~/.ssh/connections/%r@%h:%p + ServerAliveInterval 300 + + Host wtrw + Hostname wtrw.lanl.gov + User username + +.. code-block:: bash + + mkdir ~/.ssh/connections + +Alias connections to LANL HPC machines: + +.. code-block:: bash + + vim ~/.bashrc + +Add: + +.. code-block:: bash + + alias gr="ssh -t wtrw ssh gr-fe1" + alias ba="ssh -t wtrw ssh ba-fe2" + alias ko="ssh -t wtrw ssh ko-fe" + +2. Making sure git is set up nicely +----------------------------------- + +2.1 Storing your LANL IC SSH key on GitHub +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +It's useful to set up GitHub to know your public SSH key from LANL IC if you +haven't already done this. It means you don’t have to type your password for +GitHub each time you git fetch, git push, etc. + +I believe this is the right link for +`more details <https://help.github.com/en/articles/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent>`_ +If you haven't done this already and this gives you trouble, let me know and we +can work through it together. + +2.2 git settings +^^^^^^^^^^^^^^^^ + +On IC or on your laptop, make sure you’ve got these settings defined: + +.. code-block:: bash + + git config --global user.name "First Last" + git config --global user.email user@domain.com + git config --global core.editor vim + git config --global push.default nothing + git config --global color.ui true + git config --global core.autocrlf input + git config --global core.whitespace trailing-space + git config --global alias.logg "log --graph --oneline --decorate" + +I use ``git logg`` all the time so this last alias is particularly important. + +2.3 git tab completion +^^^^^^^^^^^^^^^^^^^^^^ + +Download `git-completion.bash <https://raw.githubusercontent.com/git/git/master/contrib/completion/git-completion.bash>`_ + +.. code-block:: bash + + cd ~ + wget https://raw.githubusercontent.com/git/git/master/contrib/completion/git-completion.bash + +Add this to your .bashrc + +.. code-block:: + + module load git + source git-completion.bash + +3. Forking and Cloning MPAS-Model +--------------------------------- + + +* Go to: `https://github.com/MPAS-Dev/MPAS-Model <https://github.com/MPAS-Dev/MPAS-Model>`_ +* Make your own fork by clicking “Fork” at the top right: +* Go to your new fork (e.g. `https://github.com/username/MPAS-Model <https://github.com/username/MPAS-Model>`_ ) +* Whenever you ever need to know the link to clone your fork + + * Click on “Clone or download” + * If it says “Clone with HTTPS”, click Use SSH (either works but SSH will use + the SSH keys you’ve set up above and you never have to type my Git + password.) 
+ * Copy the link with the clipboard icon + +In a terminal window, log in to a LANL machine (I use Grizzly from here on +except where stated): + +.. code-block:: bash + + ssh -t wtrw ssh gr-fe1 + +Make a directory for the code, e.g.: + +.. code-block:: bash + + mkdir /usr/projects/climate/username + cd /usr/projects/climate/username + mkdir -p mpas/model + cd mpas/model/ + +Clone the repo: + +.. code-block:: bash + + git clone git@github.com:username/MPAS-Model.git repo + cd repo + +Rename your remote so it’s easier to not confuse it with other forks: + +.. code-block:: bash + + git remote rename origin username/MPAS-Model + +Add the main repo: + +.. code-block:: bash + + git remote add MPAS-Dev/MPAS-Model git@github.com:MPAS-Dev/MPAS-Model.git + +Add my fork (you can add other people’s forks in the same way): + +.. code-block:: bash + + git remote add xylar/MPAS-Model git@github.com:xylar/MPAS-Model.git + +Get the latest version of all the remotes (pruning anything that has been +deleted): + +.. code-block:: bash + + git fetch --all -p + +Let's store some settings you'll need to load every time you build MPAS. The following +are only appropriate for Grizzly and we'll need a similar file with settings for +Badge and any other machines we might use in the future. + +.. code-block:: bash + + vim ../setup_gr.bash + +In this file, put: + +.. code-block:: bash + + echo "Setting up grizzly intel environment for building and running MPAS" + module purge + module load git + source /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base/etc/profile.d/conda.sh + conda activate compass_py3.7 + module use /usr/projects/climate/SHARED_CLIMATE/modulefiles/all/ + module load intel/17.0.1 openmpi/1.10.5 netcdf/4.4.1 parallel-netcdf/1.5.0 pio/1.7.2 + export CORE=ocean + +4. Checking out an MPAS branch and building the model +----------------------------------------------------- + +**Note: this is a good place to come back to when you need to start over on +a new branch.** + +Add a "worktree", a copy of the repo that we can point to a different branch. +We will work with my branch ``ocean/update_isomip_plus_viz``\ , where I have added some +new viz tools. This is based off of the latest ``ocean/develop``. In general, +``ocean/develop`` is the place to start, since the ``master`` branch is updated only +rarely when we make releases: + +.. code-block:: bash + + cd /usr/projects/climate/username/mpas/model/reop + +Let's make sure we have the latest version of all the branches on all of the remotes + +.. code-block:: bash + + git fetch --all -p + +Okay, now we're ready to make a new folder to work from. + +.. code-block:: bash + + git worktree add ../ocean/update_isomip_plus_viz -b ocean/update_isomip_plus_viz + cd ../ocean/update_isomip_plus_viz + +Take a look at which branch were on: + +.. code-block:: + + git logg + +We don't start off on ``MPAS-Dev/MPAS-Model/ocean/update_isomip_plus_viz`` (even though +the name of the local branch might trick you into thinking you're there), so we need +to do a hard reset to put us there: + +.. code-block:: bash + + git reset --hard xylar/MPAS-Model/ocean/update_isomip_plus_viz + git logg + +Now source the file with modules and settings for building MPAS on grizzly: + +.. code-block:: bash + + source ../../setup_gr.bash + +If all goes well, you should see ``comapss_py3.7`` as part of your command prompt and you should be read to build MPAS. + +.. code-block:: bash + + make ifort + +Take a coffee break, this will take some time. +... + +5. 
Setting up a test case +------------------------- + +Okay you're back and refreshed? Let's set up a test case. + +.. code-block:: bash + + cd testing_and_setup/compass/ + +COMPASS (COnfiguration of Model for Prediction Across Scales Setups -- yes, a litle tortured) is a set of python +scripts we use to set up and run our test cases. To build test cases, you need to tell COMPASS where to find a few +thing on Grizzly. Open a file ``config.ocean`` and put the following in it: + +.. code-block:: + + # This file is the ocean core's configuration file. It is specific to the ocean + # core, and a specific machine. Each machine will configure this file + # differently, but it can be used to point on version of the testing + # infrastructure at a different version of the model. + + + # The namelists section defines paths to template namelists that will be used + # to generate specific namelists. Typically these will point to the forward and + # init namelists in the default_inputs directory after a successful build of + # the ocean model. + [namelists] + forward = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz/namelist.ocean.forward + init = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz/namelist.ocean.init + + + # The streams section defines paths to template streams files that will be used + # to generate specific streams files. Typically these will point to the forward and + # init streams files in the default_inputs directory after a successful build of + # the ocean model. + [streams] + forward = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz/streams.ocean.forward + init = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz/streams.ocean.init + + + # The executables section defines paths to required executables. These + # executables are provided for use by specific test cases. + # Full paths should be provided in order to access the executables from + # anywhere on the machine. + [executables] + model = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz/ocean_model + + + # The paths section describes paths that are used within the ocean core test + # cases. + [paths] + + # The mesh_database and the initial_condition_database are locations where + # meshes / initial conditions might be found on a specific machine. They can be + # the same directory, or different directory. Additionally, if they are empty + # some test cases might download data into them, which will then be reused if + # the test case is run again later. + mpas_model = /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz + mesh_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/mesh_database + initial_condition_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/initial_condition_database + bathymetry_database = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/bathymetry_database + +In theory, you can point to default namelists, streams files and executables for other branches than +the one you're currently on but that's very rarely (if ever) going to be useful to you so you'll +just have to bear with all these redundant references to + +.. code-block:: + + /usr/projects/climate/username/mpas/model/ocean/update_isomip_plus_viz + +If you want to set up a worktree for a different branch, the ``config.ocean`` looks the same except +that you would need to replace the above path with the one for your new worktree. + +List the available test cases: + +.. 
code-block:: bash + + ./list_testcases.py + +At present, there are 107 of them! Let's look at only the ISOMIP+ ones (component: ``ocean``\ , case: ``isomip_plus``\ ): + +.. code-block:: bash + + ./list_testcases.py -o ocean -c isomip_plus + +There are 2 resolutions (2 km and 5 km) and 3 test cases at each resolution (Ocean0, 1 and 2). For now, we're +going to focus on Ocean0, which has boundary conditions and ocean properties consistent with a (very) warm +continental shelf. This one spins up to a quasi-steady state in about 2 years (compared to several decades +for the other 2, which are purposefully designed as transient experiments) so it's a good starting point. +We'll use the 2 km version because the domain is only 80 km wide, so 5 km is really quite coarse. Plus, this +is the "standard" resolution for ISOMIP+. + +Set up the test case as follows: + +.. code-block:: bash + + ./setup_testcase.py -o ocean -c isomip_plus -r 2km -t Ocean0 -f config.ocean -m runtime_definitions/srun.xml --work_dir /lustre/scratch4/turquoise/username/isomip_plus_Ocean0 + +6. Running the test case +------------------------ + +We'll do a short test run (1 month) to make sure everything is working, rathere than jumping into a 2-year simulation. + +.. code-block:: bash + + cd /lustre/scratch4/turquoise/username/isomip_plus_Ocean0/ocean/isomip_plus/2km/Ocean0/ + salloc --nodes=1 --time=0:20:00 --account=e3sm + + module purge + source /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base/etc/profile.d/conda.sh + conda activate compass_py3.7 + module use /usr/projects/climate/SHARED_CLIMATE/modulefiles/all/ + module load intel/17.0.1 openmpi/1.10.5 netcdf/4.4.1 parallel-netcdf/1.5.0 pio/1.7.2 + + ./run_test.py + +If you don't have access to the ``e3sm`` account, ask Steve or Mark for help to get acces. Somewhere on the +HPC website, there is a way to ask for access, but they may just be able to add you directly. + +7. Running a full 2-year Ocean0 simulation +------------------------------------------ + +For this one, you should use a job script. + +.. code-block:: + + cd /lustre/scratch4/turquoise/username/isomip_plus_Ocean0/ocean/isomip_plus/2km/Ocean0/forward + vim job_script.bash + +Put this in the job script: + +.. code-block:: + + #!/bin/bash + #SBATCH --nodes=4 + #SBATCH --time=4:00:00 + #SBATCH --account=e3sm + #SBATCH --job-name=Ocean0 + #SBATCH --output=Ocean0.o%j + #SBATCH --error=Ocean0.e%j + #SBATCH --qos=interactive + + # exit if there are any errors + set -e + + module purge + source /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base/etc/profile.d/conda.sh + conda activate compass_py3.7 + module use /usr/projects/climate/SHARED_CLIMATE/modulefiles/all/ + module load intel/17.0.1 openmpi/1.10.5 netcdf/4.4.1 parallel-netcdf/1.5.0 pio/1.7.2 + + months_per_job=24 + end_date="0003-01-01_00:00:00" + + for month in `seq 0 $months_per_job` + do + ./check_progress.py -f namelist.ocean -e $end_date + ./run.py + ./setup_restart.py -f namelist.ocean + done + +Submit the job: + +.. code-block:: + + sbatch job_script.bash + +Once it's running, monitor the progress with: + +.. code-block:: + + tail log.ocean.0000.out + +This writes a message for each time step (if all is going well). + +The simulation runs one month at a time and then does some adjustment in a python script to make sure sea level doesn't +get out of control (there's a lot of melting going on so we have to have a compensating level of "evaporation" at the +domain boundary). 
8. Visualization +---------------- + +8.1 Running the default viz +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Viz should be lightweight enough that you can run it on the login node, but you could get an interactive job if you prefer. +It produces images, rather than anything interactive, so no need for x-windows or anything like that. + +There should be a link to ``viz`` in the ``forward`` output directory. This is a link to a python package (you can tell because +it contains an ``__init__.py``\ , which is empty, and a ``__main__.py``\ , which is the main script for visualization). To start +with, we'll run the default viz. If you don't already have the compass conda environment loaded, do: + +.. code-block:: bash + + source /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base/etc/profile.d/conda.sh + conda activate compass_py3.7 + +Then, run: + +.. code-block:: bash + + python -m viz + +This will run the ``main()`` function in ``__main__.py``. You could optionally set the input directory and the experiment +number, but the defaults are the current directory and ``Ocean0``\ , respectively, so there's no need in this case. +This will take maybe 10 or 15 minutes (most of it on the overturning streamfunction). You should see something like: + +.. 
code-block:: + + barotropic streamfunction: 100% |##############################| Time: 0:00:15 + compute and caching transport on MPAS grid: + [########################################] | 100% Completed | 7.2s + interpolating tansport on z-level grid: 100% |#################| Time: 0:10:13 + caching transport on z-level grid: + [########################################] | 100% Completed | 2.2s + compute and caching vertical transport sum on z-level grid: + [########################################] | 100% Completed | 2.4s + bin overturning streamfunction: 100% |#########################| Time: 0:02:03 + plotting barotropic streamfunction: 100% |#####################| Time: 0:00:08 + plotting overturning streamfunction: 100% |####################| Time: 0:00:05 + plotting melt rate: 100% |#####################################| Time: 0:00:07 + plotting heat flux from ocean to ice-ocean interface: 100% |###| Time: 0:00:07 + plotting heat flux into ice at ice-ocean interface: 100% |#####| Time: 0:00:07 + plotting thermal driving: 100% |###############################| Time: 0:00:07 + plotting haline driving: 100% |################################| Time: 0:00:07 + plotting friction velocity: 100% |#############################| Time: 0:00:08 + plotting top temperature: 100% |###############################| Time: 0:00:09 + plotting bot temperature: 100% |###############################| Time: 0:00:08 + plotting temperature section: 100% |###########################| Time: 0:00:05 + plotting top salinity: 100% |##################################| Time: 0:00:08 + plotting bot salinity: 100% |##################################| Time: 0:00:08 + plotting salinity section: 100% |##############################| Time: 0:00:05 + plotting top potential density: 100% |#########################| Time: 0:00:10 + plotting bot potential density: 100% |#########################| Time: 0:00:08 + plotting potential density section: 100% |#####################| Time: 0:00:05 + running ffmpeg -y -r 30 -i ./plots/botPotRho/botPotRho_%04d.png -b:v 32000k -r 30 ./movies/botPotRho.mp4 + running ffmpeg -y -r 30 -i ./plots/botSalinity/botSalinity_%04d.png -b:v 32000k -r 30 ./movies/botSalinity.mp4 + running ffmpeg -y -r 30 -i ./plots/botTemp/botTemp_%04d.png -b:v 32000k -r 30 ./movies/botTemp.mp4 + running ffmpeg -y -r 30 -i ./plots/bsf/bsf_%04d.png -b:v 32000k -r 30 ./movies/bsf.mp4 + running ffmpeg -y -r 30 -i ./plots/frictionVelocity/frictionVelocity_%04d.png -b:v 32000k -r 30 ./movies/frictionVelocity.mp4 + running ffmpeg -y -r 30 -i ./plots/halineDriving/halineDriving_%04d.png -b:v 32000k -r 30 ./movies/halineDriving.mp4 + running ffmpeg -y -r 30 -i ./plots/iceHeatFlux/iceHeatFlux_%04d.png -b:v 32000k -r 30 ./movies/iceHeatFlux.mp4 + running ffmpeg -y -r 30 -i ./plots/meltRate/meltRate_%04d.png -b:v 32000k -r 30 ./movies/meltRate.mp4 + running ffmpeg -y -r 30 -i ./plots/oceanHeatFlux/oceanHeatFlux_%04d.png -b:v 32000k -r 30 ./movies/oceanHeatFlux.mp4 + running ffmpeg -y -r 30 -i ./plots/osf/osf_%04d.png -b:v 32000k -r 30 ./movies/osf.mp4 + running ffmpeg -y -r 30 -i ./plots/sectionPotRho/sectionPotRho_%04d.png -b:v 32000k -r 30 ./movies/sectionPotRho.mp4 + running ffmpeg -y -r 30 -i ./plots/sectionSalinity/sectionSalinity_%04d.png -b:v 32000k -r 30 ./movies/sectionSalinity.mp4 + running ffmpeg -y -r 30 -i ./plots/sectionTemp/sectionTemp_%04d.png -b:v 32000k -r 30 ./movies/sectionTemp.mp4 + running ffmpeg -y -r 30 -i ./plots/thermalDriving/thermalDriving_%04d.png -b:v 32000k -r 30 ./movies/thermalDriving.mp4 + 
running ffmpeg -y -r 30 -i ./plots/topPotRho/topPotRho_%04d.png -b:v 32000k -r 30 ./movies/topPotRho.mp4 + running ffmpeg -y -r 30 -i ./plots/topSalinity/topSalinity_%04d.png -b:v 32000k -r 30 ./movies/topSalinity.mp4 + running ffmpeg -y -r 30 -i ./plots/topTemp/topTemp_%04d.png -b:v 32000k -r 30 ./movies/topTemp.mp4 + +The more interesting results should be a series of movies in ``movies`` and 4 time series plots in ``plots`` +(mean melt rate, total melt flux, mean thermal driving and mean friction velocity) and the same plots in +``timeSeriesBelow300m``\ , but this time averaged only over the deepest part of the ice shelf (where much of the action is). + +You'll likely need to scp or rsync them to your laptop to view them. Let me know if it's not clear what these are. + +8.2 Doing your own viz +^^^^^^^^^^^^^^^^^^^^^^ + +A starting point for doing your own viz is to make a local copy of ``__main__.py`` to edit: + +.. code-block:: bash + + cp viz/__main__.py myviz.py + vim myviz.py + +You could, for example, take out the slow streamfunction stuff if you don't need that (it was added because I required it +as standard output in MISOMIP). + +The script imports the following: + +.. code-block:: py + + from viz.streamfunction import compute_barotropic_streamfunction, \ + compute_overturning_streamfunction + +These are functions for computing the stream functions and writing them to NetCDF files. + +.. code-block:: py + + from viz.plot import MoviePlotter, TimeSeriesPlotter + +These can be used to create "plotter" objects that can then produce either time-series plots or a series of images for making movies. + +.. code-block:: py + + from viz.misomip import compute_misomip_interp_coeffs, interp_misomip + +These are used to write out MISOMIP standard output on a regular grid. + +You can look at ``streamfunction.py``\ , ``plot.py`` and ``misomip.py`` to learn a bit more about what these do. There's a bit +of commenting, particularly for the "public" functions that don't start with an underscore. + +Maybe simplify it down to eliminate the streamfunction and MISOMIP stuff, and don't worry about the plots averaged over +the deeper part of the ice draft (none of this is probably all that relevant to you): + +.. code-block:: py + + #!/usr/bin/env python + + import xarray + import argparse + + from viz.plot import MoviePlotter, TimeSeriesPlotter + + def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--folder", dest="folder", + help="Folder for plots", default='.') + parser.add_argument("-e", "--expt", dest="expt", + help="Experiment number (0, 1 or 2)", default=0) + args = parser.parse_args() + + folder = args.folder + expt = args.expt + + dsMesh = xarray.open_dataset('{}/init.nc'.format(folder)) + + ds = xarray.open_mfdataset('{}/timeSeriesStatsMonthly*.nc'.format(folder), + concat_dim='Time') + + tsPlotter = TimeSeriesPlotter(inFolder=folder, + outFolder='{}/plots'.format(folder), + expt=expt) + tsPlotter.plot_melt_time_series() + + mPlotter = MoviePlotter(inFolder=folder, + outFolder='{}/plots'.format(folder), + expt=expt) + + mPlotter.plot_melt_rates() + mPlotter.plot_ice_shelf_boundary_variables() + mPlotter.plot_temperature() + mPlotter.plot_salinity() + mPlotter.plot_potential_density() + + mPlotter.images_to_movies(outFolder='{}/movies'.format(folder), + framesPerSecond=30, extension='mp4') + + if __name__ == '__main__': + main() + +I've set things up to plot some of the more common fields by default. 
The following functions plot either time series or +movies of some common fields related to the ice-ocean interface -- melt rates, thermal driving, friction velocity, +etc. + +.. code-block:: py + + tsPlotter.plot_melt_time_series() + ... + mPlotter.plot_melt_rates() + mPlotter.plot_ice_shelf_boundary_variables() + +These functions plot 3D fields at the top of the ocean (either the ice draft or the sea surface), the sea floor +and in a transect through the middle of the domain: + +.. code-block:: py + + mPlotter.plot_temperature() + mPlotter.plot_salinity() + mPlotter.plot_potential_density() + +You could also add your own custom fields as long as they're available in the ``timeSeriesStatsMonthly*.nc`` files. + +Here are a couple of examples: + +.. code-block:: + + # plot a time series of SST + areaCell = tsPlotter.dsMesh.areaCell + temperature = tsPlotter.ds.timeMonthly_avg_activeTracers_temperature + sst = temperature.isel(nVertLevels=0) + meanSST = (sst*areaCell).sum(dim='nCells')/areaCell.sum(dim='nCells') + + tsPlotter.plot_time_series(meanSST, 'mean sea-surface temperature', + prefix='meanSST', units='deg C') + ... + # plot the x and y components of velocity at top, bottom and transect + da = mPlotter.ds.timeMonthly_avg_velocityX + mPlotter.plot_3d_field_top_bot_section( + da, nameInTitle='x velocity', prefix='Vx', units='m/s', + vmin=-0.2, vmax=0.2) + da = mPlotter.ds.timeMonthly_avg_velocityY + mPlotter.plot_3d_field_top_bot_section( + da, nameInTitle='y velocity', prefix='Vy', units='m/s', + vmin=-0.2, vmax=0.2) + +Make sure any new plots with the movie plotter happen before movies are made (\ ``mPlotter.images_to_movies()``\ ) +so they get included in the movies. + +The data sets (\ ``ds``\ ) and data arrays (\ ``da``\ ) come from ``xarray``\ , which is a really handy package for working with +NetCDF-style files in memory in python. It's a lot smarter about named dimensions than ``numpy`` and a lot easier +to manipulate than default python ``NetCDF4`` data sets. But there's a bit of a learning curve involving a lot of Googling +the documentation and StackOverflow.
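+As a small taste of what that looks like in practice, here is a sketch (using the same files and
+variable names as the examples above; adjust the path to your run directory) that picks out the
+surface level by name and averages the final year of monthly output, with no positional-index
+bookkeeping:
+
+.. code-block:: py
+
+    import xarray
+
+    # Monthly output from the forward run
+    ds = xarray.open_mfdataset('timeSeriesStatsMonthly*.nc', concat_dim='Time')
+
+    temperature = ds.timeMonthly_avg_activeTracers_temperature
+    sst = temperature.isel(nVertLevels=0)                        # surface level, selected by name
+    lastYear = sst.isel(Time=slice(-12, None)).mean(dim='Time')  # mean over the final 12 months
+
+    print(float(lastYear.max()))   # warmest time-mean SST anywhere in the domain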
Hopefully that's a start... + diff --git a/docs/compass/regression_suite.rst b/docs/compass/regression_suite.rst new file mode 100644 index 0000000000..78421e6012 --- /dev/null +++ b/docs/compass/regression_suite.rst @@ -0,0 +1,53 @@ +.. _compass_regression_suite: + +regression\_suite +================= + +A ``regression_suite`` file is used to define a regression suite, which +involves a set of tests that should be run. This file contains information +describing a set of tests that should be set up and run as part of a regression +test suite. + +Below, you will see text describing the various XML tags available in a +``regression_suite`` file. Each will describe the tag itself, any attributes the +tag can have, and what children can be placed below the tag. + +``<regression_suite>`` - This is the overarching parent tag in a ``regression_suite`` +file. It defines the suite that will be set up. + + - Attributes: + * ``name``: This attribute defines the name of the regression suite. A + script will be generated named ``<name>.py`` that will run the entire + regression suite in the location the regression suite is set up (i.e. + ``work_dir``) + + - Children: + * ``<test>`` + +``<test>`` - This tag defines a test that will be included as part of the +regression suite. + + - Attributes: + * ``name``: This attribute defines the name of the test as part of this + regression suite. NOTE: This name is only used in this + regression suite, so multiple suites can name the same test in + different ways. + + * ``core``: This attribute defines the core that would be passed to + ``setup_testcases.py`` to set up this test. + + * ``configuration``: This attribute defines the configuration that would be passed to + ``setup_testcases.py`` to set up this test. + + * ``resolution``: This attribute defines the resolution that would be passed to + ``setup_testcases.py`` to set up this test. + + - Children: + * ``<script>`` + +``<script>`` - This tag defines a script that will be run to perform the +specified test. + + - Attributes: + * ``name``: This attribute defines the name of the script that will be run + to perform the specified test. Typically this is a driver script. diff --git a/docs/compass/run_config.rst b/docs/compass/run_config.rst new file mode 100644 index 0000000000..d244769124 --- /dev/null +++ b/docs/compass/run_config.rst @@ -0,0 +1,85 @@ +.. _compass_run_config: + +run\_config +=========== + +A ``run_config`` file is used to define an environment to run the model in. +This file will describe the steps that convert a line of ``<model_run>`` +from a run_script (defined in a config file) into an execution of the model. + +Below is a description of the XML tags available within a run_config file: + +``<run_config>`` - The parent tag of a run_config file. This is used to define a +run_config definition. + + - Children: + * ``<define_env_var>`` + + * ``<step>`` + +``<define_env_var>`` - This tag is used to specify the definition of an environment +variable. For example, OMP_NUM_THREADS. + + - Attributes: + * ``name``: This attribute defines the name of the environment variable + that will be set. + + * ``value``: This attribute defines the value that will be given to the + environment variable. This attribute can take the value + ``attr_{name}`` to use the value of the attribute in the + ``<model_run>`` tag that is generating the model run, rather than + having its value hard-coded. + +``<step>`` - This tag defines a step in a run script. + + - Attributes: + * ``executable``: The base executable for this step of the run script, + e.g. ``mpirun`` + + * ``executable_name``: The name of the executable that has been defined in + the configuration file to be used for this step of the run script. + + - Children: + * ``<argument>`` + +``<argument>`` - This tag defines arguments for the executable in a specific step of +a run script. + + - Attributes: + * ``flag``: A flag that will come before the argument, e.g. ``-n`` + + - Text: + * The text between the ``<argument>`` and ``</argument>`` tags will be used as + the argument after the flag. In the example ``mpirun -np 4`` the flag + would be ``-np``, and the text would be ``4``. + Additionally, the text of this tag can take the following keyword values: + + - ``model``: Use the model executable in place of an actual argument + + - ``attr_{name}``: Use the attribute of the ``<model_run>`` tag that + generates a model run named ``{name}`` for the value. 
+ +Examples +-------- + +As an example, suppose a case for the test core had the following ``<model_run>`` +tag:: + + <model_run procs="1" threads="1" namelist="namelist.test" streams="streams.test"/> + +And you wanted this to produce a set of lines that looked like:: + + export OMP_NUM_THREADS=1 + mpirun -n 1 ./test_model -n namelist.test -s streams.test + +A ``<run_config>`` file would look as follows for this:: + + <run_config> + <define_env_var name="OMP_NUM_THREADS" value="attr_threads"/> + <step executable="mpirun"> + <argument flag="-n">attr_procs</argument> + <argument flag="">model</argument> + <argument flag="-n">attr_namelist</argument> + <argument flag="-s">attr_streams</argument> + </step> + </run_config> + diff --git a/docs/compass/scripts.rst b/docs/compass/scripts.rst new file mode 100644 index 0000000000..c5f00516db --- /dev/null +++ b/docs/compass/scripts.rst @@ -0,0 +1,12 @@ +.. _compass_scripts: + +Scripts +======= + +.. toctree:: + :titlesonly: + + list_testcases + setup_testcase + clean_testcase + manage_regression_suite diff --git a/docs/compass/setup_testcase.rst b/docs/compass/setup_testcase.rst new file mode 100644 index 0000000000..0978f4091c --- /dev/null +++ b/docs/compass/setup_testcase.rst @@ -0,0 +1,57 @@ +.. _compass_setup_testcase: + +setup\_testcase.py +================== + +This script is used to set up individual test cases. Available test cases +can be seen using the :ref:`compass_list_testcases` script. + +Specifically, this script parses XML files that define cases (steps in test +cases) and driver scripts, and generates directories and scripts to run each +step in the process of creating a test case. + +This script requires a setup configuration file. Configuration files are +specific to each core. Template configuration files for each core can be seen +in this directory named ``general.config.{core}`` (see :ref:`compass_config`). +Each core may have different requirements as far as what is required within a +configuration file. + +Command-line options:: + + $ ./setup_testcase.py -h + usage: setup_testcase.py [-h] [-o CORE] [-c CONFIG] [-r RES] [-t TEST] + [-n NUM] [-f FILE] [-m FILE] [-b PATH] [-q] + [--no_download] [--work_dir PATH] + + This script is used to setup individual test cases. Available test cases + can be see using the list_testcases.py script. + + Specifically, this script parses XML files that define cases (steps in test + cases) and driver scripts, and generates directories and scripts to run each + step in the process of creating a test case. + + This script requires a setup configuration file. Configuration files are + specific to each core. Template configuration files for each core can be seen + in this directory named 'general.config.{core}'. Each core may have different + requirements as far as what is required within a configuration file. + + optional arguments: + -h, --help show this help message and exit + -o CORE, --core CORE Core that contains configurations + -c CONFIG, --configuration CONFIG + Configuration to setup + -r RES, --resolution RES + Resolution of configuration to setup + -t TEST, --test TEST Test name within a resolution to setup + -n NUM, --case_number NUM + Case number to setup, as listed from list_testcases.py. Can be a comma delimited list of case numbers. 
+ -f FILE, --config_file FILE + Configuration file for test case setup + -m FILE, --model_runtime FILE + Definition of how to build model run commands on this machine + -b PATH, --baseline_dir PATH + Location of baseslines that can be compared to + -q, --quiet If set, script will not write a command_history file + --no_download If set, script will not auto-download base_mesh files + --work_dir PATH If set, script will create case directories in work_dir rather than the current directory. + diff --git a/docs/compass/template.rst b/docs/compass/template.rst new file mode 100644 index 0000000000..8696e04b14 --- /dev/null +++ b/docs/compass/template.rst @@ -0,0 +1,137 @@ +.. _compass_template: + +template +======== + +This document describes the format of a template file, which is used to define +a set of namelist and streams configurations that will be used multiple times. + +A template file contains information describing how to configure a namelist +and/or a streams file in a standard way. A template file can be applied to +multiple cases, and allows config files for cases to be shorter since the +options are defined within the template instead of in the config file. + +Below, you will see text describing the various XML tags available in a template +file. Each will describe the tag itself, any attributes the tag can have, and +what children can be placed below the tag. + +``<template>`` - This is the overarching parent tag within a template file. It +begins a template definition. + + Children: + * ``<namelist>`` + + * ``<streams>`` + +``<namelist>`` - This tag defines a set of namelist modifications that will be +applied if this template is used within a config file's ``<namelist>`` tag. + + - Children: + * ``<option>`` + + * ``<template>`` (child template) + +``<option>`` - This tag defines an option that should be modified in the namelist file. + + - Attributes: + * ``name``: The name of the option that should be modified + + - Text: + * The text within ``<option>`` and ``</option>`` tags will be used to set the + value of the namelist option. + +``<streams>`` - This tag defines a set of streams modifications that will be +applied if this template is used within a config file's ``<streams>`` tag. + + - Children: + * ``<stream>`` + +``<stream>`` - This tag defines a stream that should be modified / created in the +streams file. + + - Children: + * ``<attribute>`` + + * ``<add_contents>`` + + * ``<remove_contents>`` + +``<attribute>`` - This tag defines an attribute that should be created / modified +in a stream definition. + + - Attributes: + * ``name``: The name of the stream attribute to modify / define + + - Text: + * The text in between the ``<attribute>`` and ``</attribute>`` tags will be + used to set the value of the attribute. + +``<add_contents>`` - This tag defines a list of members to add to a stream definition + + - Children: + * ``<member>`` + +``<remove_contents>`` - This tag defines a list of members to remove from a stream definition + + - Children: + * ``<member>`` + +``<member>`` - This tag defines a member that should be added or removed from a stream definition. + + - Attributes: + * ``name``: The name of the member that will be defined. If this is in an + ``<add_contents>`` tag, it will be added to the stream, if it is in a + ``<remove_contents>`` tag, it will be removed from the stream. + + * ``type``: The type of the member to add (This is ignored if it's within a + ``<remove_contents>`` tag). 
Example values are ``var``, ``var_array``, + ``var_struct``, and ``stream``. + +``<validation>`` - This tag defines a set of templated validations that can be applied from a template file. +The validation template tag can only be applied to driver script files. + + - Children: + * ``<compare_fields>`` + + * ``<compare_timers>`` + +``<compare_fields>`` - This tag contains a list of fields that should be compared +if the template is used within a ``<compare_fields>`` tag. + + - Children: + * ``<field>`` + + * ``<template>`` + +``<field>`` - This tag defines a specific field that should be compared if this +template is used within a ``<compare_fields>`` tag. See +:ref:`compass_driver_script` for configuration information. + +``<compare_timers>`` - This tag contains a list of timers that should be compared +if the template is used within a ``<compare_timers>`` tag. + + - Children: + * ``<timer>`` + + * ``<template>`` + +``<template>`` (child template) - This tag allows another template to be applied +from within a template file. WARNING: A template can include any other template +(including itself), so it is possible to create an infinite loop of template +application. + + - Attributes: + * ``file``: The file that contains the template that should be expanded + here. When used within a ``<namelist>`` tag, the namelist portion of + the template will be applied. When used within a ``<stream>`` tag, + the streams portion of the template will be applied. Additionally, + ``<template>`` tags can be used within ``<compare_fields>`` and + ``<compare_timers>`` tags to define template fields and timers to + compare. + + * ``path_base``: The base that the path attribute should be used relative + to. Can be one of the pre-defined paths (see :ref:`compass_config` for more + information) + + * ``path``: The path that the file lives in, relative to ``path_base``. + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000..5d13fca7dd --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# +# MPAS-Model documentation build configuration file, created by +# sphinx-quickstart on Sat Mar 25 14:39:11 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'sphinx.ext.napoleon'] + +autosummary_generate = True + +# Otherwise, the Return parameter list looks different from the Parameters list +napoleon_use_rtype = False +# Otherwise, the Attributes parameter list looks different from the Parameters +# list +napoleon_use_ivar = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = ['.rst'] +# source_suffix = '.rst' + +# The master toctree document. 
+master_doc = 'index' + +# General information about the project. +project = u'MPAS-Model' +copyright = u'Copyright (c) 2013-2020, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047;' \ + u'Land Ice: LA-CC-13-117) and the University Corporation for Atmospheric Research (UCAR).' +author = u'Xylar Asay-Davis, Doug Jacobsen, Michael Duda, Mark Petersen, ' \ + u'Matt Hoffman, Adridan Turner, Philip Wolfram' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +if 'DOCS_VERSION' in os.environ: + version = os.environ.get('DOCS_VERSION') + release = version +else: + # The short X.Y.Z version. + version = '7.0' + # The full version, including alpha/beta/rc tags. + release = '7.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', + 'design_docs/template.md'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# + +# on_rtd is whether we are on readthedocs.org, this line of code grabbed from +# docs.readthedocs.org +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'mpas_model_doc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'mpas_model.tex', u'MPAS-Model Documentation', + author, 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'mpas_model', u'MPAS-Model Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'mpas_model', u'MPAS-Model Documentation', + author, 'MPAS-Model', 'One line description of project.', + 'Miscellaneous'), +] + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('https://docs.python.org/', None), + 'numpy': ('http://docs.scipy.org/doc/numpy/', None), + 'xarray': ('http://xarray.pydata.org/en/stable/', None)} + + +github_doc_root = 'https://github.com/rtfd/recommonmark/tree/master/doc/' diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000000..94dcfca054 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,24 @@ +MPAS-Model +========== + +The Model for Prediction Across Scales (MPAS) is a collaborative project for +developing atmosphere, ocean, and other earth-system simulation components for +use in climate, regional climate, and weather studies. The primary development +partners are the climate modeling group at Los Alamos National Laboratory +(COSIM) and the National Center for Atmospheric Research. Both primary +partners are responsible for the MPAS framework, operators, and tools common to +the applications; LANL has primary responsibility for the ocean, sea-ice and +land-ice models, and NCAR has primary responsibility for the atmospheric model. + +The MPAS framework facilitates the rapid development and prototyping of models +by providing infrastructure typically required by model developers, including +high-level data types, communication routines, and I/O routines. By using MPAS, +developers can leverage pre-existing code and focus more on development of +their own models. + +.. toctree:: + :titlesonly: + + compass/index + ocean/index + + diff --git a/docs/ocean/design_docs/index.rst b/docs/ocean/design_docs/index.rst new file mode 100644 index 0000000000..3f845b901f --- /dev/null +++ b/docs/ocean/design_docs/index.rst @@ -0,0 +1,9 @@ +Design Docs +=========== + +Design documents describing new capabilities added to MPAS-Ocean. + +.. toctree:: + :titlesonly: + + time-varying-wind diff --git a/docs/ocean/index.rst b/docs/ocean/index.rst new file mode 100644 index 0000000000..d5f9bf70ea --- /dev/null +++ b/docs/ocean/index.rst @@ -0,0 +1,7 @@ +MPAS-Ocean +========== + +.. toctree:: + :titlesonly: + + design_docs/index diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt new file mode 100644 index 0000000000..41375a53d2 --- /dev/null +++ b/src/CMakeLists.txt @@ -0,0 +1,97 @@ +# +# This is the interface between E3SM's new CMake-based build system and MPAS. +# +# The following CMake variables are expected to be defined: +# * CORES : A list of CORES to build, comma-separated (e.g. 
"ocean,seaice,landice") +# * Whatever CIME settings are setting to correctly resolve the ${CASEROOT}/Macros.cmake file +# - COMPILER, DEBUG, MPILIB, MACH, OS +# + +# Source CIME-generated Macros +include(${CASEROOT}/Macros.cmake) +# Load machine/compiler specific settings +set(COMPILER_SPECIFIC_DEPENDS ${CASEROOT}/Depends.${COMPILER}.cmake) +set(MACHINE_SPECIFIC_DEPENDS ${CASEROOT}/Depends.${MACH}.cmake) +set(PLATFORM_SPECIFIC_DEPENDS ${CASEROOT}/Depends.${MACH}.${COMPILER}.cmake) +set(TRY_TO_LOAD ${COMPILER_SPECIFIC_DEPENDS} ${MACHINE_SPECIFIC_DEPENDS} ${PLATFORM_SPECIFIC_DEPENDS}) +foreach(ITEM IN LISTS TRY_TO_LOAD) + if (EXISTS ${ITEM}) + include(${ITEM}) + endif() +endforeach() + +# +# General setup +# + +if (USE_ESMF_LIB) + set(ESMFDIR "esmf") +else() + set(ESMFDIR "noesmf") +endif() + +set(CMAKE_C_COMPILER ${MPICC}) +set(CMAKE_CXX_COMPILER ${MPICXX}) +set(CMAKE_Fortran_COMPILER ${MPIFC}) +set(CMAKE_EXE_LINKER_FLAGS "${LDFLAGS}") +set(CMAKE_VERBOSE_MAKEFILE TRUE) + +# Set up CPPDEFS +set(FILE_OFFSET "-DOFFSET64BIT") +if (CPPDEFS) + separate_arguments(CPPDEFS UNIX_COMMAND "${CPPDEFS}") +endif() +list(APPEND CPPDEFS "-DMPAS_NO_LOG_REDIRECT" "-DUSE_PIO2" "-DMPAS_NO_ESMF_INIT" "-DMPAS_ESM_SHR_CONST" "-DMPAS_PERF_MOD_TIMERS" "${MODEL_FORMULATION}" "${FILE_OFFSET}" "${ZOLTAN_DEFINE}" "-D_MPI" "-DMPAS_NAMELIST_SUFFIX=${NAMELIST_SUFFIX}" "-DMPAS_EXE_NAME=${EXE_NAME}") +if (DEBUG) + list(APPEND CPPDEFS "-DMPAS_DEBUG") +endif() +if (compile_threaded) + list(APPEND CPPDEFS "-DMPAS_OPENMP") +endif() + +set(INCLUDES "${INSTALL_SHAREDPATH}/include" "${INSTALL_SHAREDPATH}/${COMP_INTERFACE}/${ESMFDIR}/${NINST_VALUE}/csm_share" "${INSTALL_SHAREDPATH}/pio" "${PNETCDF_PATH}/include" "${CMAKE_CURRENT_SOURCE_DIR}/external/ezxml" "${CMAKE_BINARY_DIR}/framework" "${CMAKE_BINARY_DIR}/operators") +if (NETCDF_PATH) + list(APPEND INCLUDES ${NETCDF_PATH}/include) +else() + if (NETCDF_C_PATH) + list(APPEND INCLUDES ${NETCDF_C_PATH}/include) + endif() + if (NETCDF_FORTRAN_PATH) + list(APPEND INCLUDES ${NETCDF_FORTRAN_PATH}/include) + endif() +endif() + +if (USE_KOKKOS) + include(${INSTALL_SHAREDPATH}/kokkos_generated_settings.cmake) + string (REPLACE ";" " " KOKKOS_CXXFLAGS_STR "${KOKKOS_CXXFLAGS}") + set(CXXFLAGS "${CXXFLAGS} ${KOKKOS_CXXFLAGS_STR}") +endif() + +set(CMAKE_Fortran_FLAGS "${FFLAGS}") +set(CMAKE_C_FLAGS "${CFLAGS}") +set(CMAKE_CXX_FLAGS "${CXXFLAGS}") + +# Include custom cmake libraries used for mpas +include(${CMAKE_CURRENT_SOURCE_DIR}/cmake_utils.cmake) +include(${CMAKE_CURRENT_SOURCE_DIR}/build_core.cmake) + +# Add tools +add_subdirectory(tools) + +# Gather sources that are needed for all cores into "common" library + +set(COMMON_RAW_SOURCES external/ezxml/ezxml.c) +include(${CMAKE_CURRENT_SOURCE_DIR}/framework/framework.cmake) +include(${CMAKE_CURRENT_SOURCE_DIR}/operators/operators.cmake) + +add_library(common OBJECT) +target_compile_definitions(common PRIVATE ${CPPDEFS}) +target_include_directories(common PRIVATE ${INCLUDES}) + +genf90_targets("${COMMON_RAW_SOURCES}" "${INCLUDES}" "${CPPDEFS}" "" "") +target_sources(common PRIVATE ${SOURCES}) + +# Build cores! 
+foreach(CORE IN LISTS CORES) + build_core(${CORE}) +endforeach() diff --git a/src/Makefile.in.E3SM b/src/Makefile.in.E3SM index 1988564a83..dabf51adac 100644 --- a/src/Makefile.in.E3SM +++ b/src/Makefile.in.E3SM @@ -14,6 +14,10 @@ endif # End duplicated logic include $(CASEROOT)/Macros.make +# Load machine/compiler specific settings +-include $(CASEROOT)/Depends.$(COMPILER) +-include $(CASEROOT)/Depends.$(MACH) +-include $(CASEROOT)/Depends.$(MACH).$(COMPILER) ifneq ($(wildcard core_$(CORE)/build_options.mk), ) # Check for build_options.mk include core_$(CORE)/build_options.mk @@ -46,9 +50,9 @@ NETCDF=$(NETCDF_PATH) PNETCDF=$(PNETCDF_PATH) PIO=$(INSTALL_SHAREDPATH)/pio FILE_OFFSET = -DOFFSET64BIT -override CFLAGS += -DMPAS_NO_LOG_REDIRECT -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -DMPAS_PERF_MOD_TIMERS -override FFLAGS += -DMPAS_NO_LOG_REDIRECT -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -DMPAS_PERF_MOD_TIMERS -override CPPFLAGS += $(CPPDEFS) $(MODEL_FORMULATION) $(FILE_OFFSET) $(ZOLTAN_DEFINE) -DMPAS_NO_LOG_REDIRECT -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -D_MPI -DMPAS_NAMELIST_SUFFIX=$(NAMELIST_SUFFIX) -DMPAS_EXE_NAME=$(EXE_NAME) -DMPAS_PERF_MOD_TIMERS +override CFLAGS += -DMPAS_NO_LOG_REDIRECT -DUSE_PIO2 -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -DMPAS_PERF_MOD_TIMERS +override FFLAGS += -DMPAS_NO_LOG_REDIRECT -DUSE_PIO2 -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -DMPAS_PERF_MOD_TIMERS +override CPPFLAGS += $(CPPDEFS) $(MODEL_FORMULATION) $(FILE_OFFSET) $(ZOLTAN_DEFINE) -DMPAS_NO_LOG_REDIRECT -DUSE_PIO2 -DMPAS_NO_ESMF_INIT -DMPAS_ESM_SHR_CONST -D_MPI -DMPAS_NAMELIST_SUFFIX=$(NAMELIST_SUFFIX) -DMPAS_EXE_NAME=$(EXE_NAME) -DMPAS_PERF_MOD_TIMERS override CPPINCLUDES += -I$(EXEROOT)/$(COMPONENT)/source/inc -I$(INSTALL_SHAREDPATH)/include -I$(INSTALL_SHAREDPATH)/$(COMP_INTERFACE)/$(ESMFDIR)/$(NINST_VALUE)/csm_share -I$(NETCDF)/include -I$(PIO) -I$(PNETCDF)/include override FCINCLUDES += -I$(EXEROOT)/$(COMPONENT)/source/inc -I$(INSTALL_SHAREDPATH)/include -I$(INSTALL_SHAREDPATH)/$(COMP_INTERFACE)/$(ESMFDIR)/$(NINST_VALUE)/csm_share -I$(NETCDF)/include -I$(PIO) -I$(PNETCDF)/include LIBS += -L$(PIO) -L$(PNETCDF)/lib -L$(NETCDF)/lib -L$(LIBROOT) -L$(INSTALL_SHAREDPATH)/lib -lpio -lpnetcdf -lnetcdf diff --git a/src/build_core.cmake b/src/build_core.cmake new file mode 100644 index 0000000000..c2c36464cb --- /dev/null +++ b/src/build_core.cmake @@ -0,0 +1,67 @@ +function(build_core CORE) + set(EXE_NAME ${CORE}_model) + set(NAMELIST_SUFFIX ${CORE}) + + # Map the ESM component corresponding to each MPAS core + if (CORE STREQUAL "ocean") + set(COMPONENT "ocn") + elseif(CORE STREQUAL "landice") + set(COMPONENT "glc") + elseif(CORE STREQUAL "seaice") + set(COMPONENT "ice") + else() + message(FATAL_ERROR "Unrecognized core: ${CORE}") + endif() + + # Gather sources + set(CORE_BLDDIR ${CMAKE_BINARY_DIR}/core_${CORE}) + if (NOT EXISTS ${CORE_BLDDIR}) + file(MAKE_DIRECTORY ${CORE_BLDDIR}) + endif() + + set(CORE_INPUT_DIR ${CORE_BLDDIR}/default_inputs) + if (NOT EXISTS ${CORE_INPUT_DIR}) + file(MAKE_DIRECTORY ${CORE_INPUT_DIR}) + endif() + + # Provides us RAW_SOURCES, CPPDEFS, and INCLUDES + include(${CMAKE_CURRENT_SOURCE_DIR}/core_${CORE}/${CORE}.cmake) + + add_library(${COMPONENT}) + target_compile_definitions(${COMPONENT} PRIVATE ${CPPDEFS}) + target_include_directories(${COMPONENT} PRIVATE ${INCLUDES}) + + # Make .inc files + add_custom_command ( + OUTPUT ${CORE_BLDDIR}/Registry_processed.xml + COMMAND cpp -P -traditional ${CPPDEFS} -Uvector + ${CMAKE_CURRENT_SOURCE_DIR}/core_${CORE}/Registry.xml > 
Registry_processed.xml + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/core_${CORE}/Registry.xml + WORKING_DIRECTORY ${CORE_BLDDIR} + ) + + set(INC_DIR ${CORE_BLDDIR}/inc) + if (NOT EXISTS ${INC_DIR}) + file(MAKE_DIRECTORY ${INC_DIR}) + endif() + + add_custom_command( + OUTPUT ${INC_DIR}/core_variables.inc + COMMAND ${CMAKE_BINARY_DIR}/mpas-source/src/tools/parse < ${CORE_BLDDIR}/Registry_processed.xml + DEPENDS parse ${CORE_BLDDIR}/Registry_processed.xml + WORKING_DIRECTORY ${INC_DIR} + ) + + # Disable qsmp for some files + if (FFLAGS MATCHES ".*-qsmp.*") + foreach(DISABLE_QSMP_FILE IN LISTS DISABLE_QSMP) + get_filename_component(SOURCE_EXT ${DISABLE_QSMP_FILE} EXT) + string(REPLACE "${SOURCE_EXT}" ".f90" SOURCE_F90 ${DISABLE_QSMP_FILE}) + set_property(SOURCE ${CMAKE_BINARY_DIR}/${SOURCE_F90} APPEND_STRING PROPERTY COMPILE_FLAGS " -qnosmp") + endforeach() + endif() + + genf90_targets("${RAW_SOURCES}" "${INCLUDES}" "${CPPDEFS}" "${NO_PREPROCESS}" "${INC_DIR}") + target_sources(${COMPONENT} PRIVATE ${SOURCES} $<TARGET_OBJECTS:common>) + +endfunction(build_core) diff --git a/src/cmake_utils.cmake b/src/cmake_utils.cmake new file mode 100644 index 0000000000..c3a25f238d --- /dev/null +++ b/src/cmake_utils.cmake @@ -0,0 +1,74 @@ +# Function for handling nl and st gen +function(handle_st_nl_gen NL_GEN_ARGS ST_GEN_ARGS CORE_INPUT_DIR_ARG CORE_BLDDIR_ARG) + foreach(NL_GEN_ARG IN LISTS NL_GEN_ARGS) + separate_arguments(ITEMS UNIX_COMMAND "${NL_GEN_ARG}") + list(GET ITEMS 0 ITEM) + list(APPEND INPUTS ${ITEM}) + add_custom_command( + OUTPUT ${CORE_INPUT_DIR_ARG}/${ITEM} + COMMAND ${CMAKE_BINARY_DIR}/tools/namelist_gen ${CORE_BLDDIR_ARG}/Registry_processed.xml ${NL_GEN_ARG} + DEPENDS namelist_gen ${CORE_BLDDIR_ARG}/Registry_processed.xml + WORKING_DIRECTORY ${CORE_INPUT_DIR_ARG} + ) + endforeach() + + foreach(ST_GEN_ARG IN LISTS ST_GEN_ARGS) + separate_arguments(ITEMS UNIX_COMMAND "${ST_GEN_ARG}") + list(GET ITEMS 0 ITEM) + list(APPEND INPUTS ${ITEM}) + add_custom_command( + OUTPUT ${CORE_INPUT_DIR_ARG}/${ITEM} + COMMAND ${CMAKE_BINARY_DIR}/tools/streams_gen ${CORE_BLDDIR_ARG}/Registry_processed.xml ${ST_GEN_ARG} + DEPENDS streams_gen ${CORE_BLDDIR_ARG}/Registry_processed.xml + WORKING_DIRECTORY ${CORE_INPUT_DIR_ARG} + ) + endforeach() + + foreach(INPUT IN LISTS INPUTS) + add_custom_command( + OUTPUT ${CORE_BLDDIR_ARG}/${INPUT} + COMMAND ${CMAKE_COMMAND} -E copy ${CORE_INPUT_DIR_ARG}/${INPUT} ${CORE_BLDDIR_ARG}/${INPUT} + DEPENDS ${CORE_INPUT_DIR_ARG}/${INPUT} + WORKING_DIRECTORY ${CORE_BLDDIR_ARG} + ) + endforeach() +endfunction() + +# Function for generating f90 file targets, will add to parent's SOURCES var +function(genf90_targets RAW_SOURCES_ARG INCLUDES_ARG CPPDEFS_ARG NO_PREPROCESS_ARG CORE_INC_DIR_ARG) + # Add -I to includes so that they can used for cpp command + foreach(ITEM IN LISTS INCLUDES_ARG) + list(APPEND INCLUDES_I "-I${ITEM}") + endforeach() + + # Run all .F files through cpp to generate the f90 file + foreach(RAW_SOURCE_FILE IN LISTS RAW_SOURCES_ARG) + get_filename_component(SOURCE_EXT ${RAW_SOURCE_FILE} EXT) + if ( (SOURCE_EXT STREQUAL ".F" OR SOURCE_EXT STREQUAL ".F90") AND NOT RAW_SOURCE_FILE IN_LIST NO_PREPROCESS_ARG) + string(REPLACE "${SOURCE_EXT}" ".f90" SOURCE_F90 ${RAW_SOURCE_FILE}) + get_filename_component(DIR_RELATIVE ${SOURCE_F90} DIRECTORY) + set(DIR_ABSOLUTE ${CMAKE_BINARY_DIR}/${DIR_RELATIVE}) + if (NOT EXISTS ${DIR_ABSOLUTE}) + file(MAKE_DIRECTORY ${DIR_ABSOLUTE}) + endif() + if (CORE_INC_DIR_ARG) + add_custom_command ( + OUTPUT ${CMAKE_BINARY_DIR}/${SOURCE_F90} + COMMAND cpp 
-P -traditional ${CPPDEFS_ARG} ${INCLUDES_I} -Uvector + ${CMAKE_CURRENT_SOURCE_DIR}/${RAW_SOURCE_FILE} > ${CMAKE_BINARY_DIR}/${SOURCE_F90} + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${RAW_SOURCE_FILE} ${CORE_INC_DIR_ARG}/core_variables.inc) + else() + add_custom_command ( + OUTPUT ${CMAKE_BINARY_DIR}/${SOURCE_F90} + COMMAND cpp -P -traditional ${CPPDEFS_ARG} ${INCLUDES_I} -Uvector + ${CMAKE_CURRENT_SOURCE_DIR}/${RAW_SOURCE_FILE} > ${CMAKE_BINARY_DIR}/${SOURCE_F90}) + endif() + list(APPEND LOCAL_SOURCES ${CMAKE_BINARY_DIR}/${SOURCE_F90}) + else() + list(APPEND LOCAL_SOURCES ${RAW_SOURCE_FILE}) + endif() + endforeach() + + set(SOURCES ${LOCAL_SOURCES} PARENT_SCOPE) + +endfunction(genf90_targets) diff --git a/src/core_atmosphere/diagnostics/soundings.F b/src/core_atmosphere/diagnostics/soundings.F index 166921eb59..a287f0f4b8 100644 --- a/src/core_atmosphere/diagnostics/soundings.F +++ b/src/core_atmosphere/diagnostics/soundings.F @@ -54,8 +54,8 @@ subroutine soundings_setup(configs, all_pools, simulation_clock, dminfo) use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array, mpas_pool_get_config, & mpas_pool_get_error_level, mpas_pool_set_error_level use mpas_io_units, only : mpas_new_unit, mpas_release_unit - use mpas_timekeeping, only : MPAS_timeInterval_type, MPAS_time_type, MPAS_set_timeInterval, & - MPAS_get_clock_time, MPAS_add_clock_alarm, MPAS_NOW + use mpas_derived_types, only : MPAS_Time_type, MPAS_TimeInterval_type, MPAS_NOW + use mpas_timekeeping, only : MPAS_set_timeInterval, MPAS_get_clock_time, MPAS_add_clock_alarm use mpas_dmpar, only : IO_NODE, mpas_dmpar_bcast_int, mpas_dmpar_bcast_logical, mpas_dmpar_bcast_char implicit none @@ -229,8 +229,8 @@ subroutine soundings_compute() use mpas_derived_types, only : MPAS_pool_type use mpas_pool_routines, only : MPAS_pool_get_dimension, MPAS_pool_get_array - use mpas_timekeeping, only : MPAS_time_type, MPAS_is_alarm_ringing, MPAS_reset_clock_alarm, MPAS_get_clock_time, & - MPAS_get_time, MPAS_NOW + use mpas_derived_types, only : MPAS_Time_type, MPAS_NOW + use mpas_timekeeping, only : MPAS_is_alarm_ringing, MPAS_reset_clock_alarm, MPAS_get_clock_time, MPAS_get_time use mpas_constants, only : rvord implicit none diff --git a/src/core_atmosphere/dynamics/mpas_atm_time_integration.F b/src/core_atmosphere/dynamics/mpas_atm_time_integration.F index e286ee0f8f..0c6a8d7594 100644 --- a/src/core_atmosphere/dynamics/mpas_atm_time_integration.F +++ b/src/core_atmosphere/dynamics/mpas_atm_time_integration.F @@ -15,8 +15,8 @@ module atm_time_integration use mpas_dmpar use mpas_vector_reconstruction ! 
Added only clause to keep xlf90 from getting confused from the overloaded abs intrinsic in mpas_timekeeping - use mpas_timekeeping, only: MPAS_Time_type, MPAS_TimeInterval_type, & - mpas_set_time, mpas_set_timeInterval, mpas_get_time, operator(+), add_t_ti + use mpas_derived_types, only : MPAS_Time_type, MPAS_TimeInterval_type, MPAS_NOW + use mpas_timekeeping, only: mpas_set_time, mpas_set_timeInterval, mpas_get_time, operator(+) use mpas_timer #ifdef DO_PHYSICS diff --git a/src/core_atmosphere/mpas_atm_core.F b/src/core_atmosphere/mpas_atm_core.F index 84754e94f4..b5a4639da9 100644 --- a/src/core_atmosphere/mpas_atm_core.F +++ b/src/core_atmosphere/mpas_atm_core.F @@ -36,7 +36,6 @@ function atm_core_init(domain, startTimeStamp) result(ierr) real (kind=RKIND), pointer :: dt type (block_type), pointer :: block - integer :: i logical, pointer :: config_do_restart type (mpas_pool_type), pointer :: state @@ -104,6 +103,7 @@ function atm_core_init(domain, startTimeStamp) result(ierr) call MPAS_stream_mgr_reset_alarms(domain % streamManager, streamID='static', direction=MPAS_STREAM_INPUT, ierr=ierr) call MPAS_stream_mgr_reset_alarms(domain % streamManager, streamID='input', direction=MPAS_STREAM_INPUT, ierr=ierr) call MPAS_stream_mgr_reset_alarms(domain % streamManager, streamID='restart', direction=MPAS_STREAM_INPUT, ierr=ierr) + ! ! Read all other inputs ! For now we don't do this here to match results with previous code; to match requires @@ -203,8 +203,8 @@ subroutine atm_simulation_clock_init(core_clock, configs, ierr) type (mpas_pool_type), intent(inout) :: configs integer, intent(out) :: ierr - type (MPAS_Time_Type) :: startTime, stopTime, alarmStartTime - type (MPAS_TimeInterval_type) :: runDuration, timeStep, alarmTimeStep + type (MPAS_Time_Type) :: startTime, stopTime + type (MPAS_TimeInterval_type) :: runDuration, timeStep integer :: local_err real (kind=RKIND), pointer :: config_dt character (len=StrKIND), pointer :: config_start_time @@ -506,9 +506,6 @@ function atm_core_run(domain) result(ierr) type (mpas_pool_type), pointer :: state, diag, mesh, diag_physics, tend, tend_physics - ! For high-frequency diagnostics output - character (len=StrKIND) :: tempfilename - ! For timing information real (kind=R8KIND) :: integ_start_time, integ_stop_time real (kind=R8KIND) :: diag_start_time, diag_stop_time @@ -724,9 +721,8 @@ function atm_core_run(domain) result(ierr) block_ptr => domain % blocklist do while (associated(block_ptr)) - call mpas_pool_get_subpool(block_ptr % structs, 'diag', diag) call mpas_pool_get_subpool(block_ptr % structs, 'diag_physics', diag_physics) - call atm_reset_diagnostics(diag, diag_physics) + call atm_reset_diagnostics(diag_physics) block_ptr => block_ptr % next end do @@ -804,19 +800,17 @@ subroutine atm_compute_output_diagnostics(state, time_lev, diag, mesh) end subroutine atm_compute_output_diagnostics - subroutine atm_reset_diagnostics(diag, diag_physics) + subroutine atm_reset_diagnostics(diag_physics) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ! reset some diagnostics after output ! - ! Input: diag - contains dynamics diagnostic fields - ! daig_physics - contains physics diagnostic fields + ! Input: diag_physics - contains physics diagnostic fields ! ! Output: whatever diagnostics need resetting after output !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
implicit none - type (mpas_pool_type), intent(inout) :: diag type (mpas_pool_type), intent(inout) :: diag_physics real (kind=RKIND), dimension(:), pointer :: refl10cm_1km_max diff --git a/src/core_atmosphere/physics/mpas_atmphys_camrad_init.F b/src/core_atmosphere/physics/mpas_atmphys_camrad_init.F index 2c661302f0..48cb5a4413 100644 --- a/src/core_atmosphere/physics/mpas_atmphys_camrad_init.F +++ b/src/core_atmosphere/physics/mpas_atmphys_camrad_init.F @@ -143,7 +143,7 @@ subroutine camradinit(dminfo,mesh,atm_input,diag,diag_physics,state,time_lev) ! call mpas_log_write(' end subroutine esinti') !initialization of ozone mixing ratios: - call oznini(dminfo,mesh,atm_input) + call oznini(mesh,atm_input) ! call mpas_log_write(' end subroutine oznini') !initialization of aerosol concentrations: @@ -736,14 +736,13 @@ subroutine aer_optics_initialize(dminfo) end subroutine aer_optics_initialize !================================================================================================================= - subroutine oznini(dminfo,mesh,atm_input) + subroutine oznini(mesh,atm_input) !================================================================================================================= !This subroutine assumes a uniform distribution of ozone concentration. It should be replaced !with monthly climatology varying ozone distribution. !input arguments: - type(dm_info):: dminfo type(mpas_pool_type),intent(in):: mesh !inout arguments: @@ -779,15 +778,10 @@ subroutine oznini(dminfo,mesh,atm_input) call mpas_pool_get_array(atm_input,'ozmixm',ozmixm) !-- read in ozone pressure data: - if(dminfo % my_proc_id == IO_NODE) then - call mpas_new_unit(read_unit) - if(read_unit < 0) & - call physics_error_fatal('module_ra_cam: oznini: Cannot find unused '//& - 'fortran unit to read in lookup table.') - end if - -!distribute unit to other processors: - call mpas_dmpar_bcast_int(dminfo,read_unit) + call mpas_new_unit(read_unit) + if(read_unit < 0) & + call physics_error_fatal('module_ra_cam: oznini: Cannot find unused '//& + 'fortran unit to read in lookup table.') open(read_unit,file='OZONE_PLEV.TBL',action='READ',status='OLD',iostat=istat) if(istat /= open_ok) & @@ -829,9 +823,7 @@ subroutine oznini(dminfo,mesh,atm_input) enddo close(read_unit) - if(dminfo % my_proc_id == IO_NODE) then - call mpas_release_unit(read_unit) - end if + call mpas_release_unit(read_unit) !INTERPOLATION OF INPUT OZONE DATA TO MPAS GRID: !call mpas_log_write('max latCell= $r', realArgs=(/maxval(latCell)/degrad/)) diff --git a/src/core_atmosphere/physics/mpas_atmphys_manager.F b/src/core_atmosphere/physics/mpas_atmphys_manager.F index 2d8301afb6..fe8ee5c27c 100644 --- a/src/core_atmosphere/physics/mpas_atmphys_manager.F +++ b/src/core_atmosphere/physics/mpas_atmphys_manager.F @@ -20,7 +20,7 @@ module mpas_atmphys_manager implicit none private - public:: physics_timetracker,physics_run_init,physics_run_finalize + public:: physics_timetracker,physics_run_init integer, public:: year !Current year. integer, public:: julday !Initial Julian day. 
@@ -756,141 +756,6 @@ subroutine physics_run_init(configs,mesh,state,clock,stream_manager) end subroutine physics_run_init -!================================================================================================================= - subroutine physics_run_finalize(configs,clock,stream_manager) -!================================================================================================================= - -!input arguments: - type(mpas_pool_type),intent(in):: configs - type(MPAS_Clock_type):: clock - type (MPAS_streamManager_type), intent(inout) :: stream_manager - -!local pointers: - character(len=StrKIND),pointer:: config_radt_lw_scheme, & - config_radt_sw_scheme - - character(len=StrKIND),pointer:: config_conv_interval, & - config_pbl_interval, & - config_radtlw_interval, & - config_radtsw_interval, & - config_bucket_update - - logical,pointer:: config_sst_update - logical,pointer:: config_microp_re - - character(len=StrKIND) :: stream_interval - integer:: ierr - -!----------------------------------------------------------------------------------------------------------------- -!call mpas_log_write('') -!call mpas_log_write('--- enter subroutine physics_run_finalize:') - - call mpas_pool_get_config(configs,'config_radt_lw_scheme' ,config_radt_lw_scheme ) - call mpas_pool_get_config(configs,'config_radt_sw_scheme' ,config_radt_sw_scheme ) - - call mpas_pool_get_config(configs,'config_conv_interval' ,config_conv_interval ) - call mpas_pool_get_config(configs,'config_pbl_interval' ,config_pbl_interval ) - call mpas_pool_get_config(configs,'config_radtlw_interval' ,config_radtlw_interval ) - call mpas_pool_get_config(configs,'config_radtsw_interval' ,config_radtsw_interval ) - call mpas_pool_get_config(configs,'config_bucket_update' ,config_bucket_update ) - call mpas_pool_get_config(configs,'config_sst_update' ,config_sst_update ) - call mpas_pool_get_config(configs,'config_microp_re' ,config_microp_re ) - -!remove alarms for calling the longwave and shortwave radiation schemes, the convection schemes, -!and the PBL schemes: - - if(trim(config_radtlw_interval) /= "none") then - call mpas_remove_clock_alarm(clock,radtlwAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_run_finalize: error removing radtlwAlarmID') - endif - - if(trim(config_radtsw_interval) /= "none") then - call mpas_remove_clock_alarm(clock,radtswAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_run_finalize: error removing alarm radtsw') - endif - - if(trim(config_conv_interval) /= "none") then - call mpas_remove_clock_alarm(clock,convAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing convAlarmID') - endif - - if(trim(config_pbl_interval) /= "none") then - call mpas_remove_clock_alarm(clock,pblAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing pblAlarmID') - endif - -!remove alarm for updating the background surface albedo and the greeness fraction: - call mpas_remove_clock_alarm(clock,greenAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm greeness') - -!remove alarm for updating the surface boundary conditions: - if (config_sst_update) then - call mpas_remove_clock_alarm(clock,sfcbdyAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm sfcbdy') - endif - -!remove alarm to update the ozone path length, the trace gas path 
length, the total emissivity, -!and the total absorptivity in the "CAM" long-wave radiation codes. - if(trim(config_radt_lw_scheme) .eq. "cam_lw" .or. & - trim(config_radt_sw_scheme) .eq. "cam_sw" ) then - call mpas_remove_clock_alarm(clock,camAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm CAM') - endif - -!remove alarm to write the "CAM" local arrays absnst_p, absnxt_p, and emstot_p to the MPAS arrays -!for writing to the restart file at the bottom of the time-step: - if(trim(config_radt_lw_scheme) .eq. "cam_lw" ) then - call MPAS_stream_mgr_get_property(stream_manager, 'restart', MPAS_STREAM_PROPERTY_RECORD_INTV, stream_interval, & - direction=MPAS_STREAM_OUTPUT, ierr=ierr) - if(trim(stream_interval) /= 'none') then - call mpas_remove_clock_alarm(clock,camlwAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm CAMLW') - endif - endif - -!remove alarm to check if the accumulated rain due to cloud microphysics and convection is -!greater than its maximum allowed value: - if(config_bucket_update /= "none") then - call mpas_remove_clock_alarm(clock,acrainAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm rain limit') - endif - -!remove alarm to check if the accumulated radiation diagnostics due to long- and short-wave radiation -!is greater than its maximum allowed value: - if(config_bucket_update /= "none") then - call mpas_remove_clock_alarm(clock,acradtAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm radiation limit') - endif - -!remove alarm to calculate physics diagnostics on IO outpt only: - call MPAS_stream_mgr_get_property(stream_manager, 'output', MPAS_STREAM_PROPERTY_RECORD_INTV, stream_interval, & - direction=MPAS_STREAM_OUTPUT, ierr=ierr) - if(trim(stream_interval) /= 'none') then - call mpas_remove_clock_alarm(clock,diagAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm diag') - else - call MPAS_stream_mgr_get_property(stream_manager, 'diagnostics', MPAS_STREAM_PROPERTY_RECORD_INTV, stream_interval, & - direction=MPAS_STREAM_OUTPUT, ierr=ierr) - if(trim(stream_interval) /= 'none') then - call mpas_remove_clock_alarm(clock,diagAlarmID,ierr=ierr) - if(ierr /= 0) & - call physics_error_fatal('subroutine physics_init: error removing alarm diag') - end if - endif - - end subroutine physics_run_finalize - !================================================================================================================= end module mpas_atmphys_manager !================================================================================================================= diff --git a/src/core_init_atmosphere/CMakeLists.txt b/src/core_init_atmosphere/CMakeLists.txt index 2464163f3d..815d7db291 100644 --- a/src/core_init_atmosphere/CMakeLists.txt +++ b/src/core_init_atmosphere/CMakeLists.txt @@ -22,6 +22,7 @@ set(init_atm_core_srcs mpas_atmphys_functions.F mpas_atmphys_initialize_real.F mpas_atmphys_utilities.F + mpas_geotile_manager.F mpas_init_atm_bitarray.F mpas_init_atm_cases.F mpas_init_atm_core.F @@ -34,6 +35,9 @@ set(init_atm_core_srcs mpas_init_atm_static.F mpas_init_atm_surface.F mpas_init_atm_vinterp.F + mpas_kd_tree.F + mpas_parse_geoindex.F + mpas_stack.F read_geogrid.c) add_library(core_init_atmosphere ${init_atm_core_srcs}) diff --git a/src/core_init_atmosphere/Makefile 
b/src/core_init_atmosphere/Makefile index e8f71becfc..9494a5b7c2 100644 --- a/src/core_init_atmosphere/Makefile +++ b/src/core_init_atmosphere/Makefile @@ -19,7 +19,11 @@ OBJS = \ mpas_atmphys_date_time.o \ mpas_atmphys_functions.o \ mpas_atmphys_initialize_real.o \ - mpas_atmphys_utilities.o + mpas_atmphys_utilities.o \ + mpas_stack.o \ + mpas_kd_tree.o \ + mpas_parse_geoindex.o \ + mpas_geotile_manager.o all: core_hyd @@ -66,17 +70,29 @@ mpas_init_atm_read_met.o: read_geogrid.o: +mpas_kd_tree.o: + mpas_init_atm_llxy.o: mpas_init_atm_core_interface.o: mpas_init_atm_core.o mpas_init_atm_core.o: mpas_advection.o mpas_init_atm_cases.o +mpas_stack.o: + +mpas_parse_geoindex.o: + +mpas_geotile_manager.o: mpas_parse_geoindex.o + mpas_init_atm_static.o: \ mpas_atm_advection.o \ mpas_init_atm_hinterp.o \ mpas_init_atm_llxy.o \ - mpas_atmphys_utilities.o + mpas_atmphys_utilities.o \ + mpas_stack.o \ + mpas_kd_tree.o \ + mpas_parse_geoindex.o \ + mpas_geotile_manager.o mpas_init_atm_surface.o: \ mpas_init_atm_hinterp.o \ diff --git a/src/core_init_atmosphere/mpas_geotile_manager.F b/src/core_init_atmosphere/mpas_geotile_manager.F new file mode 100644 index 0000000000..0aa3298153 --- /dev/null +++ b/src/core_init_atmosphere/mpas_geotile_manager.F @@ -0,0 +1,1109 @@ +module mpas_geotile_manager + + use iso_c_binding, only : c_float, c_char + + use mpas_constants, only : pii + use mpas_kind_types, only : RKIND, StrKIND + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_ERR, MPAS_POOL_SILENT + use mpas_pool_routines, only : mpas_pool_type, mpas_pool_destroy_pool, mpas_pool_create_pool + use mpas_pool_routines, only : mpas_pool_add_config, mpas_pool_get_config + use mpas_pool_routines, only : mpas_pool_get_error_level, mpas_pool_set_error_level + use mpas_stack + + implicit none + + public :: mpas_geotile_mgr_type + public :: mpas_geotile_type + public :: mpas_latlon_to_xyz + + private + + type mpas_geotile_mgr_type + type (mpas_pool_type), pointer :: pool + type (tile_hash), dimension(:,:), pointer :: hash + type (mpas_stack_type), pointer :: stack + + character (len=StrKIND) :: directory ! Path to the dataset directory + character (len=StrKIND) :: index ! Path the index file of the dataset directory + + integer :: nTileX ! Number of tiles in the X direction + integer :: nTileY ! Number of tiles in the Y direction + integer :: pixel_nx ! Total number of pixels in the x direction + integer :: pixel_ny ! Total number of pixels in the y direction + contains + ! Public Procedures + procedure, public :: init => mpas_geotile_mgr_init + procedure, public :: finalize => mpas_geotile_mgr_finalize + procedure, public :: get_tile => mpas_geotile_mgr_get_tile + procedure, public :: latlon_to_pixel => mpas_geotile_mgr_latlon_to_pixel + procedure, public :: tile_to_latlon => mpas_geotile_mgr_tile_to_latlon + procedure, public :: push_neighbors => mpas_geotile_mgr_push_neighbors + + ! Stack Procedures + procedure, public :: push_tile => mpas_geotile_mgr_push_tile + procedure, public :: pop_tile => mpas_geotile_mgr_pop_tile + procedure, public :: is_stack_empty => mpas_geotile_mgr_stack_is_empty + + ! 
Private Procedures + procedure, private :: search_tile => mpas_geotile_mgr_search_tile + procedure, private :: add_tile => mpas_geotile_mgr_add_tile + procedure, private :: gen_filename => mpas_geotile_mgr_gen_tile_name + procedure, private :: hash_to_ll => mpas_geotile_mgr_hash_to_latlon + end type mpas_geotile_mgr_type + + + type, extends(mpas_stack_payload_type) :: mpas_geotile_type + real (c_float), dimension(:,:,:), pointer :: tile + + character (len=StrKIND) :: fname ! Path to the file that contains the data for this tile + integer :: hash_x ! The x offset of this tile in the hash table + integer :: hash_y ! The y offset of this tile in the hash table + + integer :: x, y ! The tiles range, in pixels + logical :: is_processed = .false. + end type mpas_geotile_type + + + type tile_hash + type(mpas_geotile_type), pointer :: ptr => null() + end type tile_hash + + + contains + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_init => init + ! + !> \brief Initialize a mpas_geotile_mgr class + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Initialize a geotile manager class by parsing the index file located + !> within path and allocated needed data structures for static interpolation. + !> Init should be called before calling any other mpas_geotile_mgr_type + !> procedures. If path is not a directory or no index file is found in path, + !> 1 will be returned. Upon success 0 will be returned. + !> + !> This function will also allocate the following variables in the pool attribute + !> of this geotile manager instance if they are not found within the index file: + !> * tile_bdr = 0 + !> * signed = 0 ! No + !> * scalefactor = 1.0_RKIND + !> * endian = "big" + !> * iswater = 16 + !> * islake = -1 + !> * isice = 24 + !> * isurban = 1 + !> * isoilwater = 14 + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_init(mgr, path) result(ierr) + + use mpas_parse_geoindex, only : mpas_parse_index + + implicit none + + ! Input variables + class (mpas_geotile_mgr_type) :: mgr + character (len=*), intent(in) :: path + + ! Local variables + character (len=StrKIND), pointer :: fieldType + character (len=StrKIND), pointer :: endian + integer, pointer :: tile_nx ! Number of pixels in the x-direction for a single tile + integer, pointer :: tile_ny ! Number of pixels in the y-direction for a single tile + integer, pointer :: signed + integer, pointer :: tile_bdr + integer, pointer :: iswater, islake, isice, isurban, isoilwater + integer, pointer :: category_min, category_max + integer :: err_level + real (kind=RKIND), pointer :: dx ! Grid spacing in the x-direction + real (kind=RKIND), pointer :: dy ! Grid spacing in the y-direction + real (kind=RKIND), pointer :: scalefactor + logical :: res + + ! Return variable + integer :: ierr + + ierr = 0 + + mgr % directory = path + + ! Check to see if the index file exists in the directory + inquire(file=trim(mgr % directory)//"index", exist=res) + if (.not. res) then + call mpas_log_write("Could not find an 'index' file in geotile directory: "//trim(path), messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + mgr % index = trim(mgr % directory)//"index" + + ! 
Create the pool for this geotile and call mpas_parse_index + call mpas_pool_create_pool(mgr % pool) + ierr = mpas_parse_index(mgr % index, mgr % pool) + if (ierr /= 0) then + call mpas_log_write("Error parsing geotile index file: "//trim(mgr % index), messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + + signed => null() + endian => null() + scalefactor => null() + tile_bdr => null() + + err_level = mpas_pool_get_error_level() + call mpas_pool_set_error_level(MPAS_POOL_SILENT) + + call mpas_pool_get_config(mgr % pool, 'signed', signed) + call mpas_pool_get_config(mgr % pool, 'endian', endian) + call mpas_pool_get_config(mgr % pool, 'scale_factor', scalefactor) + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + + ! + ! tile_bdr, signed, endian, and scale_factor all have default values, so if they + ! are not present in the index file then set them as the default values, as + ! reported in section 3-53 of the WRF-ARW User's Guide + ! + if (.not. associated(endian)) then + allocate(endian) + endian = "big" + call mpas_pool_add_config(mgr % pool, 'endian', endian) + endif + + if (.not. associated(scalefactor)) then + allocate(scalefactor) + scalefactor = 1.0_RKIND + call mpas_pool_add_config(mgr % pool, 'scale_factor', scalefactor) + endif + + if (.not. associated(signed)) then + allocate(signed) + signed = 0 ! 0 = 'no', 1 = 'yes' + call mpas_pool_add_config(mgr % pool, 'signed', signed) + endif + + if (.not. associated(tile_bdr)) then + allocate(tile_bdr) + tile_bdr = 0 + call mpas_pool_add_config(mgr % pool, 'tile_bdr', tile_bdr) + endif + + ! + ! If this is a categorical field, then check to see if it has category_max and category_min, + ! and then set the defaults of iswater, islake, isice, isurban and isoilwater + ! + call mpas_pool_get_config(mgr % pool, 'type', fieldType) + if (fieldType == 'categorical') then + category_max => null() + category_min => null() + + call mpas_pool_get_config(mgr % pool, 'category_max', category_max) + call mpas_pool_get_config(mgr % pool, 'category_min', category_min) + + if (.not. associated(category_max)) then + call mpas_log_write("The index file of this categorical dataset did not contain a category_max parameter", & + messageType=MPAS_LOG_ERR) + call mpas_pool_set_error_level(err_level) ! Reset pool error level + ierr = 1 + return + endif + + if (.not. associated(category_min)) then + call mpas_log_write("The index file of this categorical dataset did not contain a category_min parameter", & + messageType=MPAS_LOG_ERR) + call mpas_pool_set_error_level(err_level) ! Reset pool error level + ierr = 1 + return + endif + + iswater => null() + islake => null() + isice => null() + isurban => null() + isoilwater => null() + + call mpas_pool_get_config(mgr % pool, 'iswater', iswater) + call mpas_pool_get_config(mgr % pool, 'islake', islake) + call mpas_pool_get_config(mgr % pool, 'isice', isice) + call mpas_pool_get_config(mgr % pool, 'isurban', isurban) + call mpas_pool_get_config(mgr % pool, 'isoilwater', isoilwater) + + if (.not. associated(iswater)) then + allocate(iswater) + iswater = 16 + call mpas_pool_add_config(mgr % pool, 'iswater', iswater) + endif + + if (.not. associated(islake)) then + allocate(islake) + islake = -1 + call mpas_pool_add_config(mgr % pool, 'islake', islake) + endif + + if (.not. associated(isice)) then + allocate(isice) + isice = 24 + call mpas_pool_add_config(mgr % pool, 'isice', isice) + endif + + if (.not. 
associated(isurban)) then + allocate(isurban) + isurban = 1 + call mpas_pool_add_config(mgr % pool, 'isurban', isurban) + endif + + if (.not. associated(isoilwater)) then + allocate(isoilwater) + isoilwater = 14 + call mpas_pool_add_config(mgr % pool, 'isoilwater', isoilwater) + endif + endif + + ! Reset the pool's error level + call mpas_pool_set_error_level(err_level) + + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'dx', dx) + call mpas_pool_get_config(mgr % pool, 'dy', dy) + + ! Calculate the total number of pixels in x dir + ! NOTE: This calculation assumes that a dataset is a global dataset and may + ! not work correctly for non-global datasets + mgr % pixel_nx = nint(360.0_RKIND / dx) + mgr % pixel_ny = nint(180.0_RKIND / dy) + + ! Calculate the number of tiles in the x, y directions + ! NOTE: This calculation assumes that a dataset is a global dataset and may + ! not work correctly for non-global datasets + mgr % nTileX = mgr % pixel_nx / tile_nx + mgr % nTileY = mgr % pixel_ny / tile_ny + + ! Allocate hash table + allocate(mgr % hash(0: mgr % nTileX, 0: mgr % nTileY)) + + ! Mark the stack as empty + mgr % stack => null() + + end function mpas_geotile_mgr_init + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_finalize => finalize + ! + !> \brief Free all memory used by the mpas_geotile_mgr_type + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Deallocated all memory used by this geotile_mgr_type and destroy the + !> associated pool. After calling this function, none of the methods + !> should be used, unless the class is reinitialized by recalling + !> mpas_geotile_mgr_init. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_finalize(mgr) result(ierr) + + implicit none + + ! Input variable + class (mpas_geotile_mgr_type) :: mgr + + ! Return variable + integer :: ierr + + ! Local variable + integer :: i + integer :: j + + ierr = 0 + + ! Loop through the hash table and deallocate any loaded tiles + ! Then deallocate the hash table + do i = 0, mgr % nTileX + do j = 0, mgr % nTileY + if (associated(mgr % hash(i, j) % ptr)) then + deallocate(mgr % hash(i, j) % ptr % tile) + deallocate(mgr % hash(i, j) % ptr) + endif + enddo + enddo + deallocate(mgr % hash) + + if (associated(mgr % hash)) then + call mpas_log_write("Problem deallocating the geotile hash table", messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + + call mpas_pool_destroy_pool(mgr % pool) + if (associated(mgr % pool)) then + call mpas_log_write("Problem deallocating the geotile pool", messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + + call mpas_stack_free(mgr % stack) + if (associated(mgr % stack)) then + call mpas_log_write("Problem deallocating the stack", messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + + end function mpas_geotile_mgr_finalize + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_get_tile => get_tile + ! + !> \brief Return an array containing the values of a datatile + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Retrieve the datatile that contains the coordinate lat, lon of the dataset + !> that this mpas_geotile_manager instance was initalized with. 
Both lat and lon
+    !>  should be given in radians; lat should lie in the range -1/2 * pi to
+    !>  1/2 * pi, and lon values outside the range -pi to pi will be normalized
+    !>  to lie between -pi and pi. Upon success 0 will be returned and tile will
+    !>  point to the mpas_geotile_type that holds the datatile which
+    !>  contains the coordinate lat, lon.
+    !
+    !-----------------------------------------------------------------------
+    function mpas_geotile_mgr_get_tile(mgr, lat, lon, tile) result(ierr)
+        implicit none
+
+        ! Input variables
+        class (mpas_geotile_mgr_type) :: mgr
+        real (kind=RKIND), value :: lat
+        real (kind=RKIND), value :: lon
+        type (mpas_geotile_type), pointer :: tile
+
+        ! Return variable
+        integer :: ierr
+
+        ierr = 0
+        tile => null()
+
+        ! Normalize longitude to be between -pi and pi
+        call normalize_lon(lon)
+
+        if (.not. mgr % search_tile(lat, lon, tile)) then
+            ierr = mgr % add_tile(lat, lon, tile)
+        endif
+
+    end function mpas_geotile_mgr_get_tile
+
+
+    !***********************************************************************
+    !
+    ! private function mpas_geotile_mgr_search_tile => search_tile
+    !
+    !> \brief Search to see if a tile has already been loaded
+    !> \author Miles A. Curry
+    !> \date 02/20/2020
+    !> \details
+    !>  Private function that searches to see if the datatile that contains
+    !>  the coordinate lat, lon has already been loaded. If the datatile has been
+    !>  loaded, .true. will be returned and tile will point to the mpas_geotile_type
+    !>  that contains the datatile. If the datatile has not been loaded, .false.
+    !>  will be returned and tile will be unassociated.
+    !
+    !-----------------------------------------------------------------------
+    function mpas_geotile_mgr_search_tile(mgr, lat, lon, tile) result(loaded)
+
+        implicit none
+
+        ! Input variables
+        class (mpas_geotile_mgr_type) :: mgr
+        real (kind=RKIND), value :: lat
+        real (kind=RKIND), value :: lon
+        type (mpas_geotile_type), pointer :: tile
+
+        ! Return variable
+        logical :: loaded
+
+        ! Local variables
+        integer, pointer :: tile_nx
+        integer, pointer :: tile_ny
+        character (len=StrKIND) :: fname
+        integer :: x, y
+        integer :: start_x
+        integer :: start_y
+        integer :: ierr
+
+        loaded = .false.
+        tile => null()
+
+        call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx)
+        call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny)
+
+        !
+        ! Using gen_filename, get the tile's starting x and y pixel values
+        !
+        ierr = mgr % gen_filename(lat, lon, fname, start_x, start_y)
+        if (ierr /= 0) then
+            call mpas_log_write("Error generating filename", messageType=MPAS_LOG_ERR)
+            return
+        endif
+
+        !
+        ! Access the tile in the hash table (-1 here as the hash table is
+        ! indexed from 0:nTileX and 0:nTileY).
+        !
+        x = (start_x - 1) / tile_nx
+        if (x < 0 .or. x > size(mgr % hash, 1)) then
+            return
+        endif
+
+        y = (start_y - 1) / tile_ny
+        if (y < 0 .or. y > size(mgr % hash, 2)) then
+            return
+        endif
+
+        tile => mgr % hash(x,y) % ptr
+        if (associated(tile)) then
+            loaded = .true.
+        endif
+
+    end function mpas_geotile_mgr_search_tile
+
+
+    !***********************************************************************
+    !
+    ! private function mpas_geotile_mgr_add_tile => add_tile
+    !
+    !> \brief Read in a datatile file and store a reference to it
+    !> \author Miles A. Curry
+    !> \date 02/20/2020
+    !> \details
+    !>  Read the datatile that contains the coordinate lat, lon. Upon success,
+    !>  0 will be returned and tile will point to the mpas_geotile_type which
+    !>  contains the coordinate lat, lon.
Upon success a reference to that + !> mpas_geotile_type will be placed into the hash table, which can later + !> be searched via search_tile. On error, 1 will be returned and tile + !> will be unassociated. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_add_tile(mgr, lat, lon, tile) result(ierr) + + use iso_c_binding, only : c_loc, c_ptr, c_int, c_float + use mpas_c_interfacing, only : mpas_f_to_c_string + + implicit none + + interface + subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & + wordsize, status) bind(C) + use iso_c_binding, only : c_char, c_int, c_float, c_ptr + character (c_char), dimension(*), intent(in) :: fname + type (c_ptr), value :: rarray + integer (c_int), intent(in), value :: nx + integer (c_int), intent(in), value :: ny + integer (c_int), intent(in), value :: nz + integer (c_int), intent(in), value :: isigned + integer (c_int), intent(in), value :: endian + integer (c_int), intent(in), value :: wordsize + integer (c_int), intent(inout) :: status + end subroutine read_geogrid + end interface + + ! Arguments + class (mpas_geotile_mgr_type) :: mgr + real (kind=RKIND), intent(in) :: lat + real (kind=RKIND), intent(in) :: lon + type (mpas_geotile_type), intent(inout), pointer :: tile + integer :: ierr + + ! Local variables + integer, pointer :: tile_nx, tile_ny, tile_nz + integer, pointer :: tile_bdr + integer, pointer :: wordsize + integer :: start_x, start_y + integer, pointer :: signed + character (len=StrKIND), pointer :: endian + logical :: res + + character (len=StrKIND) :: fname + character (kind=c_char), dimension(StrKIND+1) :: c_fname + integer (c_int) :: c_tile_nx, c_tile_ny, c_tile_nz + integer (c_int) :: c_endian + integer (c_int) :: c_wordsize + integer (c_int) :: c_signed + integer (c_int) :: status + type (c_ptr) :: c_tile_ptr + + ierr = 0 + + tile => null() + endian => null() + signed => null() + + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'tile_z', tile_nz) + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'wordsize', wordsize) + call mpas_pool_get_config(mgr % pool, 'signed', signed) + call mpas_pool_get_config(mgr % pool, 'endian', endian) + + c_tile_nx = tile_nx + 2 * tile_bdr ! The number of pixels in the x direction, including halo cells + c_tile_ny = tile_ny + 2 * tile_bdr ! The number of pixels in the y direction, including halo cells + c_tile_nz = tile_nz ! The number of pixels in the z direction + c_wordsize = wordsize + c_signed = signed + + if (endian == "big") then + c_endian = 0 + else if (endian == "little") then + c_endian = 1 + endif + + ! + ! Determine the file that contains lat, lon. + ! + ierr = mgr % gen_filename(lat, lon, fname, start_x, start_y) + if (ierr /= 0) then + call mpas_log_write("Error creating filename for coordinate: ($r, $r)", realArgs=(/lat, lon/), messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + + ! + ! See if this file actually exists + ! + fname = trim(mgr % directory)//trim(fname) + inquire(file=trim(fname), exist=res) + if (.not. res) then + call mpas_log_write("This geotile file does not exist: "//trim(fname), messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + + call mpas_f_to_c_string(fname, c_fname) + + ! + ! Allocate and read the tile + ! 
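+        ! The data array is sized for the tile's interior pixels (tile_x by tile_y
+        ! by tile_z from the index file) plus a border of tile_bdr halo pixels on
+        ! each side, matching the c_tile_nx and c_tile_ny values passed to
+        ! read_geogrid. The values are stored exactly as read from disk; the
+        ! dataset's scale_factor is kept in mgr % pool for callers to apply.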
+ allocate(tile) + allocate(tile % tile(tile_nx + (tile_bdr * 2), tile_ny + (tile_bdr * 2), tile_nz)) + c_tile_ptr = c_loc(tile % tile) + call read_geogrid(c_fname, c_tile_ptr, c_tile_nx, c_tile_ny, c_tile_nz, c_signed, c_endian, c_wordsize, status) + if (status /= 0) then + call mpas_log_write("Error reading this geogrid file: "//trim(fname), messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + + tile % fname = fname + tile % hash_x = (start_x - 1) / tile_nx + tile % hash_y = (start_y - 1) / tile_ny + tile % x = start_x + tile % y = start_y + + ! + ! Add the tile to the hash table + ! + mgr % hash(tile % hash_x, tile % hash_y) % ptr => tile + + end function mpas_geotile_mgr_add_tile + + + !*********************************************************************** + ! + ! private function mpas_geotile_mgr_gen_tile_name => gen_filename + ! + !> \brief Generate the filename of the tile at lat, lon + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Generate the name of the file that contains the coordinate lat, lon + !> (in radians) and optionally return start_x and start_y (the location + !> of the first global pixel coordinate of tile). Warning: This function + !> can produce filenames that may not exist (For lon less than -.5 * pi and + !> greater than .5 * pi and lat less than -pi and greater than pi). + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_gen_tile_name(mgr, lat, lon, fname, start_x, start_y) result(ierr) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + real (kind=RKIND), value :: lat + real (kind=RKIND), value :: lon + character (len=StrKIND), intent(out) :: fname + integer, intent(out), optional :: start_x + integer, intent(out), optional :: start_y + + character (len=StrKIND), parameter :: fname_format = "(i5.5, '-', i5.5, '.', i5.5, '-', i5.5)" + + real (kind=RKIND), pointer :: dx + real (kind=RKIND), pointer :: dy + integer, pointer :: tile_nx + integer, pointer :: tile_ny + integer, dimension(2) :: x + integer, dimension(2) :: y + + integer :: ierr + ierr = 0 + + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'dx', dx) + call mpas_pool_get_config(mgr % pool, 'dy', dy) + + ! Find the global pixel location that contains lat, lon + call mgr % latlon_to_pixel(lat, lon, x(1), y(1)) + + ! Calculate the range of this tile, which will be its filename + x(1) = (x(1) - modulo(x(1), tile_nx)) + 1 + x(2) = x(1) + tile_nx - 1 + + y(1) = (y(1) - modulo(y(1), tile_ny)) + 1 + y(2) = y(1) + tile_ny - 1 + + write(fname, fname_format) x(1), x(2), y(1), y(2) + + if (present(start_x)) then + start_x = x(1) + endif + if (present(start_y)) then + start_y = y(1) + endif + + end function mpas_geotile_mgr_gen_tile_name + + + !*********************************************************************** + ! + ! public subroutine mpas_geotile_mgr_tile_to_latlon => tile_to_latlon + ! + !> \brief Translate a tile indices to latitude and longitude, + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Given a tile, translate the relative tile coordinates, i, j, of that + !> tile to a latitude and longitude coordinate. Upon success, lat, lon + !> will be in the range of -1/2 * pi to 1/2 * pi and 0 to 2.0 * pi, respectively. + ! 
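+    !>
+    !>  A minimal usage sketch (mirroring how the callers in
+    !>  mpas_init_atm_static.F loop over a tile; tile_bdr, tile_nx, and tile_ny
+    !>  are assumed to have been read from mgr % pool):
+    !>
+    !>    do j = 1 + tile_bdr, tile_ny + tile_bdr
+    !>       do i = 1 + tile_bdr, tile_nx + tile_bdr
+    !>          call mgr % tile_to_latlon(tile, j, i, lat_pt, lon_pt)
+    !>          ! lat_pt, lon_pt (radians) locate pixel (i,j) of this tile
+    !>       end do
+    !>    end do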
+ !----------------------------------------------------------------------- + subroutine mpas_geotile_mgr_tile_to_latlon(mgr, tile, j, i, lat, lon) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + type (mpas_geotile_type), pointer, intent(in) :: tile + integer, value :: j + integer, value :: i + real (kind=RKIND), intent(out) :: lat + real (kind=RKIND), intent(out) :: lon + + integer, pointer :: tile_bdr + real (kind=RKIND), pointer :: known_lon + real (kind=RKIND), pointer :: known_lat + real (kind=RKIND), pointer :: dx + real (kind=RKIND), pointer :: dy + integer :: ierr + + ierr = 0 + + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'known_lat', known_lat) + call mpas_pool_get_config(mgr % pool, 'known_lon', known_lon) + call mpas_pool_get_config(mgr % pool, 'dx', dx) + call mpas_pool_get_config(mgr % pool, 'dy', dy) + + lat = real((j - (tile_bdr + 1) + tile % y - 1), kind=RKIND) * dy + known_lat + lon = real((i - (tile_bdr + 1) + tile % x - 1), kind=RKIND) * dx + known_lon + + call deg2Rad(lat) + call deg2Rad(lon) + + end subroutine mpas_geotile_mgr_tile_to_latlon + + + !*********************************************************************** + ! + ! public subroutine mpas_geotile_mgr_latlon_to_pixel => latlon_to_pixel + ! + !> \brief Translate a latitude, longitude coordinate to pixel coordinates + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Translate a latitude, longitude coordinate into global pixel coordinates. lat + !> should be in the range of -.5 * pi to .5 * pi and lon should be between -pi + !> and pi. + ! + !----------------------------------------------------------------------- + subroutine mpas_geotile_mgr_latlon_to_pixel(mgr, lat, lon, pixel_x, pixel_y) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + real (kind=RKIND), value :: lat + real (kind=RKIND), value :: lon + integer, intent(out) :: pixel_x + integer, intent(out) :: pixel_y + + integer, pointer :: tile_bdr + real (kind=RKIND), pointer :: known_lon + real (kind=RKIND), pointer :: known_lat + real, pointer :: dx + real, pointer :: dy + integer :: ierr + + ierr = 0 + + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'known_lon', known_lon) + call mpas_pool_get_config(mgr % pool, 'known_lat', known_lat) + call mpas_pool_get_config(mgr % pool, 'dx', dx) + call mpas_pool_get_config(mgr % pool, 'dy', dy) + + call rad2Deg(lat) + call rad2Deg(lon) + + pixel_x = nint((lon - known_lon) / dx) + pixel_y = nint((lat - known_lat) / dy) + + if (pixel_x < 0) then + pixel_x = pixel_x + mgr % pixel_nx + endif + + if (pixel_y < 0) then + pixel_y = 0 + else if (pixel_y >= mgr % pixel_ny) then + pixel_y = mgr % pixel_ny - 1 + endif + + end subroutine mpas_geotile_mgr_latlon_to_pixel + + + !*********************************************************************** + ! + ! private function mpas_geotile_mgr_hash_to_latlon => hash_to_ll + ! + !> \brief Find the lat, lon center from a hash entry + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Translate the index within the hash table into the latitude and longitude + !> coordinate (in radians) of the center of the datatile at that index. + ! 
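+    !>
+    !>  As an illustration (the values here are assumptions for a hypothetical
+    !>  30-arc-second global dataset with tile_x = 1200, dx = 1/120 degree, and
+    !>  known_lon = -179.99583): hash column xHash = 0 maps to pixel
+    !>  x = 0*1200 + 600 = 600, giving a center longitude of
+    !>  600/120 - 179.99583 = -174.99583 degrees, which is converted to radians
+    !>  before being returned.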
+ !----------------------------------------------------------------------- + subroutine mpas_geotile_mgr_hash_to_latlon(mgr, xHash, yHash, lat, lon) + + implicit none + + class(mpas_geotile_mgr_type) :: mgr + integer, intent(in), value :: xHash + integer, intent(in), value :: yHash + real, intent(out) :: lat + real, intent(out) :: lon + + integer, pointer :: tile_nx + integer, pointer :: tile_ny + real (kind=RKIND), pointer :: known_lat + real (kind=RKIND), pointer :: known_lon + real (kind=RKIND), pointer :: dx + real (kind=RKIND), pointer :: dy + + integer :: x + integer :: y + + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'known_lat', known_lat) + call mpas_pool_get_config(mgr % pool, 'known_lon', known_lon) + call mpas_pool_get_config(mgr % pool, 'dx', dx) + call mpas_pool_get_config(mgr % pool, 'dy', dy) + + x = (xHash * tile_nx) + (tile_nx / 2) + y = (yHash * tile_ny) + (tile_ny / 2) + + lon = (real((x), kind=RKIND) * dx ) + known_lon + lat = (real((y), kind=RKIND) * dy ) + known_lat + + call deg2Rad(lat) + call deg2Rad(lon) + + end subroutine mpas_geotile_mgr_hash_to_latlon + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_push_neighbors => push_neighbors + ! + !> \brief Determine the tile nighbors and push them onto the stack + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Determine the neighbors of a tile and push them onto the stack. If the + !> tile neighbors have not been loaded, via add_tile, then they will be. + !> Upon success, the neighbors of a tile will be pushed onto the stack and + !> 0 will be returned. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_push_neighbors(mgr, tile) result(ierr) + + implicit none + + class(mpas_geotile_mgr_type) :: mgr + type (mpas_geotile_type), pointer, intent(in) :: tile + + integer :: ierr + type (mpas_geotile_type), pointer :: neighbor + real (kind=RKIND) :: lat + real (kind=RKIND) :: lon + integer :: xHash + integer :: yHash + + ierr = 0 + + ! Up + ! Calculate the tile's hash coordinates + neighbor => null() + if (tile % hash_y + 1 > mgr % nTileY - 1) then + xHash = modulo(tile % hash_x + (mgr % nTileX / 2), mgr % nTileX) + yHash = tile % hash_y + else + xHash = tile % hash_x + yHash = tile % hash_y + 1 + endif + call mgr % hash_to_ll(xHash, yHash, lat, lon) + + ierr = mgr % get_tile(lat, lon, neighbor) + if (ierr /= 0) then + call mpas_log_write("There was a problem getting the up tile", messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + ierr = mgr % push_tile(neighbor) + + ! Down + neighbor => null() + if (tile % hash_y - 1 < 0) then + xHash = modulo(tile % hash_x + (mgr % nTileX / 2), mgr % nTileX) + yHash = tile % hash_y + else + xHash = tile % hash_x + yHash = tile % hash_y - 1 + endif + call mgr % hash_to_ll(xHash, yHash, lat, lon) + + ierr = mgr % get_tile(lat, lon, neighbor) + if (ierr /= 0) then + call mpas_log_write("There was a problem getting the down tile", messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + ierr = mgr % push_tile(neighbor) + + ! 
Right + neighbor => null() + if (tile % hash_x + 1 > mgr % nTileX - 1) then + yHash = tile % hash_y + xHash = 0 + else + xHash = tile % hash_x + 1 + yHash = tile % hash_y + endif + call mgr % hash_to_ll(xHash, yHash, lat, lon) + + ierr = mgr % get_tile(lat, lon, neighbor) + if (ierr /= 0) then + call mpas_log_write("There was a problem getting the right tile", messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + ierr = mgr % push_tile(neighbor) + + ! Left + neighbor => null() + if (tile % hash_x -1 < 0) then + xHash = mgr % nTileX - 1 + yHash = tile % hash_y + else + xHash = tile % hash_x - 1 + yHash = tile % hash_y + endif + call mgr % hash_to_ll(xHash, yHash, lat, lon) + + ierr = mgr % get_tile(lat, lon, neighbor) + if (ierr /= 0) then + call mpas_log_write("There was a problem getting the left tile", messageType=MPAS_LOG_ERR) + ierr = 1 + return + endif + ierr = mgr % push_tile(neighbor) + + end function mpas_geotile_mgr_push_neighbors + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!!!!!!!!! Stack wrappers and helper functions !!!!!!!!!! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_push_tile => push_tile + ! + !> \brief Push a mpas_geotile_type onto the stack + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Wrapper subroutine for mpas_stack_push from mpas_stack.F. Aftering calling + !> this subroutine, the tile pushed will be on the top of the stack associated + !> with mpas_geotile_mgr instance (TODO: Is instance the correct term??) and pop_tile + !> can be used to retrive the tile that was last pushed onto the stack. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_push_tile(mgr, tile) result(ierr) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + type (mpas_geotile_type), pointer :: tile + integer :: ierr + + ierr = 0 + + mgr % stack => mpas_stack_push(mgr % stack, tile) + + end function mpas_geotile_mgr_push_tile + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_pop_tile => pop_tile + ! + !> \brief Pop the top mpas_geotile_type off of the stack + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Retrieve and remove the last tile that was pushed onto the stack that + !> is associated with this mpas_geotile_mgr instance (TODO: Is instance the correct term?). If the stack is empty, + !> then tile will be unassociated. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_pop_tile(mgr) result(tile) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + class (mpas_stack_payload_type), pointer :: top + type (mpas_geotile_type), pointer :: tile + + tile => null() + + if (mpas_stack_is_empty(mgr % stack)) then + return + endif + + top => mpas_stack_pop(mgr % stack) + + select type(top) + type is(mpas_geotile_type) + tile => top + return + class default + ! Should not get here + end select + + end function mpas_geotile_mgr_pop_tile + + + !*********************************************************************** + ! + ! public function mpas_geotile_mgr_stack_is_empty => is_stack_empty + ! + !> \brief Return .true. if stack is empty and .false. otherwise + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Return .true. 
if the stack associated with this mpas_geotile_mgr instance (TODO: Is instance the correct term?) + !> is empty, and .false. if it is not empty. + ! + !----------------------------------------------------------------------- + function mpas_geotile_mgr_stack_is_empty(mgr) result(empty) + + implicit none + + class (mpas_geotile_mgr_type) :: mgr + logical :: empty + + empty = mpas_stack_is_empty(mgr % stack) + + end function mpas_geotile_mgr_stack_is_empty + + + !*********************************************************************** + ! + ! public subroutine mpas_latlon_to_xyz + ! + !> \brief Convert a latitude, longitude coordinate into its Cartesian equivalent + !> \author Miles A. Curry + !> \date 02/20/2020 + !> \details + !> Given a latitude, longitude coordinate and a radius, convert the latitude, + !> longitude coordinate into the equivalent Cartesian coordinate. + ! + !----------------------------------------------------------------------- + subroutine mpas_latlon_to_xyz(x, y, z, radius, lat, lon) + + implicit none + + real (kind=RKIND), intent(in) :: radius + real (kind=RKIND), intent(in) :: lat + real (kind=RKIND), intent(in) :: lon + real (kind=RKIND), intent(out) :: x, y, z + + z = radius * sin(lat) + x = radius * cos(lon) * cos(lat) + y = radius * sin(lon) * cos(lat) + + end subroutine mpas_latlon_to_xyz + + + ! Convert radians to degrees + subroutine rad2Deg(rad) + + implicit none + real (kind=RKIND), intent(inout) :: rad + + rad = rad * (180.0_RKIND / pii) + + end subroutine rad2Deg + + + ! Convert degrees to radians + subroutine deg2Rad(deg) + + implicit none + real (kind=RKIND), intent(inout) :: deg + + deg = deg * (pii / 180.0_RKIND) + + end subroutine deg2Rad + + + ! Normalize logitude (in radians) to be between -pi and pi. + subroutine normalize_lon(lon) + + implicit none + real (kind=RKIND), intent(inout) :: lon + + if (lon > pii) then + lon = lon - (2.0 * pii) + else if (lon < -pii) then + lon = lon + (2.0 * pii) + endif + + end subroutine normalize_lon + +end module mpas_geotile_manager diff --git a/src/core_init_atmosphere/mpas_init_atm_gwd.F b/src/core_init_atmosphere/mpas_init_atm_gwd.F index 6d1632f9b2..25ef93c8c6 100644 --- a/src/core_init_atmosphere/mpas_init_atm_gwd.F +++ b/src/core_init_atmosphere/mpas_init_atm_gwd.F @@ -21,7 +21,7 @@ module mpas_init_atm_gwd interface subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & - scalefactor, wordsize, status) bind(C) + wordsize, status) bind(C) use iso_c_binding, only : c_char, c_int, c_float, c_ptr character (c_char), dimension(*), intent(in) :: fname type (c_ptr), value :: rarray @@ -30,7 +30,6 @@ subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & integer (c_int), intent(in), value :: nz integer (c_int), intent(in), value :: isigned integer (c_int), intent(in), value :: endian - real (c_float), intent(in), value :: scalefactor integer (c_int), intent(in), value :: wordsize integer (c_int), intent(inout) :: status end subroutine read_geogrid @@ -116,6 +115,7 @@ function compute_gwd_fields(domain) result(iErr) character(len=StrKIND), pointer :: config_geog_data_path character(len=StrKIND), pointer :: config_topo_data character(len=StrKIND) :: geog_sub_path + character(len=StrKIND+1) :: geog_data_path ! same as config_geog_data_path, but guaranteed to have a trailing slash ! 
Variables for smoothing variance integer, dimension(:,:), pointer:: cellsOnCell @@ -134,6 +134,12 @@ function compute_gwd_fields(domain) result(iErr) call mpas_pool_get_config(domain % configs, 'config_topo_data', config_topo_data) call mpas_pool_get_config(domain % configs, 'config_gwd_cell_scaling', config_gwd_cell_scaling) + write(geog_data_path, '(a)') config_geog_data_path + i = len_trim(geog_data_path) + if (geog_data_path(i:i) /= '/') then + geog_data_path(i+1:i+1) = '/' + end if + select case(trim(config_topo_data)) case('GTOPO30') call mpas_log_write('--- Using GTOPO30 terrain dataset for GWDO static fields') @@ -188,13 +194,13 @@ function compute_gwd_fields(domain) result(iErr) allocate(hlanduse(nCells+1)) ! +1, since we access hlanduse(cellsOnCell(i,iCell)) later on for iCell=1,nCells - iErr = read_global_30s_topo(config_geog_data_path, geog_sub_path) + iErr = read_global_30s_topo(geog_data_path, geog_sub_path) if (iErr /= 0) then call mpas_log_write('Error reading global 30-arc-sec topography for GWD statistics', messageType=MPAS_LOG_ERR) return end if - iErr = read_global_30s_landuse(config_geog_data_path) + iErr = read_global_30s_landuse(geog_data_path) if (iErr /= 0) then call mpas_log_write('Error reading global 30-arc-sec landuse for GWD statistics', messageType=MPAS_LOG_ERR) return @@ -370,7 +376,8 @@ function read_global_30s_topo(path, sub_path) result(iErr) iy, '-', (iy+tile_y-1) call mpas_f_to_c_string(filename, c_filename) call read_geogrid(c_filename, tile_ptr, nx, ny, nz, isigned, endian, & - scalefactor, wordsize, istatus) + wordsize, istatus) + tile(:,:,:) = tile(:,:,:) * scalefactor if (istatus /= 0) then call mpas_log_write('Error reading topography tile '//trim(filename), messageType=MPAS_LOG_ERR) iErr = 1 @@ -439,7 +446,8 @@ function read_global_30s_landuse(path) result(iErr) iy, '-', (iy+tile_y-1) call mpas_f_to_c_string(filename, c_filename) call read_geogrid(c_filename, tile_ptr, nx, ny, nz, isigned, endian, & - scalefactor, wordsize, istatus) + wordsize, istatus) + tile(:,:,:) = tile(:,:,:) * scalefactor if (istatus /= 0) then call mpas_log_write('Error reading landuse tile '//trim(filename)) iErr = 1 diff --git a/src/core_init_atmosphere/mpas_init_atm_static.F b/src/core_init_atmosphere/mpas_init_atm_static.F index e72dca987f..021b36c96e 100644 --- a/src/core_init_atmosphere/mpas_init_atm_static.F +++ b/src/core_init_atmosphere/mpas_init_atm_static.F @@ -17,8 +17,12 @@ module mpas_init_atm_static use init_atm_llxy use mpas_c_interfacing, only : mpas_f_to_c_string + use mpas_geometry_utils, only : mpas_in_cell use mpas_atmphys_utilities + use mpas_kd_tree, only : mpas_kd_type, mpas_kd_construct, mpas_kd_free, mpas_kd_search + use mpas_geotile_manager, only : mpas_geotile_mgr_type, mpas_geotile_type, mpas_latlon_to_xyz + use iso_c_binding, only : c_char, c_int, c_float, c_loc, c_ptr implicit none @@ -30,7 +34,7 @@ module mpas_init_atm_static interface subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & - scalefactor, wordsize, status) bind(C) + wordsize, status) bind(C) use iso_c_binding, only : c_char, c_int, c_float, c_ptr character (c_char), dimension(*), intent(in) :: fname type (c_ptr), value :: rarray @@ -39,7 +43,6 @@ subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & integer (c_int), intent(in), value :: nz integer (c_int), intent(in), value :: isigned integer (c_int), intent(in), value :: endian - real (c_float), intent(in), value :: scalefactor integer (c_int), intent(in), value :: wordsize integer (c_int), 
intent(inout) :: status end subroutine read_geogrid @@ -75,8 +78,6 @@ subroutine init_atm_static(mesh, dims, configs) character(len=StrKIND+1) :: geog_data_path ! same as config_geog_data_path, but guaranteed to have a trailing slash character(len=StrKIND+1) :: geog_sub_path ! subdirectory names in config_geog_data_path, with trailing slash - integer:: ismax_lu - integer(c_int):: nx,ny,nz integer(c_int):: endian,isigned,istatus,wordsize integer:: i,j,k @@ -86,6 +87,7 @@ subroutine init_atm_static(mesh, dims, configs) integer,dimension(:),allocatable :: nhs integer,dimension(:,:),allocatable:: ncat + real(kind=RKIND), pointer :: scalefactor_ptr real(kind=c_float):: scalefactor real(kind=c_float),dimension(:,:,:),pointer,contiguous :: rarray type(c_ptr) :: rarray_ptr @@ -102,7 +104,10 @@ subroutine init_atm_static(mesh, dims, configs) real(kind=RKIND),dimension(:,:,:),allocatable:: vegfra integer, pointer :: isice_lu, iswater_lu - integer, pointer :: nCells, nEdges, nVertices, maxEdges + integer, pointer :: isice_lu_ptr, iswater_lu_ptr + integer :: iswater_soil + integer, pointer :: iswater_soil_ptr + integer, pointer :: nCells, nCellsSolve, nEdges, nVertices, maxEdges logical, pointer :: on_a_sphere real (kind=RKIND), pointer :: sphere_radius @@ -121,12 +126,14 @@ subroutine init_atm_static(mesh, dims, configs) real (kind=RKIND), dimension(:), pointer :: fEdge, fVertex real (kind=RKIND), dimension(:), pointer :: ter + integer (kind=I8KIND), dimension(:), pointer :: ter_integer real (kind=RKIND), dimension(:), pointer :: soiltemp real (kind=RKIND), dimension(:), pointer :: snoalb real (kind=RKIND), dimension(:), pointer :: shdmin, shdmax real (kind=RKIND), dimension(:,:), pointer :: greenfrac real (kind=RKIND), dimension(:,:), pointer :: albedo12m real (kind=RKIND) :: msgval, fillval + integer, pointer :: category_min, category_max integer, dimension(:), pointer :: lu_index integer, dimension(:), pointer :: soilcat_top integer, dimension(:), pointer :: landmask @@ -135,6 +142,20 @@ subroutine init_atm_static(mesh, dims, configs) real (kind=RKIND) :: xPixel, yPixel, zPixel + type (mpas_kd_type), dimension(:), pointer :: kd_points + type (mpas_kd_type), pointer :: tree + type (mpas_kd_type), pointer :: res + + type (mpas_geotile_mgr_type) :: mgr + type (mpas_geotile_type), pointer :: tile + + real (kind=RKIND) :: tval + integer, pointer :: tile_bdr + integer, pointer :: tile_nx, tile_ny + + logical :: all_pixels_mapped_to_halo_cells + integer :: ierr + !-------------------------------------------------------------------------------------------------- @@ -206,6 +227,7 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_pool_get_config(mesh, 'sphere_radius', sphere_radius) call mpas_pool_get_dimension(dims, 'nCells', nCells) + call mpas_pool_get_dimension(dims, 'nCellsSolve', nCellsSolve) call mpas_pool_get_dimension(dims, 'nEdges', nEdges) call mpas_pool_get_dimension(dims, 'nVertices', nVertices) call mpas_pool_get_dimension(dims, 'maxEdges', maxEdges) @@ -225,6 +247,21 @@ subroutine init_atm_static(mesh, dims, configs) areaTriangle = areaTriangle * sphere_radius**2.0 kiteAreasOnVertex = kiteAreasOnVertex * sphere_radius**2.0 +! +! Initialize the KD-Tree +! + allocate(kd_points(nCells)) + do i = 1, nCells + allocate(kd_points(i) % point(3)) + kd_points(i) % point = (/xCell(i), yCell(i), zCell(i)/) + kd_points(i) % id = i ! Cell ID + enddo + tree => null() + tree => mpas_kd_construct(kd_points, 3) + if (.not. 
associated(tree)) then + call mpas_log_write('Error creating the KD-Tree for static interpolation', messageType=MPAS_LOG_CRIT) + endif + ! ! Initialize Coriolis parameter field on edges and vertices @@ -249,14 +286,8 @@ subroutine init_atm_static(mesh, dims, configs) ! surface_input_select0: select case(trim(config_landuse_data)) case('USGS') - isice_lu = 24 - iswater_lu = 16 - ismax_lu = 24 write(mminlu,'(a)') 'USGS' case('MODIFIED_IGBP_MODIS_NOAH') - isice_lu = 15 - iswater_lu = 17 - ismax_lu = 20 write(mminlu,'(a)') 'MODIFIED_IGBP_MODIS_NOAH' case default call mpas_log_write('*****************************************************************', messageType=MPAS_LOG_ERR) @@ -271,32 +302,13 @@ subroutine init_atm_static(mesh, dims, configs) ! ! Interpolate HGT ! -!nx = 126 -!ny = 126 - nx = 1206 - ny = 1206 - nz = 1 - isigned = 1 - endian = 0 - wordsize = 2 - scalefactor = 1.0 - allocate(rarray(nx,ny,nz)) - allocate(nhs(nCells)) - nhs(:) = 0 - ter(:) = 0.0 - - rarray_ptr = c_loc(rarray) - - start_lat = -89.99583 select case(trim(config_topo_data)) case('GTOPO30') call mpas_log_write('Using GTOPO30 terrain dataset') geog_sub_path = 'topo_30s/' - start_lon = -179.99583 case('GMTED2010') call mpas_log_write('Using GMTED2010 terrain dataset') geog_sub_path = 'topo_gmted2010_30s/' - start_lon = 0.004166667 case('default') call mpas_log_write('*****************************************************************', messageType=MPAS_LOG_ERR) call mpas_log_write('Invalid topography dataset '''//trim(config_topo_data) & @@ -306,66 +318,130 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_log_write('Please correct the namelist.', messageType=MPAS_LOG_CRIT) end select - do jTileStart = 1,20401,ny-6 - jTileEnd = jTileStart + ny - 1 - 6 - - do iTileStart=1,42001,nx-6 - iTileEnd = iTileStart + nx - 1 - 6 - write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)//trim(geog_sub_path), & - iTileStart,'-',iTileEnd,'.',jTileStart,'-',jTileEnd - call mpas_log_write(trim(fname)) - call mpas_f_to_c_string(fname, c_fname) - - call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) - call init_atm_check_read_error(istatus, fname) - - iPoint = 1 - do j=4,ny-3 - do i=4,nx-3 - lat_pt = start_lat + (jTileStart + j - 5) * 0.0083333333 - lon_pt = start_lon + (iTileStart + i - 5) * 0.0083333333 - lat_pt = lat_pt * PI / 180.0 - lon_pt = lon_pt * PI / 180.0 - - iPoint = nearest_cell(lat_pt,lon_pt,iPoint,nCells,maxEdges, & - nEdgesOnCell,cellsOnCell, & - latCell,lonCell) - - ! - ! For all but the outermost boundary cells, we can safely assume that the nearest - ! model grid cell contains the pixel (else, a different cell would be nearest) - ! - if (bdyMaskCell(iPoint) < nBdyLayers) then - ter(iPoint) = ter(iPoint) + rarray(i,j,1) - nhs(iPoint) = nhs(iPoint) + 1 - - ! For outermost boundary cells, additional work is needed to verify that the pixel - ! actually lies within the nearest cell - else - zPixel = sphere_radius * sin(lat_pt) ! Model cell coordinates assume a "full" sphere radius - xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius - yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! 
for source pixel coordinates - - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & - nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then - - ter(iPoint) = ter(iPoint) + rarray(i,j,1) - nhs(iPoint) = nhs(iPoint) + 1 + ierr = mgr % init(trim(config_geog_data_path)//trim(geog_sub_path)) + if (ierr /= 0) then + call mpas_log_write('Error occured when initalizing the interpolation of terrain height (ter)', messageType=MPAS_LOG_CRIT) + endif - end if - end if - end do - end do - end do + allocate(ter_integer(nCells)) + allocate(nhs(nCells)) + + ! + ! Store tile values as a I8KIND integer temporarily to avoid floating + ! point round off differences and to have +/- 9.22x10^18 range of representative + ! values. For example, a 120 km mesh with a 1 meter data set with 6 decimal of + ! precision will allow for values of 180x10^12. + ! + ter_integer(:) = 0_I8KIND + ter(:) = 0.0_RKIND + nhs(:) = 0 + + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'scale_factor', scalefactor_ptr) + scalefactor = scalefactor_ptr + + do iCell = 1, nCells + ! + ! Load the tile at each cell center. This insures all cells receive values + ! + if (nhs(iCell) == 0) then + tile => null() + ierr = mgr % get_tile(latCell(iCell), lonCell(iCell), tile) + if (ierr /= 0 .or. .not. associated(tile)) then + call mpas_log_write('Could not get tile that contained cell $i', intArgs=(/iCell/), messageType=MPAS_LOG_CRIT) + end if + + ierr = mgr % push_tile(tile) + if (ierr /= 0) then + call mpas_log_write("Error pushing this tile onto the stack: "//trim(tile%fname), messageType=MPAS_LOG_CRIT) + end if + end if + + ! + ! Process each tile by removing it from the stack. Determine the closest cell center to each tile + ! pixel by using a KD search and assign its value to that cell. + ! + do while (.not. mgr % is_stack_empty()) + tile => mgr % pop_tile() + + if (tile % is_processed) then + cycle + end if + + call mpas_log_write('Processing tile: '//trim(tile%fname)) + + all_pixels_mapped_to_halo_cells = .true. + + do j = 1 + tile_bdr, tile_ny + tile_bdr, 1 + do i = 1 + tile_bdr, tile_nx + tile_bdr, 1 + + tval = tile % tile(i, j, 1) + + call mgr % tile_to_latlon(tile, j, i, lat_pt, lon_pt) + call mpas_latlon_to_xyz(xPixel, yPixel, zPixel, sphere_radius, lat_pt, lon_pt) + call mpas_kd_search(tree, (/xPixel, yPixel, zPixel/), res) + + ! + ! For all but the outermost boundary cells, we can safely assume that the nearest + ! model grid cell contains the pixel (else, a different cell would be nearest) + ! + if (bdyMaskCell(res % id) < nBdyLayers) then + ter_integer(res % id) = ter_integer(res % id) + int(tval, kind=I8KIND) + nhs(res % id) = nhs(res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. + end if + + ! For outermost boundary cells, additional work is needed to verify that the pixel + ! actually lies within the nearest cell + else + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(res % id), yCell(res % id), zCell(res % id), & + nEdgesOnCell(res % id), verticesOnCell(:,res % id), xVertex, yVertex, zVertex)) then + + ter_integer(res % id) = ter_integer(res % id) + int(tval, kind=I8KIND) + nhs(res % id) = nhs(res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. 
+ end if + end if + end if + end do + end do + + tile % is_processed = .true. + + ! + ! If a single pixel value maps to an owned cell (i.e. <= nCellsSolve) then + ! it is possible that the neighboring tiles might contain pixels that map + ! to this process' compute cells, so add them to the stack. + ! + if (.not. all_pixels_mapped_to_halo_cells) then + ierr = mgr % push_neighbors(tile) + if (ierr /= 0) then + call mpas_log_write("Error pushing the tile neighbors of: "//trim(tile%fname), messageType=MPAS_LOG_CRIT) + end if + end if + end do end do - do iCell = 1,nCells - ter(iCell) = ter(iCell) / real(nhs(iCell)) + do iCell = 1, nCells + ter(iCell) = real(ter_integer(iCell), kind=R8KIND) / real(nhs(iCell), kind=R8KIND) + ter(iCell) = ter(iCell) * scalefactor end do - deallocate(rarray) + + deallocate(ter_integer) deallocate(nhs) + + ierr = mgr % finalize() + if (ierr /= 0) then + call mpas_log_write('Error occured when finalizing the interpolation of terrain height (ter)', messageType=MPAS_LOG_CRIT) + endif + call mpas_log_write('--- end interpolate TER') @@ -387,171 +463,252 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_log_write('*****************************************************************', messageType=MPAS_LOG_ERR) call mpas_log_write('Please correct the namelist.', messageType=MPAS_LOG_CRIT) end select surface_input_select1 - nx = 1200 - ny = 1200 - nz = 1 - isigned = 1 - endian = 0 - wordsize = 1 - scalefactor = 1.0 - allocate(rarray(nx,ny,nz)) - allocate(ncat(ismax_lu,nCells)) + + ierr = mgr % init(trim(config_geog_data_path)//trim(geog_sub_path)) + if (ierr /= 0) then + call mpas_log_write('Error occurred when initializing the interpolation of landuse category (lu_index)', & + messageType=MPAS_LOG_CRIT) + endif + + call mpas_pool_get_config(mgr % pool, 'iswater', iswater_lu_ptr) + call mpas_pool_get_config(mgr % pool, 'isice', isice_lu_ptr) + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'category_min', category_min) + call mpas_pool_get_config(mgr % pool, 'category_max', category_max) + + ! Transfer iswater_lu_ptr, isice_lu_ptr to iswater_lu and isice_lu because + ! isawter_lu_ptr and isice_lu_ptr will become unassociated when calling + ! mgr % finalize. iswater_lu and isice_lu will be written to the output + ! stream + iswater_lu = iswater_lu_ptr + isice_lu = isice_lu_ptr + + allocate(ncat(category_min:category_max, nCells)) ncat(:,:) = 0 - lu_index(:) = 0.0 + lu_index(:) = 0 - rarray_ptr = c_loc(rarray) + do iCell = 1, nCells - do jTileStart = 1,20401,ny - jTileEnd = jTileStart + ny - 1 + if (all(ncat(:,iCell) == 0)) then + tile => null() + ierr = mgr % get_tile(latCell(iCell), lonCell(iCell), tile) + if (ierr /= 0 .or. .not. 
associated(tile)) then + call mpas_log_write('Could not get the tile that contained cell $i', intArgs=(/iCell/), messageType=MPAS_LOG_CRIT) + end if - do iTileStart = 1,42001,nx - iTileEnd = iTileStart + nx - 1 - write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & - trim(geog_sub_path),iTileStart,'-',iTileEnd,'.',jTileStart,'-',jTileEnd - call mpas_log_write(trim(fname)) - call mpas_f_to_c_string(fname, c_fname) + ierr = mgr % push_tile(tile) + if (ierr /= 0) then + call mpas_log_write('Error pushing this tile onto the stack: '//trim(tile%fname), messageType=MPAS_LOG_CRIT) + end if + end if - call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) - call init_atm_check_read_error(istatus, fname) - iPoint = 1 - do j=1,ny - do i=1,nx -! -! The MODIS dataset appears to have zeros at the South Pole, possibly other places, too -! -if (rarray(i,j,1) == 0) cycle + do while (.not. mgr % is_stack_empty()) + tile => mgr % pop_tile() - lat_pt = -89.99583 + (jTileStart + j - 2) * 0.0083333333 - lon_pt = -179.99583 + (iTileStart + i - 2) * 0.0083333333 - lat_pt = lat_pt * PI / 180.0 - lon_pt = lon_pt * PI / 180.0 + if (tile % is_processed) then + cycle + endif - iPoint = nearest_cell(lat_pt,lon_pt,iPoint,nCells,maxEdges, & - nEdgesOnCell,cellsOnCell, & - latCell,lonCell) + call mpas_log_write('Processing tile: '//trim(tile%fname)) - ! - ! For all but the outermost boundary cells, we can safely assume that the nearest - ! model grid cell contains the pixel (else, a different cell would be nearest) - ! - if (bdyMaskCell(iPoint) < nBdyLayers) then - ncat(int(rarray(i,j,1)),iPoint) = ncat(int(rarray(i,j,1)),iPoint) + 1 + all_pixels_mapped_to_halo_cells = .true. - ! For outermost boundary cells, additional work is needed to verify that the pixel - ! actually lies within the nearest cell - else - zPixel = sphere_radius * sin(lat_pt) ! Model cell coordinates assume a "full" sphere radius - xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius - yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! for source pixel coordinates + do j = 1 + tile_bdr, tile_ny + tile_bdr, 1 + do i = 1 + tile_bdr, tile_nx + tile_bdr, 1 - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & - nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then + ! + ! The MODIS dataset has zeros at the South Pole, and possibly other places, so + ! skip any value that is less than category_min or greater than category_max as + ! reported in the index file. + ! + if (tile % tile(i,j,1) < category_min .or. tile % tile(i,j,1) > category_max) then + cycle + end if - ncat(int(rarray(i,j,1)),iPoint) = ncat(int(rarray(i,j,1)),iPoint) + 1 + call mgr % tile_to_latlon(tile, j, i, lat_pt, lon_pt) + call mpas_latlon_to_xyz(xPixel, yPixel, zPixel, sphere_radius, lat_pt, lon_pt) + call mpas_kd_search(tree, (/xPixel, yPixel, zPixel/), res) - end if - end if - end do - end do + ! + ! For all but the outermost boundary cells, we can safely assume that the nearest + ! model grid cell contains the pixel (else, a different cell would be nearest) + ! + if (bdyMaskCell(res % id) < nBdyLayers) then - end do + ncat(int(tile % tile(i,j,1)), res % id) = ncat(int(tile % tile(i,j,1)), res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. + end if + + ! For outermost boundary cells, additional work is needed to verify that the pixel + ! 
actually lies within the nearest cell + else + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(res % id), yCell(res % id), zCell(res % id), & + nEdgesOnCell(res % id), verticesOnCell(:,res % id), xVertex, yVertex, zVertex)) then + + ncat(int(tile % tile(i,j,1)), res % id) = ncat(int(tile % tile(i,j,1)), res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. + end if + end if + end if + end do + end do + + tile % is_processed = .true. + + if (.not. all_pixels_mapped_to_halo_cells) then + ierr = mgr % push_neighbors(tile) + if (ierr /= 0) then + call mpas_log_write("Error pushing the tile neighbors of: "//trim(tile%fname), messageType=MPAS_LOG_CRIT) + endif + end if + + end do end do - do iCell = 1,nCells - lu_index(iCell) = 1 - do i = 2,ismax_lu - if(ncat(i,iCell) > ncat(lu_index(iCell),iCell)) then - lu_index(iCell) = i - end if - end do + do iCell = 1, nCells + ! Because maxloc returns the location of the maximum value of an array as if the + ! starting index of the array is 1, and dataset categories do not necessarily start + ! at 1, we need to use category_min to ensure the correct category location is chosen. + lu_index(iCell) = maxloc(ncat(:,iCell), dim=1) - 1 + category_min end do - deallocate(rarray) deallocate(ncat) - call mpas_log_write('--- end interpolate LU_INDEX') + ierr = mgr % finalize() + if (ierr /= 0) then + call mpas_log_write('Error occurred when finalizing the interpolation of landuse category (lu_index)', & + messageType=MPAS_LOG_CRIT) + endif + + call mpas_log_write('--- end interpolate LU_INDEX') ! ! Interpolate SOILCAT_TOP ! - nx = 1200 - ny = 1200 - nz = 1 - isigned = 1 - endian = 0 - wordsize = 1 - scalefactor = 1.0 - allocate(rarray(nx,ny,nz)) - allocate(ncat(16,nCells)) + geog_sub_path = 'soiltype_top_30s/' + + ierr = mgr % init(trim(config_geog_data_path)//trim(geog_sub_path)) + if (ierr /= 0) then + call mpas_log_write('Error occurred when initalizing the interpolation of soil top category (soilcat_top)', & + messageType=MPAS_LOG_CRIT) + endif + + call mpas_pool_get_config(mgr % pool, 'tile_bdr', tile_bdr) + call mpas_pool_get_config(mgr % pool, 'tile_x', tile_nx) + call mpas_pool_get_config(mgr % pool, 'tile_y', tile_ny) + call mpas_pool_get_config(mgr % pool, 'category_min', category_min) + call mpas_pool_get_config(mgr % pool, 'category_max', category_max) + call mpas_pool_get_config(mgr % pool, 'isoilwater', iswater_soil_ptr) + + iswater_soil = iswater_soil_ptr + + allocate(ncat(category_min:category_max, nCells)) + soilcat_top(:) = 0 ncat(:,:) = 0 - soilcat_top(:) = 0.0 - rarray_ptr = c_loc(rarray) + do iCell = 1, nCells - do jTileStart = 1,20401,ny - jTileEnd = jTileStart + ny - 1 - - do iTileStart = 1,42001,nx - iTileEnd = iTileStart + nx - 1 - write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & - 'soiltype_top_30s/',iTileStart,'-',iTileEnd,'.',jTileStart,'-',jTileEnd - call mpas_log_write(trim(fname)) - call mpas_f_to_c_string(fname, c_fname) - - call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) - call init_atm_check_read_error(istatus, fname) - - iPoint = 1 - do j=1,ny - do i=1,nx - lat_pt = -89.99583 + (jTileStart + j - 2) * 0.0083333333 - lon_pt = -179.99583 + (iTileStart + i - 2) * 0.0083333333 - lat_pt = lat_pt * PI / 180.0 - lon_pt = lon_pt * PI / 180.0 - - iPoint = nearest_cell(lat_pt,lon_pt,iPoint,nCells,maxEdges, & - nEdgesOnCell,cellsOnCell, & - latCell,lonCell) - - ! - ! 
For all but the outermost boundary cells, we can safely assume that the nearest - ! model grid cell contains the pixel (else, a different cell would be nearest) - ! - if (bdyMaskCell(iPoint) < nBdyLayers) then - ncat(int(rarray(i,j,1)),iPoint) = ncat(int(rarray(i,j,1)),iPoint) + 1 - - ! For outermost boundary cells, additional work is needed to verify that the pixel - ! actually lies within the nearest cell - else - zPixel = sphere_radius * sin(lat_pt) ! Model cell coordinates assume a "full" sphere radius - xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius - yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! for source pixel coordinates - - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & - nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then - - ncat(int(rarray(i,j,1)),iPoint) = ncat(int(rarray(i,j,1)),iPoint) + 1 + if (all(ncat(:,iCell) == 0)) then + tile => null() + ierr = mgr % get_tile(latCell(iCell), lonCell(iCell), tile) + if (ierr /= 0 .or. .not. associated(tile)) then + call mpas_log_write('Could not get the tile that contained cell $i', intArgs=(/iCell/), messageType=MPAS_LOG_CRIT) + end if - end if - end if - end do - end do + ierr = mgr % push_tile(tile) + if (ierr /= 0) then + call mpas_log_write('Error pushing this tile onto the stack: '//trim(tile%fname), messageType=MPAS_LOG_CRIT) + end if + end if + + do while(.not. mgr % is_stack_empty()) + tile => mgr % pop_tile() + + if (tile % is_processed) then + cycle + end if + + call mpas_log_write('Processing tile: '//trim(tile%fname)) + + all_pixels_mapped_to_halo_cells = .true. + + do j = 1 + tile_bdr, tile_ny + tile_bdr, 1 + do i = 1 + tile_bdr, tile_nx + tile_bdr, 1 + + ! + ! While the currently used soil type dataset, soiltype_top_30s, does not contain any + ! invalid values, have this sanity check for any future soil type datasets + ! + if (tile % tile(i,j,1) < category_min .or. tile % tile(i,j,1) > category_max) then + cycle + end if + + call mgr % tile_to_latlon(tile, j, i, lat_pt, lon_pt) + call mpas_latlon_to_xyz(xPixel, yPixel, zPixel, sphere_radius, lat_pt, lon_pt) + + res => null() + call mpas_kd_search(tree, (/xPixel, yPixel, zPixel/), res) + + ! + ! For all but the outermost boundary cells, we can safely assume that the nearest + ! model grid cell contains the pixel (else, a different cell would be nearest) + ! + if (bdyMaskCell(res % id) < nBdyLayers) then + + ncat(int(tile % tile(i,j,1)), res % id) = ncat(int(tile % tile(i,j,1)), res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. + end if + + ! For outermost boundary cells, additional work is needed to verify that the pixel + ! actually lies within the nearest cell + else + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(res % id), yCell(res % id), zCell(res % id), & + nEdgesOnCell(res % id), verticesOnCell(:,res % id), xVertex, yVertex, zVertex)) then + + ncat(int(tile % tile(i,j,1)), res % id) = ncat(int(tile % tile(i,j,1)), res % id) + 1 + + if (res % id <= nCellsSolve) then + all_pixels_mapped_to_halo_cells = .false. + end if + end if + end if + end do + end do + + tile % is_processed = .true. + + if (.not. 
all_pixels_mapped_to_halo_cells) then + ierr = mgr % push_neighbors(tile) + if (ierr /=0) then + call mpas_log_write("Error pushing the tile neighbors of: "//trim(tile%fname), messageType=MPAS_LOG_CRIT) + end if + end if end do end do - do iCell = 1,nCells - soilcat_top(iCell) = 1 - do i = 2,16 - if(ncat(i,iCell) > ncat(soilcat_top(iCell),iCell)) then - soilcat_top(iCell) = i - end if - end do + do iCell = 1, nCells + ! Because maxloc returns the location of the maximum value of an array as if the + ! starting index of the array is 1, and dataset categories do not necessarily start + ! at 1, we need to use category_min to ensure the correct category location is chosen. + soilcat_top(iCell) = maxloc(ncat(:,iCell), dim=1) - 1 + category_min end do - deallocate(rarray) + deallocate(ncat) + + ierr = mgr % finalize() + if (ierr /= 0) then + call mpas_log_write('Error occurred when finalizing the interpolation of soil top category (soilcat_top)', & + messageType=MPAS_LOG_CRIT) + endif call mpas_log_write('--- end interpolate SOILCAT_TOP') @@ -565,14 +722,14 @@ subroutine init_atm_static(mesh, dims, configs) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! do iCell = 1,nCells if (lu_index(iCell) == iswater_lu .or. & - soilcat_top(iCell) == 14) then + soilcat_top(iCell) == iswater_soil) then if (lu_index(iCell) /= iswater_lu) then call mpas_log_write('Turning lu_index into water at $i', intArgs=(/iCell/)) lu_index(iCell) = iswater_lu end if - if (soilcat_top(iCell) /= 14) then + if (soilcat_top(iCell) /= iswater_soil) then call mpas_log_write('Turning soilcat_top into water at $i', intArgs=(/iCell/)) - soilcat_top(iCell) = 14 + soilcat_top(iCell) = iswater_soil end if end if end do @@ -618,8 +775,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned, endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus, fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor soiltemp_1deg(-2:180,-2:183) = rarray(1:183,1:186,1) write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & @@ -628,8 +786,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor soiltemp_1deg(181:363,-2:183) = rarray(4:186,1:186,1) interp_list(1) = FOUR_POINT @@ -706,8 +865,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus, fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor iPoint = 1 do j=supersample_fac * 3 + 1, supersample_fac * (ny-3) @@ -741,7 +901,7 @@ subroutine init_atm_static(mesh, dims, configs) xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! 
for source pixel coordinates - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then snoalb(iPoint) = snoalb(iPoint) + rarray(ii,jj,1) nhs(iPoint) = nhs(iPoint) + 1 @@ -802,8 +962,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor maxsnowalb(-2:180,-2:183) = rarray(1:183,1:186,1) write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & @@ -812,8 +973,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus, fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor maxsnowalb(181:363,-2:183) = rarray(4:186,1:186,1) interp_list(1) = FOUR_POINT @@ -900,8 +1062,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus, fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor iPoint = 1 do j=1,ny @@ -936,7 +1099,7 @@ subroutine init_atm_static(mesh, dims, configs) xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! for source pixel coordinates - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then do k=1,nz if (rarray(i,j,k) == msgval) then @@ -998,8 +1161,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor vegfra(-2:1250,-2:1253,1:12) = rarray(1:1253,1:1256,1:12) write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & @@ -1008,8 +1172,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor vegfra(1251:2503,-2:1253,1:12) = rarray(4:1256,1:1256,1:12) do iCell = 1,nCells @@ -1096,8 +1261,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus, fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor iPoint = 1 do j=supersample_fac * 3 + 1, supersample_fac * (ny-3) @@ -1135,7 +1301,7 @@ subroutine init_atm_static(mesh, dims, configs) xPixel = sphere_radius * cos(lon_pt) * cos(lat_pt) ! at this point, so we need to ues the same radius yPixel = sphere_radius * sin(lon_pt) * cos(lat_pt) ! 
for source pixel coordinates - if (in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & + if (mpas_in_cell(xPixel, yPixel, zPixel, xCell(iPoint), yCell(iPoint), zCell(iPoint), & nEdgesOnCell(iPoint), verticesOnCell(:,iPoint), xVertex, yVertex, zVertex)) then do k=1,nz if (rarray(ii,jj,k) == msgval) then @@ -1198,8 +1364,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor, wordsize, istatus) + wordsize, istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor vegfra(-2:1250,-2:1253,1:12) = rarray(1:1253,1:1256,1:12) write(fname,'(a,i5.5,a1,i5.5,a1,i5.5,a1,i5.5)') trim(geog_data_path)// & @@ -1208,8 +1375,9 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_f_to_c_string(fname, c_fname) call read_geogrid(c_fname,rarray_ptr,nx,ny,nz,isigned,endian, & - scalefactor,wordsize,istatus) + wordsize,istatus) call init_atm_check_read_error(istatus,fname) + rarray(:,:,:) = rarray(:,:,:) * scalefactor vegfra(1251:2503,-2:1253,1:12) = rarray(4:1256,1:1256,1:12) do iCell = 1,nCells @@ -1251,6 +1419,12 @@ subroutine init_atm_static(mesh, dims, configs) call mpas_log_write('--- end interpolate ALBEDO12M') +! +! Deallocate and free the KD Tree +! + call mpas_kd_free(tree) + deallocate(kd_points) + end subroutine init_atm_static @@ -1328,146 +1502,6 @@ real (kind=RKIND) function sphere_distance(lat1, lon1, lat2, lon2, radius) end function sphere_distance -!----------------------------------------------------------------------- -! routine mirror_point -! -!> \brief Finds the "mirror" of a point about a great-circle arc -!> \author Michael Duda -!> \date 7 March 2019 -!> \details -!> Given the endpoints of a great-circle arc (A,B) and a point, computes -!> the location of the point on the opposite side of the arc along a great- -!> circle arc that intersects (A,B) at a right angle, and such that the arc -!> between the point and its mirror is bisected by (A,B). -!> -!> Assumptions: A, B, and the point to be reflected all lie on the surface -!> of the unit sphere. -! -!----------------------------------------------------------------------- -subroutine mirror_point(xPoint, yPoint, zPoint, xA, yA, zA, xB, yB, zB, xMirror, yMirror, zMirror) - - implicit none - - real(kind=RKIND), intent(in) :: xPoint, yPoint, zPoint - real(kind=RKIND), intent(in) :: xA, yA, zA - real(kind=RKIND), intent(in) :: xB, yB, zB - real(kind=RKIND), intent(out) :: xMirror, yMirror, zMirror - - real(kind=RKIND) :: alpha - - ! - ! Find the spherical angle between arcs AP and AB (where P is the point to be reflected) - ! - alpha = sphere_angle(xA, yA, zA, xPoint, yPoint, zPoint, xB, yB, zB) - - ! - ! Rotate the point to be reflected by twice alpha about the vector from the origin to A - ! - call rotate_about_vector(xPoint, yPoint, zPoint, 2.0_RKIND * alpha, 0.0_RKIND, 0.0_RKIND, 0.0_RKIND, & - xA, yA, zA, xMirror, yMirror, zMirror) - -end subroutine mirror_point - - -!----------------------------------------------------------------------- -! routine rotate_about_vector -! -!> \brief Rotates a point about a vector in R3 -!> \author Michael Duda -!> \date 7 March 2019 -!> \details -!> Rotates the point (x,y,z) through an angle theta about the vector -!> originating at (a, b, c) and having direction (u, v, w). -! 
-!> Reference: https://sites.google.com/site/glennmurray/Home/rotation-matrices-and-formulas/rotation-about-an-arbitrary-axis-in-3-dimensions -! -!----------------------------------------------------------------------- -subroutine rotate_about_vector(x, y, z, theta, a, b, c, u, v, w, xp, yp, zp) - - implicit none - - real (kind=RKIND), intent(in) :: x, y, z, theta, a, b, c, u, v, w - real (kind=RKIND), intent(out) :: xp, yp, zp - - real (kind=RKIND) :: vw2, uw2, uv2 - real (kind=RKIND) :: m - - vw2 = v**2.0 + w**2.0 - uw2 = u**2.0 + w**2.0 - uv2 = u**2.0 + v**2.0 - m = sqrt(u**2.0 + v**2.0 + w**2.0) - - xp = (a*vw2 + u*(-b*v-c*w+u*x+v*y+w*z) + ((x-a)*vw2+u*(b*v+c*w-v*y-w*z))*cos(theta) + m*(-c*v+b*w-w*y+v*z)*sin(theta))/m**2.0 - yp = (b*uw2 + v*(-a*u-c*w+u*x+v*y+w*z) + ((y-b)*uw2+v*(a*u+c*w-u*x-w*z))*cos(theta) + m*( c*u-a*w+w*x-u*z)*sin(theta))/m**2.0 - zp = (c*uv2 + w*(-a*u-b*v+u*x+v*y+w*z) + ((z-c)*uv2+w*(a*u+b*v-u*x-v*y))*cos(theta) + m*(-b*u+a*v-v*x+u*y)*sin(theta))/m**2.0 - -end subroutine rotate_about_vector - - -!----------------------------------------------------------------------- -! routine in_cell -! -!> \brief Determines whether a point is within a Voronoi cell -!> \author Michael Duda -!> \date 7 March 2019 -!> \details -!> Given a point on the surface of the sphere, the corner points of a Voronoi -!> cell, and the generating point for that Voronoi cell, determines whether -!> the given point is within the Voronoi cell. -! -!----------------------------------------------------------------------- -logical function in_cell(xPoint, yPoint, zPoint, xCell, yCell, zCell, & - nEdgesOnCell, verticesOnCell, xVertex, yVertex, zVertex) - - use mpas_geometry_utils, only : mpas_arc_length - - implicit none - - real(kind=RKIND), intent(in) :: xPoint, yPoint, zPoint - real(kind=RKIND), intent(in) :: xCell, yCell, zCell - integer, intent(in) :: nEdgesOnCell - integer, dimension(:), intent(in) :: verticesOnCell - real(kind=RKIND), dimension(:), intent(in) :: xVertex, yVertex, zVertex - - integer :: i - integer :: vtx1, vtx2 - real(kind=RKIND) :: xNeighbor, yNeighbor, zNeighbor - real(kind=RKIND) :: inDist, outDist - real(kind=RKIND) :: radius - real(kind=RKIND) :: radius_inv - - radius = sqrt(xCell * xCell + yCell * yCell + zCell * zCell) - radius_inv = 1.0_RKIND / radius - - inDist = mpas_arc_length(xPoint, yPoint, zPoint, xCell, yCell, zCell) - - in_cell = .true. - - do i=1,nEdgesOnCell - vtx1 = verticesOnCell(i) - vtx2 = verticesOnCell(mod(i,nEdgesOnCell)+1) - - call mirror_point(xCell*radius_inv, yCell*radius_inv, zCell*radius_inv, & - xVertex(vtx1)*radius_inv, yVertex(vtx1)*radius_inv, zVertex(vtx1)*radius_inv, & - xVertex(vtx2)*radius_inv, yVertex(vtx2)*radius_inv, zVertex(vtx2)*radius_inv, & - xNeighbor, yNeighbor, zNeighbor) - - xNeighbor = xNeighbor * radius - yNeighbor = yNeighbor * radius - zNeighbor = zNeighbor * radius - - outDist = mpas_arc_length(xPoint, yPoint, zPoint, xNeighbor, yNeighbor, zNeighbor) - - if (outDist < inDist) then - in_cell = .false. 
- return - end if - - end do - -end function in_cell - - !================================================================================================== end module mpas_init_atm_static !================================================================================================== diff --git a/src/core_init_atmosphere/mpas_kd_tree.F b/src/core_init_atmosphere/mpas_kd_tree.F new file mode 100644 index 0000000000..638595d3c6 --- /dev/null +++ b/src/core_init_atmosphere/mpas_kd_tree.F @@ -0,0 +1,457 @@ +module mpas_kd_tree + + !*********************************************************************** + ! + ! module mpas_kd_tree + ! + !> \brief MPAS KD-Tree module + !> \author Miles A. Curry + !> \date 01/28/20 + !> A KD-Tree implementation to create and search perfectly balanced + !> KD-Trees. + !> + !> Use `mpas_kd_type` dervied type to construct points for mpas_kd_construct: + !> + !> real (kind=RKIND), dimension(:,:), allocatable :: array + !> type (mpas_kd_type), pointer :: tree => null() + !> type (mpas_kd_type), dimension(:), pointer :: points => null() + !> + !> allocate(array(k,n)) ! K dims and n points + !> allocate(points(n)) + !> array(:,:) = (/.../) ! Fill array with values + !> + !> do i = 1, n + !> allocate(points(i) % point(k)) ! Allocate point with k dimensions + !> points(i) % point(:) = array(:,i) + !> points(i) % id = i ! Or a value of your choice + !> enddo + !> + !> tree => mpas_kd_construct(points, k) + !> + !> call mpas_kd_free(tree) + !> deallocate(points) + !> deallocate(array) + !> + ! + !----------------------------------------------------------------------- + use mpas_kind_types, only : RKIND + + implicit none + + private + + public :: mpas_kd_type + + ! Public Subroutines + public :: mpas_kd_construct + public :: mpas_kd_search + public :: mpas_kd_free + + type mpas_kd_type + type (mpas_kd_type), pointer :: left => null() + type (mpas_kd_type), pointer :: right => null() + + integer :: split_dim + real (kind=RKIND), dimension(:), pointer :: point => null() + + integer :: id + end type mpas_kd_type + + contains + + !*********************************************************************** + ! + ! recursive routine mpas_kd_construct_internal + ! + !> \brief Create a KD-Tree from a set of k-Dimensional points + !> \author Miles A. Curry + !> \date 01/28/20 + !> \details + !> Private, recursive function to construct a KD-Tree from an array + !> of mpas_kd_type, points, and return the root of the tree. + !> + !> ndims should be the dimensioned of each individual point found + !> in points and npoints should be the number of points. dim represents + !> the current split dimensioned and is used internally. Upon calling + !> this function, dim should always be set to 0. + ! + !----------------------------------------------------------------------- + recursive function mpas_kd_construct_internal(points, ndims, npoints, dim) result(tree) + + implicit none + + ! Input Variables + type (mpas_kd_type), dimension(:), target :: points + integer, intent(in) :: ndims + integer, value :: npoints + integer, value :: dim + + ! Return Value + type (mpas_kd_type), pointer :: tree + + ! Local Variables + integer :: median + + if (npoints < 1) then + tree => null() + return + endif + + ! Sort the points at the split dimension + dim = mod(dim, ndims) + 1 + call quickSort(points, dim, 1, npoints, ndims) + + median = (1 + npoints) / 2 + + points(median) % split_dim = dim + tree => points(median) + + ! Build the right and left sub-trees but do not include the median + ! 
point (the root of the current tree) + if (npoints /= 1) then + points(median) % left => mpas_kd_construct_internal(points(1:median-1), ndims, median - 1, points(median) % split_dim) + points(median) % right => mpas_kd_construct_internal(points(median+1:npoints), ndims, npoints - median, & + points(median) % split_dim) + endif + + end function mpas_kd_construct_internal + + + !*********************************************************************** + ! + ! routine mpas_kd_construct + ! + !> \brief Construct a balanced KD-Tree + !> \author Miles A. Curry + !> \date 01/28/20 + !> \details + !> Create and return a perfectly balanced KD-Tree from an array of + !> mpas_kd_type, points. The point member of every element of the points + !> array should be allocated and set to the points desired to be in the + !> KD-Tree and ndims should be the dimensions of the points. + !> + !> Upon error, the returned tree will be unassociated. + ! + !----------------------------------------------------------------------- + function mpas_kd_construct(points, ndims) result(tree) + + implicit none + + ! Input Varaibles + type (mpas_kd_type), dimension(:) :: points + integer, intent(in) :: ndims + + ! Return Value + type (mpas_kd_type), pointer :: tree + + ! Local Varaibles + integer :: npoints + + npoints = size(points) + + if (npoints < 1) then + tree => null() + return + endif + + tree => mpas_kd_construct_internal(points(:), ndims, npoints, 0) + + end function mpas_kd_construct + + !*********************************************************************** + ! + ! routine break_tie + ! + !> \brief Break a tie for two n-dim points + !> \author Miles A. Curry + !> \date 07/07/20 + !> \details + !> Compare 1..n dimensions of p1 and p2 and return -1 if p1(i) is less than + !> p2(i) and return 1 if p1(i) is greater than p2(i). If p1(i) and p2(i) are + !> equal, then the same comparison will be done on p1(i+1) and p2(i+1) until + !> p1(n) and p2(n). If p1(:) and p2(:) are equal across all n, then 0 will + !> be returned. + ! + !----------------------------------------------------------------------- + function break_tie(p1, p2) result(tie) + + implicit none + + ! Input Variables + type (mpas_kd_type), intent(in) :: p1 + type (mpas_kd_type), intent(in) :: p2 + integer :: tie + + integer :: i + + tie = 0 + do i = 1, size(p1 % point(:)) + if (p1 % point(i) < p2 % point(i)) then + tie = -1 + return + else if (p1 % point(i) > p2 % point(i)) then + tie = 1 + return + endif + enddo + + end function break_tie + + + !*********************************************************************** + ! + ! recursive routine mpas_kd_search_internal + ! + !> \brief Recursively search the KD-Tree for query + !> \author Miles A. Curry + !> \date 01/28/20 + !> \details + !> Private, recursive function to search kdtree for query. Upon succes + !> res will point to the nearest neighbor to query and distance will hold + !> the squared distance between query and res. + !> + !> Distance is calculated and compared as squared distance to increase + !> efficiency. + ! + !----------------------------------------------------------------------- + recursive subroutine mpas_kd_search_internal(kdtree, query, res, distance) + + implicit none + + ! Input Variables + type (mpas_kd_type), pointer, intent(in) :: kdtree + real (kind=RKIND), dimension(:), intent(in) :: query + type (mpas_kd_type), pointer, intent(inout) :: res + real (kind=RKIND), intent(inout) :: distance + + ! 
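+      ! (Note added for clarity, based on the code below rather than the original
+      !  comments: on entry, res and distance hold the best candidate found so far,
+      !  where distance is a *squared* distance; the public wrapper mpas_kd_search
+      !  initializes distance to huge() before the first call.)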
+      ! Local variables
+      real (kind=RKIND) :: current_distance
+
+      current_distance = sum((kdtree % point(:) - query(:))**2)
+      if (current_distance < distance) then
+         distance = current_distance
+         res => kdtree
+      else if (current_distance == distance) then
+         !
+         ! Consistently break a tie if a query is equidistant from two points
+         !
+         if (associated(res)) then
+            if (break_tie(res, kdtree) == 1) then
+               res => kdtree
+            endif
+         endif
+      endif
+
+      !
+      ! To find the nearest neighbor, first search the tree in a similar manner
+      ! as a single-dimensioned BST, by comparing points on the current split
+      ! dimension.
+      !
+      ! If the distance between the current node and the query is less than the
+      ! minimum distance found within the subtree we just searched, then the nearest
+      ! neighbor might be in the opposite subtree, so search it.
+      !
+
+      if (query(kdtree % split_dim) > kdtree % point(kdtree % split_dim)) then
+         if (associated(kdtree % right)) then ! Search right
+            call mpas_kd_search_internal(kdtree % right, query, res, distance)
+         endif
+         if ((kdtree % point(kdtree % split_dim) - query(kdtree % split_dim))**2 <= distance .and. associated(kdtree % left)) then
+            call mpas_kd_search_internal(kdtree % left, query, res, distance) ! Check the other subtree
+         endif
+      else if (query(kdtree % split_dim) < kdtree % point(kdtree % split_dim)) then
+         if (associated(kdtree % left)) then ! Search left
+            call mpas_kd_search_internal(kdtree % left, query, res, distance)
+         endif
+         if ((kdtree % point(kdtree % split_dim) - query(kdtree % split_dim))**2 <= distance .and. associated(kdtree % right)) then
+            call mpas_kd_search_internal(kdtree % right, query, res, distance) ! Check the other subtree
+         endif
+      else ! Nearest point could be in either left or right subtree, so search both
+         if (associated(kdtree % right)) then
+            call mpas_kd_search_internal(kdtree % right, query, res, distance)
+         endif
+         if (associated(kdtree % left)) then
+            call mpas_kd_search_internal(kdtree % left, query, res, distance)
+         endif
+      endif
+
+   end subroutine mpas_kd_search_internal
+
+   !***********************************************************************
+   !
+   ! routine mpas_kd_search
+   !
+   !> \brief Find the nearest point in a KD-Tree to a query
+   !> \author Miles A. Curry
+   !> \date 01/28/20
+   !> \details
+   !> Search kdtree and return the nearest point to query in the res
+   !> argument. Optionally, if distance is present, return the
+   !> squared distance between query and res.
+   !>
+   !> If the dimension of query does not match the dimensions of points
+   !> within kdtree, then res will be returned as unassociated. Likewise,
+   !> if kdtree is empty/unassociated, res will be returned as unassociated.
+   !
+   !-----------------------------------------------------------------------
+   subroutine mpas_kd_search(kdtree, query, res, distance)
+
+      implicit none
+      type (mpas_kd_type), pointer, intent(in) :: kdtree
+      real (kind=RKIND), dimension(:), intent(in) :: query
+      type (mpas_kd_type), pointer, intent(inout) :: res
+      real (kind=RKIND), intent(out), optional :: distance
+
+      real (kind=RKIND) :: dis
+
+      if (.not. associated(kdtree)) then
+         res => null()
+         return
+      end if
+
+      if (size(kdtree % point) /= size(query)) then
+         res => null()
+         return
+      endif
+
+      dis = huge(dis)
+      call mpas_kd_search_internal(kdtree, query, res, dis)
+
+      if (present(distance)) then
+         distance = dis
+      endif
+
+   end subroutine mpas_kd_search
+
+   !***********************************************************************
+   !
+   ! routine mpas_kd_free
+   !
+   !> \brief Free all nodes within a tree.
+   !> \author Miles A. Curry
+   !> \date 01/28/20
+   !> \details
+   !> Deallocate and nullify the point member of every node of kdtree, and
+   !> nullify the left and right pointers.
+   !>
+   !> After calling this function, the array of mpas_kd_type that was used
+   !> to construct kdtree will still be allocated and will need to be
+   !> deallocated separately from this routine.
+   !
+   !-----------------------------------------------------------------------
+   recursive subroutine mpas_kd_free(kdtree)
+
+      implicit none
+      type (mpas_kd_type), pointer :: kdtree
+
+      if (.not. associated(kdtree)) then
+         return
+      endif
+
+      if (associated(kdtree % left)) then
+         call mpas_kd_free(kdtree % left)
+      endif
+
+      if (associated(kdtree % right)) then
+         call mpas_kd_free(kdtree % right)
+      endif
+
+      deallocate(kdtree % point)
+      nullify(kdtree % left)
+      nullify(kdtree % right)
+      nullify(kdtree)
+
+   end subroutine mpas_kd_free
+
+
+   !***********************************************************************
+   !
+   ! routine mpas_kd_quicksort
+   !
+   !> \brief Sort an array along a dimension
+   !> \author Miles A. Curry
+   !> \date 01/28/20
+   !> \details
+   !> Sort the points of array from arrayStart to arrayEnd along the given
+   !> dimension `dim`. If two points are swapped, the entire k-dimensional
+   !> points are swapped.
+   !
+   !-----------------------------------------------------------------------
+   recursive subroutine quickSort(array, dim, arrayStart, arrayEnd, ndims)
+
+      implicit none
+
+      ! Input Variables
+      type (mpas_kd_type), dimension(:) :: array
+      integer, intent(in), value :: dim
+      integer, intent(in), value :: arrayStart, arrayEnd
+      integer, intent(in) :: ndims
+
+      ! Local Variables
+      type (mpas_kd_type) :: temp
+      real (kind=RKIND), dimension(ndims) :: pivot_value
+
+      integer :: l, r, pivot, s
+
+      if ((arrayEnd - arrayStart) < 1) then
+         return
+      endif
+
+      ! Create the left, right, and start pointers
+      l = arrayStart
+      r = arrayEnd - 1
+      s = l
+
+      pivot = (l+r)/2
+      pivot_value = array(pivot) % point
+
+      ! Move the pivot to the far right
+      temp = array(pivot)
+      array(pivot) = array(arrayEnd)
+      array(arrayEnd) = temp
+
+      do while (.true.)
+         ! Advance the left pointer until it reaches a value that is not less than pivot_value(dim)
+         do while (.true.)
+            if (array(l) % point(dim) < pivot_value(dim)) then
+               l = l + 1
+            else
+               exit
+            endif
+         enddo
+
+         ! Move the right pointer down until it reaches a value that is less than pivot_value(dim)
+         do while (.true.)
+            if (r <= 0) then
+               exit
+            endif
+
+            if (array(r) % point(dim) >= pivot_value(dim)) then
+               r = r - 1
+            else
+               exit
+            endif
+         enddo
+
+         if (l >= r) then
+            exit
+         else ! Swap elements about the pivot
+            temp = array(l)
+            array(l) = array(r)
+            array(r) = temp
+         endif
+      enddo
+
+      ! Move the pivot to where l ended up
+      temp = array(l)
+      array(l) = array(arrayEnd)
+      array(arrayEnd) = temp
+
+      ! Quick Sort on the lower partition
+      call quickSort(array(:), dim, s, l-1, ndims)
+
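+      ! (Note added for clarity; this states the partition invariant assumed here rather
+      !  than anything from the original comments: after the partition loop above, the
+      !  pivot value sits at index l, entries s..l-1 compare less than pivot_value(dim),
+      !  and entries l+1..arrayEnd compare greater than or equal to it, so the recursive
+      !  call above and the one below can sort the two halves independently.)
+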
+      ! Quick sort on the upper partition
+      call quickSort(array(:), dim, l+1, arrayEnd, ndims)
+
+   end subroutine quicksort
+
+end module mpas_kd_tree
diff --git a/src/core_init_atmosphere/mpas_parse_geoindex.F b/src/core_init_atmosphere/mpas_parse_geoindex.F
new file mode 100644
index 0000000000..eef129b2db
--- /dev/null
+++ b/src/core_init_atmosphere/mpas_parse_geoindex.F
@@ -0,0 +1,263 @@
+module mpas_parse_geoindex
+
+   use mpas_log, only : mpas_log_write
+   use mpas_derived_types, only : MPAS_LOG_ERR, MPAS_LOG_WARN
+   use mpas_pool_routines
+
+   implicit none
+
+   private
+
+   public :: mpas_parse_index
+
+   contains
+
+   !***********************************************************************
+   !
+   ! subroutine mpas_parse_index
+   !
+   !> \brief Parse a geogrid's index file and put the results into an MPAS pool
+   !> \author Miles A. Curry
+   !> \date 02/20/2020
+   !> \details
+   !> Parse the index file of a static data set into an MPAS pool, adding
+   !> each keyword=value pair to the pool with the keyword as the pool key
+   !> and the value as the pool value.
+   !>
+   !> This function can parse index files with one keyword=value pair
+   !> per line; a "#" at the start of a line, which will cause the line to be
+   !> ignored; or an empty line containing only a newline/return character, which
+   !> will also be ignored. Spaces or tabs before, between or after the
+   !> keyword=value tokens are ignored.
+   !>
+   !> If a line contains anything but the above valid syntaxes, a syntax
+   !> error will be raised and -1 will be returned.
+   !>
+   !> Case is ignored.
+   !>
+   !> The definition of a keyword, which can be found in section 3-53
+   !> of the WRF-ARW User's Guide, determines the corresponding type
+   !> of that keyword. A keyword that has been assigned the wrong type
+   !> will raise a type error and -1 will be returned.
+   !>
+   !> Keywords that are not handled explicitly by this function will produce
+   !> a warning that the keyword is unrecognized.
+   !
+   !-----------------------------------------------------------------------
+   function mpas_parse_index(path, geo_pool) result(ierr)
+
+      use mpas_io_units
+
+      implicit none
+      ! Input Variables
+      character (len=*), intent(in) :: path
+      type (mpas_pool_type), intent(inout) :: geo_pool
+      integer :: ierr
+
+      ! Local Variables
+      character (len=StrKIND) :: line, lhs, rhs
+      character (len=StrKIND) :: read_err_msg, open_msg
+      integer :: geo_unit
+      integer :: open_stat, read_stat, line_read_stat
+      integer :: i, k
+      logical :: res
+
+      character (len=StrKIND), pointer :: char_t
+      integer :: iceiling, ifloor
+      integer, pointer :: int_t
+      real(kind=RKIND), pointer :: real_t
+
+      ierr = 0
+
+      inquire(file=trim(path), exist=res)
+      if ( .not. res) then
+         call mpas_log_write("Could not find or open the file at: "//trim(path), messageType=MPAS_LOG_ERR)
+         ierr = -1
+         return
+      endif
+
+      call mpas_new_unit(geo_unit)
+      open_stat = 0
+      open(geo_unit, FILE=trim(path), action='READ', iostat=open_stat, iomsg=open_msg)
+      if (open_stat /= 0) then
+         call mpas_release_unit(geo_unit)
+         call mpas_log_write("Could not open 'index' file at:'"//trim(path)//"'", messageType=MPAS_LOG_ERR)
+         call mpas_log_write(trim(open_msg), messageType=MPAS_LOG_ERR)
+         ierr = -1
+         return
+      endif
+
+      line_read_stat = 0
+      read_stat = 0
+      k = 1 ! Keep track of line numbers for error reporting
+      read(geo_unit,'(a)', iostat=line_read_stat) line
+      do while ( line_read_stat == 0 )
+         line = lowercase(line)
+
+         !
+         ! If a blank or comment line is encountered, read the next line
+         !
+         if (line(1:1) == '#' .or.
len_trim(line) == 0) then + k = k + 1 + read(geo_unit,'(a)', iostat=line_read_stat) line + cycle + endif + + do i = 1, len(trim(line)), 1 + if (line(i:i) == '=') then + lhs = adjustl(trim(line(1:i-1))) + rhs = adjustl(trim(line(i+1:len(trim(line))))) + exit + endif + ! If i is at the end of the string, and we haven't broken out of this loop, + ! then we do not have a '=' present in this line, thus we have a syntax error + if (i == len(trim(line))) then + close(geo_unit) + call mpas_release_unit(geo_unit) + call mpas_log_write("Syntax error on line $i of index file: '"//trim(path)//"'", & + intArgs=(/k/), messageType=MPAS_LOG_ERR) + call mpas_log_write("Line $i: '"//trim(line)//"'", intArgs=(/k/), messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + enddo + + ! + ! Strings + ! + if ( trim(lhs) == 'type' & + .or. trim(lhs) == 'projection' & + .or. trim(lhs) == 'units' & + .or. trim(lhs) == 'description' & + .or. trim(lhs) == 'row_order' & + .or. trim(lhs) == 'endian' & + .or. trim(lhs) == 'mminlu' ) then + + allocate(char_t) + char_t = rhs + call mpas_pool_add_config(geo_pool, trim(lhs), char_t) + + ! + ! Reals + ! + else if ( trim(lhs) == 'dx' & + .or. trim(lhs) == 'dy' & + .or. trim(lhs) == 'known_x' & + .or. trim(lhs) == 'known_y' & + .or. trim(lhs) == 'known_lat' & + .or. trim(lhs) == 'known_lon' & + .or. trim(lhs) == 'scale_factor' & + .or. trim(lhs) == 'stdlon' & + .or. trim(lhs) == 'truelat1' & + .or. trim(lhs) == 'truelat2' & + .or. trim(lhs) == 'missing_value' ) then + + allocate(real_t) + read(rhs, *, iostat=read_stat, iomsg=read_err_msg) real_t + call mpas_pool_add_config(geo_pool, trim(lhs), real_t) + + ! + ! Integers + ! + else if ( trim(lhs) == 'tile_x' & + .or. trim(lhs) == 'tile_y' & + .or. trim(lhs) == 'tile_z' & + .or. trim(lhs) == 'tile_z_start' & + .or. trim(lhs) == 'tile_z_end' & + .or. trim(lhs) == 'tile_bdr' & + .or. trim(lhs) == 'wordsize' & + .or. trim(lhs) == 'category_max' & + .or. trim(lhs) == 'category_min' & + .or. trim(lhs) == 'iswater' & + .or. trim(lhs) == 'islake' & + .or. trim(lhs) == 'isice' & + .or. trim(lhs) == 'isurban' & + .or. trim(lhs) == 'isoilwater' & + .or. trim(lhs) == 'filename_digits' ) then + + ! Because each compiler handles reporting type errors when transferring + ! data in a read statement a little bit differently, we will have to type check + ! integer values ourselves. + allocate(real_t) + read(rhs, *, iostat=read_stat, iomsg=read_err_msg) real_t + iceiling = ceiling(real_t) + ifloor = floor(real_t) + if (iceiling /= ifloor) then + close(geo_unit) + call mpas_release_unit(geo_unit) + call mpas_log_write("Type error while reading '"//trim(path)//"'.", messageType=MPAS_LOG_ERR) + call mpas_log_write("Could not convert '"//trim(rhs)//"' to an integer on line $i: '"//trim(line)//"'", & + intArgs=(/k/), messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + + allocate(int_t) + int_t = int(real_t) + deallocate(real_t) + call mpas_pool_add_config(geo_pool, trim(lhs), int_t) + + ! + ! Booleans - Yes will be assigned 1, and no will be assigned to 0 + ! 
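+         ! As an illustration (hypothetical index-file line, not taken from any
+         ! particular dataset), a line such as
+         !     signed=yes
+         ! is stored in geo_pool as the integer config 'signed' with value 1, which a
+         ! caller could later retrieve with, for example,
+         !     integer, pointer :: signed_ptr
+         !     call mpas_pool_get_config(geo_pool, 'signed', signed_ptr)
+         !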
+ else if (lhs == 'signed') then + if (trim(rhs) == 'yes') then + allocate(int_t) + int_t = 1 + call mpas_pool_add_config(geo_pool, trim(lhs), int_t) + else if (trim(rhs) == 'no') then + allocate(int_t) + int_t = 0 + call mpas_pool_add_config(geo_pool, trim(lhs), int_t) + else + read_stat = -1 + read_err_msg = "Logical was not correct type" + endif + else + call mpas_log_write("Unrecognized keyword: '"//trim(lhs)//"' on line $i of '"//trim(path)//"'", intArgs=(/k/), & + messageType=MPAS_LOG_WARN) + endif + ! Since read gives us an error string in iomsg on a type error, we + ! can handle all errors for any type in one place + if ( read_stat /= 0) then + close(geo_unit) + call mpas_release_unit(geo_unit) + call mpas_log_write("Type error on line $i of: '"//trim(path)//"'.", intArgs=(/k/), messageType=MPAS_LOG_ERR) + call mpas_log_write(trim(read_err_msg)//": '"//trim(line)//"'", messageType=MPAS_LOG_ERR) + ierr = -1 + return + endif + + k = k + 1 + read(geo_unit,'(a)', iostat=line_read_stat) line + enddo + + close(geo_unit) + call mpas_release_unit(geo_unit) + + end function mpas_parse_index + + + ! Returns a copy of 'str' in which all upper-case letters have been converted + ! to lower-case letters. + function lowercase(str) result(lowerStr) + + character(len=*), intent(in) :: str + character(len=len(str)) :: lowerStr + + integer :: i + integer, parameter :: offset = (iachar('a') - iachar('A')) + + + do i=1,len(str) + if (iachar(str(i:i)) >= iachar('A') .and. iachar(str(i:i)) <= iachar('Z')) then + lowerStr(i:i) = achar(iachar(str(i:i)) + offset) + else + lowerStr(i:i) = str(i:i) + end if + end do + + end function lowercase + + +end module mpas_parse_geoindex diff --git a/src/core_init_atmosphere/mpas_stack.F b/src/core_init_atmosphere/mpas_stack.F new file mode 100644 index 0000000000..7227295a9f --- /dev/null +++ b/src/core_init_atmosphere/mpas_stack.F @@ -0,0 +1,280 @@ +module mpas_stack + + implicit none + + private + + ! Public Subroutines and Structures + public :: mpas_stack_is_empty + public :: mpas_stack_push + public :: mpas_stack_pop + public :: mpas_stack_free + + public :: mpas_stack_type, mpas_stack_payload_type + + type mpas_stack_payload_type + end type mpas_stack_payload_type + + type mpas_stack_type + type (mpas_stack_type), pointer :: next => null() + class (mpas_stack_payload_type), pointer :: payload => null() + end type mpas_stack_type + + !*********************************************************************** + ! + ! module mpas_stack + ! + !> \brief MPAS Stack module + !> \author Miles A. Curry + !> \date 04/04/19 + !> \details + !> + !> Introduction + !> ============== + !> The MPAS stack is a simple, extensible data stack data structure for use + !> within the MPAS atmospheric model. It functions as a wrapper around a + !> polymorphic data structure to provide usage in different areas. + !> + !> + !> Creating a Stack + !> ================== + !> The stack data structure (`type (mpas_stack_type)`) is defined by a single + !> `next` pointer > and a pointer to a `type (mpas_stack_payload_type)`, which + !> is defined as a empty derived type. + !> + !> To use the stack, create a derived type that extends the `mpas_stack_payload_type` + !> type. Define your extended derived type with members that meets your application. + !> + !> For instance: + !> ``` + !> type, extends(mpas_stack_payload_type) :: my_payload_name + !> ! 
Define the members of your type as you wish + !> end type my_payload_name + !> + !> class (my_payload_name), pointer :: item1 => null(), item2 => null() + !> ``` + !> + !> The extended mpas_stack_payload_type will enable a user defined type to be + !> associated with a stack item. The stack stores references of a payload, thus + !> a single payload can be used in multiple push operations. + !> + !> You will then need to create a stack (or multiple stacks if you desire) as + !> the following: + !> + !> ``` + !> type (mpas_stack_type), pointer :: stack1 => null(), stack2 => null() + !> ``` + !> + !> Pushing onto a Stack + !> ==================== + !> You can push your items onto a stack as: + !> + !> ``` + !> allocate(item1) + !> stack1 => mpas_stack_push(stack1, item1) + !> allocate(item2) + !> stack1 => mpas_stack_push(stack1, item2) + !> ``` + !> + !> Popping an item off of the stack + !> ================================ + !> Popping an item off of the stack will require a bit more work than pushing. + !> Because the payload is a polymorphic class , we will need to use the select + !> case to get our type (or multiple types) back into a usable object: + !> ``` + !> ! The item to pop items into + !> class (mpas_stack_payload_type), pointer :: top + !> type (my_payload_name), pointer :: my_item + !> + !> top => mpas_stack_pop(stack1) + !> select type(top) + !> type is(my_payload_name) + !> my_item => top + !> end select + !> ``` + !> + !> Note: It is recommended to create your own `pop` function so you can reduce + !> the amount of coded needed. An example is provided at the bottom of + !> this module as the function `user_pop(..)` + ! + !----------------------------------------------------------------------- + + contains + + !*********************************************************************** + ! + ! routine mpas_stack_is_empty + ! + !> \brief Returns .true. if the stack is empty, otherwise .false. + !> \author Miles A. Curry + !> \date 01/28/20 + !> Returns .true. If the stack is empty and/or if the stack is unassociated. + ! + !----------------------------------------------------------------------- + function mpas_stack_is_empty(stack) result(is_empty) + + implicit none + type (mpas_stack_type), intent(in), pointer :: stack + logical :: is_empty + + is_empty = .true. + if (associated(stack)) then + is_empty = .false. + return + endif + + end function mpas_stack_is_empty + + !*********************************************************************** + ! + ! routine mpas_stack_push + ! + !> \brief Push an item onto stack + !> \author Miles A. Curry + !> \date 01/28/20 + !> \details + !> + !> Push a mpas_stack_payload_type type, onto `stack` and return the new stack. If + !> `payload` is the first item to be pushed onto the stack, then `stack` + !> should be unassociated. + ! + !----------------------------------------------------------------------- + function mpas_stack_push(stack, payload) result(new_stack) + + implicit none + + type(mpas_stack_type), intent(inout), pointer :: stack + class(mpas_stack_payload_type), intent(inout), target :: payload + + type(mpas_stack_type), pointer :: new_stack + + allocate(new_stack) + new_stack % payload => payload + new_stack % next => stack + + return + + end function mpas_stack_push + + !*********************************************************************** + ! + ! function mpas_stack_pop + ! + !> \brief Pop off the last item added from a stack + !> \author Miles A. 
Curry + !> \date 01/28/20 + !> \details + !> Pop off and return the top item of the stack as a `class mpas_stack_payload_type`. + !> If the stack is empty (or unassociated), then a null `class mpas_stack_payload_type` + !> pointer will be returned. `select type` will need to be used to retrieve + !> any extended members. + ! + !----------------------------------------------------------------------- + function mpas_stack_pop(stack) result(top) + + implicit none + + type (mpas_stack_type), intent(inout), pointer :: stack + type (mpas_stack_type), pointer :: next => null() + class(mpas_stack_payload_type), pointer :: top + + if ( .not. associated(stack)) then + top => null() + return + endif + + top => stack % payload + next => stack % next + deallocate(stack) + stack => next + return + + end function mpas_stack_pop + + !*********************************************************************** + ! + ! function mpas_stack_free + ! + !> \brief Deallocate the entire stack. Optionally deallocate payloads + !> \author Miles A. Curry + !> \date 01/28/20 + !> \details + !> Deallocate the entire stack. If free_payload is set to `.true.` or if + !> absent then the payload will be deallocated. If not, then the payload will not + !> be deallocated. Upon success, the stack will be unassociated. + ! + !----------------------------------------------------------------------- + subroutine mpas_stack_free(stack, free_payload) + + implicit none + + type(mpas_stack_type), intent(inout), pointer :: stack + logical, intent(in), optional :: free_payload + logical :: fpl + + type(mpas_stack_type), pointer :: cur + + if (present(free_payload)) then + fpl = free_payload + else + fpl = .true. + endif + + cur => stack + do while(associated(stack)) + stack => stack % next + if ( fpl ) then + deallocate(cur % payload) + endif + deallocate(cur) + cur => stack + enddo + + end subroutine mpas_stack_free + + + !*********************************************************************** + ! + ! Example user-defined pop function + ! + !> \brief Pop off the last item added from a stack and return it as our + !> defined type + !> \author Miles A. Curry + !> \date 01/28/20 + ! + !----------------------------------------------------------------------- + ! function user_pop(stack) result(item) + ! + ! use mpas_stack, only : mpas_stack_type, mpas_stack_payload_type, mpas_stack_pop + ! + ! implicit none + ! + ! type(mpas_stack_type), intent(inout), pointer :: stack + ! + ! type(my_item), pointer :: item ! Our user defined mpas_stack_type + ! + ! ! We will need to use the mpas_stack_payload_type type to use mpas_stack_pop(...) + ! class(mpas_stack_payload_type), pointer :: top + ! + ! ! + ! ! Handle a pop on an empty stack if we want to here + ! ! Note the stack will return null if it is empty. + ! ! + ! if (mpas_stack_is_empty(stack)) then + ! item => null() + ! return + ! endif + ! + ! top => mpas_stack_pop(stack) + ! + ! select type(top) + ! type is(my_item) + ! item => top + ! class default + ! write(0,*) "We got an Error and we should handle it if we need to!!" + ! stop + ! end select + ! + ! 
end function user_pop + +end module mpas_stack diff --git a/src/core_init_atmosphere/read_geogrid.c b/src/core_init_atmosphere/read_geogrid.c index e6ffc6d305..ec66892bea 100644 --- a/src/core_init_atmosphere/read_geogrid.c +++ b/src/core_init_atmosphere/read_geogrid.c @@ -26,7 +26,7 @@ interface subroutine read_geogrid(fname, rarray, nx, ny, nz, isigned, endian, & - scalefactor, wordsize, status) bind(C) + wordsize, status) bind(C) use iso_c_binding, only : c_char, c_int, c_float, c_ptr character (c_char), dimension(*), intent(in) :: fname type (c_ptr), value :: rarray @@ -35,7 +35,6 @@ integer (c_int), intent(in), value :: nz integer (c_int), intent(in), value :: isigned integer (c_int), intent(in), value :: endian - real (c_float), intent(in), value :: scalefactor integer (c_int), intent(in), value :: wordsize integer (c_int), intent(inout) :: status end subroutine read_geogrid @@ -51,7 +50,6 @@ int read_geogrid( int nz, /* z-dimension of the array */ int isigned, /* 0=unsigned data, 1=signed data */ int endian, /* 0=big endian, 1=little endian */ - float scalefactor, /* value to multiply array elements by before truncation to integers */ int wordsize, /* number of bytes to use for each array element */ int * status) { @@ -142,12 +140,5 @@ int read_geogrid( free(c); - /* Scale real-valued array by scalefactor */ - if (scalefactor != 1.0) - { - for (i=0; i<narray; i++) - rarray[i] = rarray[i] * (scalefactor); - } - return 0; } diff --git a/src/core_landice/landice.cmake b/src/core_landice/landice.cmake new file mode 100644 index 0000000000..0d580d7800 --- /dev/null +++ b/src/core_landice/landice.cmake @@ -0,0 +1,79 @@ + +# build_options.mk stuff handled here +list(APPEND CPPDEFS "-DCORE_LANDICE") +list(APPEND INCLUDES "${CMAKE_BINARY_DIR}/core_landice/shared" "${CMAKE_BINARY_DIR}/core_landice/analysis_members" "${CMAKE_BINARY_DIR}/core_landice/mode_forward") + +# +# Check if building with LifeV, Albany, and/or PHG external libraries +# + +if (LIFEV) + # LifeV can solve L1L2 or FO + list(APPEND CPPDEFS "-DLIFEV" "-DUSE_EXTERNAL_L1L2" "-DUSE_EXTERNAL_FIRSTORDER" "-DMPAS_LI_BUILD_INTERFACE") +endif() + +# Albany can only solve FO at present +if (ALBANY) + list(APPEND CPPDEFS "-DUSE_EXTERNAL_FIRSTORDER" "-DMPAS_LI_BUILD_INTERFACE") +endif() + +if (LIFEV AND ALBANY) + message(FATAL "Compiling with both LifeV and Albany is not allowed at this time.") +endif() + +# PHG currently requires LifeV +if (PHG AND NOT LIFEV) + message(FATAL "Compiling with PHG requires LifeV at this time.") +endif() + +# PHG can only Stokes at present +if (PHG) + list(APPEND CPPDEFS "-DUSE_EXTERNAL_STOKES" "-DMPAS_LI_BUILD_INTERFACE") +endif() + +# driver (files live in E3SM) +list(APPEND RAW_SOURCES + ../../mpas-albany-landice/driver/glc_comp_mct.F + ../../mpas-albany-landice/driver/glc_cpl_indices.F + ../../mpas-albany-landice/driver/glc_mct_vars.F +) + +# shared +list(APPEND RAW_SOURCES + core_landice/shared/mpas_li_constants.F + core_landice/shared/mpas_li_mask.F + core_landice/shared/mpas_li_setup.F +) + +# analysis members +list(APPEND RAW_SOURCES + core_landice/analysis_members/mpas_li_analysis_driver.F + core_landice/analysis_members/mpas_li_global_stats.F + core_landice/analysis_members/mpas_li_regional_stats.F +) + +# mode forward +list(APPEND RAW_SOURCES + core_landice/mode_forward/mpas_li_core.F + core_landice/mode_forward/mpas_li_core_interface.F + core_landice/mode_forward/mpas_li_time_integration.F + core_landice/mode_forward/mpas_li_time_integration_fe.F + 
core_landice/mode_forward/mpas_li_diagnostic_vars.F + core_landice/mode_forward/mpas_li_advection.F + core_landice/mode_forward/mpas_li_calving.F + core_landice/mode_forward/mpas_li_statistics.F + core_landice/mode_forward/mpas_li_velocity.F + core_landice/mode_forward/mpas_li_thermal.F + core_landice/mode_forward/mpas_li_iceshelf_melt.F + core_landice/mode_forward/mpas_li_sia.F + core_landice/mode_forward/mpas_li_velocity_simple.F + core_landice/mode_forward/mpas_li_velocity_external.F + core_landice/mode_forward/mpas_li_subglacial_hydro.F +) + +if (CPPDEFS MATCHES ".*MPAS_LI_BUILD_INTERFACE.*") + list(APPEND RAW_SOURCES core_landice/mode_forward/Interface_velocity_solver.cpp) +endif() + +# Generate core input +handle_st_nl_gen("namelist.landice" "streams.landice stream_list.landice. listed" ${CORE_INPUT_DIR} ${CORE_BLDDIR}) diff --git a/src/core_ocean/ocean.cmake b/src/core_ocean/ocean.cmake new file mode 100644 index 0000000000..287dbb523b --- /dev/null +++ b/src/core_ocean/ocean.cmake @@ -0,0 +1,207 @@ + +# build_options.mk stuff handled here +list(APPEND CPPDEFS "-DCORE_OCEAN") +list(APPEND INCLUDES "${CMAKE_BINARY_DIR}/core_ocean/BGC" "${CMAKE_BINARY_DIR}/core_ocean/shared" "${CMAKE_BINARY_DIR}/core_ocean/analysis_members" "${CMAKE_BINARY_DIR}/core_ocean/cvmix" "${CMAKE_BINARY_DIR}/core_ocean/mode_forward" "${CMAKE_BINARY_DIR}/core_ocean/mode_analysis" "${CMAKE_BINARY_DIR}/core_ocean/mode_init") + +# driver (files live in E3SM) +list(APPEND RAW_SOURCES + ../../mpas-ocean/driver/ocn_comp_mct.F + ../../mpas-ocean/driver/mpaso_cpl_indices.F + ../../mpas-ocean/driver/mpaso_mct_vars.F +) + +# dycore +list(APPEND RAW_SOURCES + core_ocean/mode_forward/mpas_ocn_forward_mode.F + core_ocean/mode_forward/mpas_ocn_time_integration.F + core_ocean/mode_forward/mpas_ocn_time_integration_rk4.F + core_ocean/mode_forward/mpas_ocn_time_integration_split.F + + core_ocean/mode_analysis/mpas_ocn_analysis_mode.F + + core_ocean/mode_init/mpas_ocn_init_mode.F + core_ocean/mode_init/mpas_ocn_init_spherical_utils.F + core_ocean/mode_init/mpas_ocn_init_vertical_grids.F + core_ocean/mode_init/mpas_ocn_init_cell_markers.F + core_ocean/mode_init/mpas_ocn_init_interpolation.F + core_ocean/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F + core_ocean/mode_init/mpas_ocn_init_baroclinic_channel.F + core_ocean/mode_init/mpas_ocn_init_lock_exchange.F + core_ocean/mode_init/mpas_ocn_init_dam_break.F + core_ocean/mode_init/mpas_ocn_init_internal_waves.F + core_ocean/mode_init/mpas_ocn_init_overflow.F + core_ocean/mode_init/mpas_ocn_init_cvmix_WSwSBF.F + core_ocean/mode_init/mpas_ocn_init_iso.F + core_ocean/mode_init/mpas_ocn_init_soma.F + core_ocean/mode_init/mpas_ocn_init_ziso.F + core_ocean/mode_init/mpas_ocn_init_sub_ice_shelf_2D.F + core_ocean/mode_init/mpas_ocn_init_periodic_planar.F + core_ocean/mode_init/mpas_ocn_init_ecosys_column.F + core_ocean/mode_init/mpas_ocn_init_sea_mount.F + core_ocean/mode_init/mpas_ocn_init_global_ocean.F + core_ocean/mode_init/mpas_ocn_init_isomip.F + core_ocean/mode_init/mpas_ocn_init_hurricane.F + core_ocean/mode_init/mpas_ocn_init_isomip_plus.F + core_ocean/mode_init/mpas_ocn_init_tidal_boundary.F + + core_ocean/shared/mpas_ocn_init_routines.F + core_ocean/shared/mpas_ocn_gm.F + core_ocean/shared/mpas_ocn_diagnostics.F + core_ocean/shared/mpas_ocn_diagnostics_routines.F + core_ocean/shared/mpas_ocn_thick_ale.F + core_ocean/shared/mpas_ocn_equation_of_state.F + core_ocean/shared/mpas_ocn_equation_of_state_jm.F + core_ocean/shared/mpas_ocn_equation_of_state_linear.F + 
core_ocean/shared/mpas_ocn_thick_hadv.F + core_ocean/shared/mpas_ocn_thick_vadv.F + core_ocean/shared/mpas_ocn_thick_surface_flux.F + core_ocean/shared/mpas_ocn_vel_hadv_coriolis.F + core_ocean/shared/mpas_ocn_vel_vadv.F + core_ocean/shared/mpas_ocn_vel_hmix.F + core_ocean/shared/mpas_ocn_vel_hmix_del2.F + core_ocean/shared/mpas_ocn_vel_hmix_leith.F + core_ocean/shared/mpas_ocn_vel_hmix_del4.F + core_ocean/shared/mpas_ocn_vel_forcing.F + core_ocean/shared/mpas_ocn_vel_forcing_surface_stress.F + core_ocean/shared/mpas_ocn_vel_forcing_explicit_bottom_drag.F + core_ocean/shared/mpas_ocn_vel_pressure_grad.F + core_ocean/shared/mpas_ocn_vmix.F + core_ocean/shared/mpas_ocn_vmix_coefs_const.F + core_ocean/shared/mpas_ocn_vmix_coefs_rich.F + core_ocean/shared/mpas_ocn_vmix_coefs_tanh.F + core_ocean/shared/mpas_ocn_vmix_coefs_redi.F + core_ocean/shared/mpas_ocn_vmix_cvmix.F + core_ocean/shared/mpas_ocn_tendency.F + core_ocean/shared/mpas_ocn_tracer_hmix.F + core_ocean/shared/mpas_ocn_tracer_hmix_del2.F + core_ocean/shared/mpas_ocn_tracer_hmix_del4.F + core_ocean/shared/mpas_ocn_tracer_hmix_redi.F + core_ocean/shared/mpas_ocn_tracer_advection.F + core_ocean/shared/mpas_ocn_tracer_advection_mono.F + core_ocean/shared/mpas_ocn_tracer_advection_std.F + core_ocean/shared/mpas_ocn_tracer_nonlocalflux.F + core_ocean/shared/mpas_ocn_tracer_short_wave_absorption.F + core_ocean/shared/mpas_ocn_tracer_short_wave_absorption_jerlov.F + core_ocean/shared/mpas_ocn_tracer_short_wave_absorption_variable.F + core_ocean/shared/mpas_ocn_tracer_surface_restoring.F + core_ocean/shared/mpas_ocn_tracer_interior_restoring.F + core_ocean/shared/mpas_ocn_tracer_exponential_decay.F + core_ocean/shared/mpas_ocn_tracer_ideal_age.F + core_ocean/shared/mpas_ocn_tracer_TTD.F + core_ocean/shared/mpas_ocn_tracer_ecosys.F + core_ocean/shared/mpas_ocn_tracer_DMS.F + core_ocean/shared/mpas_ocn_tracer_MacroMolecules.F + core_ocean/shared/mpas_ocn_high_freq_thickness_hmix_del2.F + core_ocean/shared/mpas_ocn_tracer_surface_flux_to_tend.F + core_ocean/shared/mpas_ocn_test.F + core_ocean/shared/mpas_ocn_constants.F + core_ocean/shared/mpas_ocn_forcing.F + core_ocean/shared/mpas_ocn_surface_bulk_forcing.F + core_ocean/shared/mpas_ocn_surface_land_ice_fluxes.F + core_ocean/shared/mpas_ocn_effective_density_in_land_ice.F + core_ocean/shared/mpas_ocn_frazil_forcing.F + core_ocean/shared/mpas_ocn_tidal_forcing.F + core_ocean/shared/mpas_ocn_time_average_coupled.F + core_ocean/shared/mpas_ocn_sea_ice.F + core_ocean/shared/mpas_ocn_framework_forcing.F + core_ocean/shared/mpas_ocn_time_varying_forcing.F + core_ocean/shared/mpas_ocn_wetting_drying.F + core_ocean/shared/mpas_ocn_tidal_potential_forcing.F +) + +set(OCEAN_DRIVER + core_ocean/driver/mpas_ocn_core.F + core_ocean/driver/mpas_ocn_core_interface.F +) +list(APPEND RAW_SOURCES ${OCEAN_DRIVER}) +list(APPEND DISABLE_QSMP ${OCEAN_DRIVER}) + +# Get CVMix +execute_process(COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/core_ocean/get_cvmix.sh + WORKING_DIRECTORY ${CORE_BLDDIR}) + +# Get BGC +execute_process(COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/core_ocean/get_BGC.sh + WORKING_DIRECTORY ${CORE_BLDDIR}) + +# Add CVMix +set(CVMIX_FILES + ${CORE_BLDDIR}/cvmix/cvmix_kinds_and_types.F90 + ${CORE_BLDDIR}/cvmix/cvmix_background.F90 + ${CORE_BLDDIR}/cvmix/cvmix_convection.F90 + ${CORE_BLDDIR}/cvmix/cvmix_ddiff.F90 + ${CORE_BLDDIR}/cvmix/cvmix_kpp.F90 + ${CORE_BLDDIR}/cvmix/cvmix_math.F90 + ${CORE_BLDDIR}/cvmix/cvmix_put_get.F90 + ${CORE_BLDDIR}/cvmix/cvmix_shear.F90 + ${CORE_BLDDIR}/cvmix/cvmix_tidal.F90 + 
${CORE_BLDDIR}/cvmix/cvmix_utils.F90 +) + +# Add BGC +set(BGC_FILES + ${CORE_BLDDIR}/BGC/BGC_mod.F90 + ${CORE_BLDDIR}/BGC/BGC_parms.F90 + ${CORE_BLDDIR}/BGC/DMS_mod.F90 + ${CORE_BLDDIR}/BGC/DMS_parms.F90 + ${CORE_BLDDIR}/BGC/MACROS_mod.F90 + ${CORE_BLDDIR}/BGC/MACROS_parms.F90 + ${CORE_BLDDIR}/BGC/co2calc.F90 +) + +list(APPEND RAW_SOURCES ${CVMIX_FILES} ${BGC_FILES}) +list(APPEND NO_PREPROCESS ${CVMIX_FILES} ${BGC_FILES}) + +# Add analysis members +list(APPEND RAW_SOURCES + core_ocean/analysis_members/mpas_ocn_global_stats.F + core_ocean/analysis_members/mpas_ocn_okubo_weiss.F + core_ocean/analysis_members/mpas_ocn_okubo_weiss_eigenvalues.c + core_ocean/analysis_members/mpas_ocn_layer_volume_weighted_averages.F + core_ocean/analysis_members/mpas_ocn_surface_area_weighted_averages.F + core_ocean/analysis_members/mpas_ocn_water_mass_census.F + core_ocean/analysis_members/mpas_ocn_meridional_heat_transport.F + core_ocean/analysis_members/mpas_ocn_test_compute_interval.F + core_ocean/analysis_members/mpas_ocn_high_frequency_output.F + core_ocean/analysis_members/mpas_ocn_zonal_mean.F + core_ocean/analysis_members/mpas_ocn_lagrangian_particle_tracking_interpolations.F + core_ocean/analysis_members/mpas_ocn_particle_list.F + core_ocean/analysis_members/mpas_ocn_lagrangian_particle_tracking_reset.F + core_ocean/analysis_members/mpas_ocn_lagrangian_particle_tracking.F + core_ocean/analysis_members/mpas_ocn_eliassen_palm.F + core_ocean/analysis_members/mpas_ocn_time_filters.F + core_ocean/analysis_members/mpas_ocn_mixed_layer_depths.F + core_ocean/analysis_members/mpas_ocn_pointwise_stats.F + core_ocean/analysis_members/mpas_ocn_debug_diagnostics.F + core_ocean/analysis_members/mpas_ocn_time_series_stats.F + core_ocean/analysis_members/mpas_ocn_regional_stats.F + core_ocean/analysis_members/mpas_ocn_rpn_calculator.F + core_ocean/analysis_members/mpas_ocn_transect_transport.F + core_ocean/analysis_members/mpas_ocn_eddy_product_variables.F + core_ocean/analysis_members/mpas_ocn_moc_streamfunction.F + core_ocean/analysis_members/mpas_ocn_analysis_driver.F +) + +# add accelerator/gpu flags +list(APPEND ADD_ACC_FLAGS + core_ocean/shared/mpas_ocn_equation_of_state_jm.f90 + core_ocean/shared/mpas_ocn_mesh.f90 + core_ocean/shared/mpas_ocn_surface_bulk_forcing.f90 + core_ocean/shared/mpas_ocn_surface_land_ice_fluxes.f90 + core_ocean/shared/mpas_ocn_tendency.f90 + core_ocean/shared/mpas_ocn_vel_forcing_explicit_bottom_drag.f90 + core_ocean/shared/mpas_ocn_vel_forcing_surface_stress.f90 + core_ocean/shared/mpas_ocn_vel_hadv_coriolis.f90 + core_ocean/shared/mpas_ocn_vel_hmix_del2.f90 + core_ocean/shared/mpas_ocn_vel_hmix_del4.f90 + core_ocean/shared/mpas_ocn_vel_hmix_leith.f90 + core_ocean/shared/mpas_ocn_vel_pressure_grad.f90 + core_ocean/shared/mpas_ocn_vel_vadv.f90 +) + +# Generate core input +handle_st_nl_gen( + "namelist.ocean;namelist.ocean.forward mode=forward;namelist.ocean.analysis mode=analysis;namelist.ocean.init mode=init" + "streams.ocean stream_list.ocean. mutable;streams.ocean.forward stream_list.ocean.forward. mutable mode=forward;streams.ocean.analysis stream_list.ocean.analysis. mutable mode=analysis;streams.ocean.init stream_list.ocean.init. 
mutable mode=init" + ${CORE_INPUT_DIR} ${CORE_BLDDIR} +) diff --git a/src/core_seaice/seaice.cmake b/src/core_seaice/seaice.cmake new file mode 100644 index 0000000000..0ac2b0dd49 --- /dev/null +++ b/src/core_seaice/seaice.cmake @@ -0,0 +1,108 @@ + +# build_options.mk stuff handled here +list(APPEND CPPDEFS "-DCORE_SEAICE" "-Dcoupled" "-DCCSMCOUPLED") +list(APPEND INCLUDES "${CMAKE_BINARY_DIR}/core_seaice/column" "${CMAKE_BINARY_DIR}/core_seaice/shared" "${CMAKE_BINARY_DIR}/core_seaice/analysis_members" "${CMAKE_BINARY_DIR}/core_seaice/model_forward") + + +# driver (files live in E3SM) +list(APPEND RAW_SOURCES + ../../mpas-seaice/driver/ice_comp_mct.F + ../../mpas-seaice/driver/mpassi_cpl_indices.F + ../../mpas-seaice/driver/mpassi_mct_vars.F +) + +# column +list(APPEND RAW_SOURCES + core_seaice/column/ice_colpkg.F90 + core_seaice/column/ice_kinds_mod.F90 + core_seaice/column/ice_warnings.F90 + core_seaice/column/ice_colpkg_shared.F90 + core_seaice/column/constants/cesm/ice_constants_colpkg.F90 + core_seaice/column/ice_therm_shared.F90 + core_seaice/column/ice_orbital.F90 + core_seaice/column/ice_mushy_physics.F90 + core_seaice/column/ice_therm_mushy.F90 + core_seaice/column/ice_atmo.F90 + core_seaice/column/ice_age.F90 + core_seaice/column/ice_firstyear.F90 + core_seaice/column/ice_flux_colpkg.F90 + core_seaice/column/ice_meltpond_cesm.F90 + core_seaice/column/ice_meltpond_lvl.F90 + core_seaice/column/ice_meltpond_topo.F90 + core_seaice/column/ice_therm_vertical.F90 + core_seaice/column/ice_therm_bl99.F90 + core_seaice/column/ice_therm_0layer.F90 + core_seaice/column/ice_itd.F90 + core_seaice/column/ice_colpkg_tracers.F90 + core_seaice/column/ice_therm_itd.F90 + core_seaice/column/ice_shortwave.F90 + core_seaice/column/ice_mechred.F90 + core_seaice/column/ice_aerosol.F90 + core_seaice/column/ice_brine.F90 + core_seaice/column/ice_algae.F90 + core_seaice/column/ice_zbgc.F90 + core_seaice/column/ice_zbgc_shared.F90 + core_seaice/column/ice_zsalinity.F90 + core_seaice/column/ice_snow.F90 +) + +# shared +list(APPEND RAW_SOURCES + core_seaice/shared/mpas_seaice_time_integration.F + core_seaice/shared/mpas_seaice_advection_incremental_remap_tracers.F + core_seaice/shared/mpas_seaice_advection_incremental_remap.F + core_seaice/shared/mpas_seaice_advection_upwind.F + core_seaice/shared/mpas_seaice_advection.F + core_seaice/shared/mpas_seaice_velocity_solver_unit_tests.F + core_seaice/shared/mpas_seaice_velocity_solver.F + core_seaice/shared/mpas_seaice_velocity_solver_weak.F + core_seaice/shared/mpas_seaice_velocity_solver_variational.F + core_seaice/shared/mpas_seaice_velocity_solver_wachspress.F + core_seaice/shared/mpas_seaice_velocity_solver_pwl.F + core_seaice/shared/mpas_seaice_velocity_solver_variational_shared.F + core_seaice/shared/mpas_seaice_velocity_solver_constitutive_relation.F + core_seaice/shared/mpas_seaice_forcing.F + core_seaice/shared/mpas_seaice_initialize.F + core_seaice/shared/mpas_seaice_testing.F + core_seaice/shared/mpas_seaice_unit_test.F + core_seaice/shared/mpas_seaice_mesh.F + core_seaice/shared/mpas_seaice_diagnostics.F + core_seaice/shared/mpas_seaice_numerics.F + core_seaice/shared/mpas_seaice_constants.F + core_seaice/shared/mpas_seaice_column.F + core_seaice/shared/mpas_seaice_diagnostics.F + core_seaice/shared/mpas_seaice_error.F +) + +# analysis members +list(APPEND RAW_SOURCES + core_seaice/analysis_members/mpas_seaice_analysis_driver.F + core_seaice/analysis_members/mpas_seaice_high_frequency_output.F + core_seaice/analysis_members/mpas_seaice_temperatures.F 
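Aside on the ADD_ACC_FLAGS list in ocean.cmake above: it marks a handful of generated .f90 files for accelerator compilation. Purely as an illustration of the kind of construct those flags are meant to compile, and not code taken from any of the listed files, a minimal OpenACC loop in Fortran looks like this; with OpenACC disabled the directives are treated as comments and the loop runs on the host.

    ! Minimal, hypothetical OpenACC kernel in Fortran.
    program acc_demo
       implicit none
       integer, parameter :: n = 1000
       real :: a(n), b(n)
       integer :: i

       b = 1.0
       !$acc parallel loop copyin(b) copyout(a)
       do i = 1, n
          a(i) = 2.0 * b(i)
       end do
       !$acc end parallel loop

       print *, 'a(1) = ', a(1), ' a(n) = ', a(n)
    end program acc_demo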
+ core_seaice/analysis_members/mpas_seaice_regional_statistics.F + core_seaice/analysis_members/mpas_seaice_ridging_diagnostics.F + core_seaice/analysis_members/mpas_seaice_conservation_check.F + core_seaice/analysis_members/mpas_seaice_geographical_vectors.F + core_seaice/analysis_members/mpas_seaice_ice_present.F + core_seaice/analysis_members/mpas_seaice_time_series_stats.F + core_seaice/analysis_members/mpas_seaice_load_balance.F + core_seaice/analysis_members/mpas_seaice_maximum_ice_presence.F + core_seaice/analysis_members/mpas_seaice_miscellaneous.F + core_seaice/analysis_members/mpas_seaice_area_variables.F + core_seaice/analysis_members/mpas_seaice_pond_diagnostics.F + core_seaice/analysis_members/mpas_seaice_deactivate_unneeded_fields.F + core_seaice/analysis_members/mpas_seaice_pointwise_stats.F + core_seaice/analysis_members/mpas_seaice_unit_conversion.F + core_seaice/analysis_members/mpas_seaice_ice_shelves.F +) + +# model_forward (DISABLE qsmp for these) +set(SEAICE_MODEL_FORWARD + core_seaice/model_forward/mpas_seaice_core.F + core_seaice/model_forward/mpas_seaice_core_interface.F +) +list(APPEND RAW_SOURCES ${SEAICE_MODEL_FORWARD}) +list(APPEND DISABLE_QSMP ${SEAICE_MODEL_FORWARD}) + +# Generate core input +handle_st_nl_gen("namelist.seaice" "streams.seaice stream_list.seaice. listed" ${CORE_INPUT_DIR} ${CORE_BLDDIR}) diff --git a/src/driver/mpas.F b/src/driver/mpas.F index 32092a010c..d0370fd577 100644 --- a/src/driver/mpas.F +++ b/src/driver/mpas.F @@ -8,17 +8,18 @@ program mpas use mpas_subdriver + use mpas_derived_types, only : core_type, domain_type implicit none - type (core_type), pointer :: corelist - type (domain_type), pointer :: domain_ptr + type (core_type), pointer :: corelist => null() + type (domain_type), pointer :: domain => null() - call mpas_init(corelist, domain_ptr) + call mpas_init(corelist, domain) - call mpas_run(domain_ptr) + call mpas_run(domain) - call mpas_finalize(corelist, domain_ptr) + call mpas_finalize(corelist, domain) stop diff --git a/src/driver/mpas_subdriver.F b/src/driver/mpas_subdriver.F index 4da7ae98e0..2705777a25 100644 --- a/src/driver/mpas_subdriver.F +++ b/src/driver/mpas_subdriver.F @@ -35,7 +35,6 @@ module mpas_subdriver use test_core_interface #endif - type (dm_info), pointer :: dminfo contains @@ -51,8 +50,8 @@ subroutine mpas_init(corelist, domain_ptr, mpi_comm) implicit none - type (core_type), intent(out), pointer :: corelist - type (domain_type), intent(out), pointer :: domain_ptr + type (core_type), intent(inout), pointer :: corelist + type (domain_type), intent(inout), pointer :: domain_ptr integer, intent(in), optional :: mpi_comm integer :: iArg, nArgs @@ -83,7 +82,7 @@ subroutine mpas_init(corelist, domain_ptr, mpi_comm) character(len=StrKIND) :: iotype logical :: streamsExists integer :: mesh_iotype - integer, save :: domainID = -1 + integer, save :: domainID = 0 interface subroutine xml_stream_parser(xmlname, mgr_p, comm, ierr) bind(c) @@ -154,13 +153,13 @@ end subroutine xml_stream_get_attributes call mpas_allocate_domain(domain_ptr) - domainID = domainID + 1 domain_ptr % domainID = domainID + domainID = domainID + 1 ! ! Initialize infrastructure ! 
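The driver hunks above nullify corelist and domain at declaration and change the subdriver dummies from intent(out) to intent(inout) pointers, so that the association status set in the caller is still defined when mpas_init looks at it. A minimal sketch of that idiom follows, with invented names (demo_subdriver, demo_init, demo_finalize); it is not the MPAS subdriver.

    ! Sketch of "nullify at declaration, pass as intent(inout) pointer".
    module demo_subdriver
       implicit none
       type :: domain_type
          integer :: domainID = 0
       end type domain_type
    contains
       subroutine demo_init(domain)
          ! intent(inout) preserves the incoming association status, so the
          ! initial => null() in the caller is meaningful here.
          type (domain_type), pointer, intent(inout) :: domain
          if (.not. associated(domain)) then
             allocate(domain)
          end if
       end subroutine demo_init

       subroutine demo_finalize(domain)
          type (domain_type), pointer, intent(inout) :: domain
          if (associated(domain)) then
             deallocate(domain)
             nullify(domain)
          end if
       end subroutine demo_finalize
    end module demo_subdriver

    program demo_driver
       use demo_subdriver, only : domain_type, demo_init, demo_finalize
       implicit none
       type (domain_type), pointer :: domain => null()

       call demo_init(domain)
       print *, 'domainID = ', domain % domainID
       call demo_finalize(domain)
    end program demo_driver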
- call mpas_framework_init_phase1(domain_ptr % dminfo, mpi_comm) + call mpas_framework_init_phase1(domain_ptr % dminfo, mpi_comm=mpi_comm) #ifdef CORE_ATMOSPHERE @@ -345,11 +344,13 @@ end subroutine mpas_init subroutine mpas_run(domain_ptr) + use mpas_log, only: mpas_log_info implicit none type (domain_type), intent(inout), pointer :: domain_ptr + integer :: iErr if ( associated(domain_ptr % logInfo) ) mpas_log_info => domain_ptr % logInfo diff --git a/src/external/Makefile b/src/external/Makefile index 4409d9c704..afb7533445 100644 --- a/src/external/Makefile +++ b/src/external/Makefile @@ -3,7 +3,7 @@ all: esmf_time ezxml-lib esmf_time: - ( cd esmf_time_f90; $(MAKE) FC="$(FC) $(FFLAGS)" CPP="$(CPP)" CPPFLAGS="$(CPPFLAGS) -DHIDE_MPI" GEN_F90=$(GEN_F90) ) + ( cd esmf_time_f90; $(MAKE) FC="$(FC)" FFLAGS="$(FFLAGS)" CPP="$(CPP)" CPPFLAGS="$(CPPFLAGS) -DHIDE_MPI" GEN_F90=$(GEN_F90) ) ezxml-lib: ( cd ezxml; $(MAKE) ) diff --git a/src/framework/framework.cmake b/src/framework/framework.cmake new file mode 100644 index 0000000000..f74747fb4f --- /dev/null +++ b/src/framework/framework.cmake @@ -0,0 +1,35 @@ +# framework +list(APPEND COMMON_RAW_SOURCES + framework/mpas_kind_types.F + framework/mpas_framework.F + framework/mpas_timer.F + framework/mpas_timekeeping.F + framework/mpas_constants.F + framework/mpas_attlist.F + framework/mpas_hash.F + framework/mpas_sort.F + framework/mpas_block_decomp.F + framework/mpas_block_creator.F + framework/mpas_dmpar.F + framework/mpas_abort.F + framework/mpas_decomp.F + framework/mpas_threading.F + framework/mpas_io.F + framework/mpas_io_streams.F + framework/mpas_bootstrapping.F + framework/mpas_io_units.F + framework/mpas_stream_manager.F + framework/mpas_stream_list.F + framework/mpas_forcing.F + framework/mpas_c_interfacing.F + framework/random_id.c + framework/pool_hash.c + framework/mpas_derived_types.F + framework/mpas_domain_routines.F + framework/mpas_field_routines.F + framework/mpas_pool_routines.F + framework/xml_stream_parser.c + framework/regex_matching.c + framework/mpas_field_accessor.F + framework/mpas_log.F +) diff --git a/src/framework/mpas_dmpar.F b/src/framework/mpas_dmpar.F index 4acf7695f6..42bb827efa 100644 --- a/src/framework/mpas_dmpar.F +++ b/src/framework/mpas_dmpar.F @@ -371,9 +371,7 @@ subroutine mpas_dmpar_abort(dminfo)!{{{ #ifdef _MPI integer :: mpi_ierr, mpi_errcode - if ( dminfo % initialized_mpi ) then - call MPI_Abort(dminfo % comm, mpi_errcode, mpi_ierr) - end if + call MPI_Abort(dminfo % comm, mpi_errcode, mpi_ierr) #endif stop @@ -8670,7 +8668,12 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field1d_integer(exchangeGroup, fiel commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + ! workaround for PGI compiler (CPR): ICE on pointers in private clause of omp-do workshare + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -8735,7 +8738,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field2d_integer(exchangeGroup, fiel commListPtr => exchangeGroup % sendList if (.not. 
associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -8803,7 +8810,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field3d_integer(exchangeGroup, fiel commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, k, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -8873,7 +8884,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field1d_real(exchangeGroup, field, commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -8938,7 +8953,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field2d_real(exchangeGroup, field, commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -9005,7 +9024,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field3d_real(exchangeGroup, field, commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, k, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -9075,7 +9098,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field4d_real(exchangeGroup, field, commListPtr => exchangeGroup % sendList if (.not. associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, k, l, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem @@ -9148,7 +9175,11 @@ subroutine mpas_dmpar_exch_group_pack_buffer_field5d_real(exchangeGroup, field, commListPtr => exchangeGroup % sendList if (.not. 
associated(commListPtr)) return commListSize = commListPtr % commListSize +#ifdef CPRPGI + !$omp do private(listPosition, bufferOffset, nAdded, iExch, iBuffer) +#else !$omp do private(commListPtr, listPosition, bufferOffset, nAdded, fieldCursor, exchListPtr, iExch, j, k, l, m, iBuffer) +#endif do listItem = 1, commListSize commListPtr => exchangeGroup % sendList do listPosition = 2, listItem diff --git a/src/framework/mpas_domain_types.inc b/src/framework/mpas_domain_types.inc index 66d0c1efc5..0fd5a882ff 100644 --- a/src/framework/mpas_domain_types.inc +++ b/src/framework/mpas_domain_types.inc @@ -26,6 +26,9 @@ character (len=StrKIND) :: mesh_spec = '' !< mesh_spec attribute, read in from input file. character (len=StrKIND) :: parent_id = '' !< parent_id attribute, read in from input file. + ! Unique global ID number for this domain + integer :: domainID + ! Pointer to timer root type (mpas_timer_root), pointer :: timer_root => null() @@ -34,7 +37,4 @@ ! Domain_type is a linked list type (domain_type), pointer :: next => null() - - ! Unique global ID number for this domain - integer :: domainID end type domain_type diff --git a/src/framework/mpas_field_routines.F b/src/framework/mpas_field_routines.F index a5f6960749..0ae6e169e8 100644 --- a/src/framework/mpas_field_routines.F +++ b/src/framework/mpas_field_routines.F @@ -108,6 +108,22 @@ module mpas_field_routines module procedure mpas_deallocate_field1d_char end interface + interface mpas_deallocate_field_target + module procedure mpas_deallocate_field0d_logical_target + module procedure mpas_deallocate_field0d_integer_target + module procedure mpas_deallocate_field1d_integer_target + module procedure mpas_deallocate_field2d_integer_target + module procedure mpas_deallocate_field3d_integer_target + module procedure mpas_deallocate_field0d_real_target + module procedure mpas_deallocate_field1d_real_target + module procedure mpas_deallocate_field2d_real_target + module procedure mpas_deallocate_field3d_real_target + module procedure mpas_deallocate_field4d_real_target + module procedure mpas_deallocate_field5d_real_target + module procedure mpas_deallocate_field0d_char_target + module procedure mpas_deallocate_field1d_char_target + end interface + contains !*********************************************************************** @@ -1388,43 +1404,39 @@ end subroutine mpas_deallocate_scratch_field1d_char!}}} !*********************************************************************** ! -! routine mpas_deallocate_field0d_logical +! routine mpas_deallocate_field0D_logical ! -!> \brief MPAS 0D logical deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \brief MPAS 0D int deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 0D logical field. +!> This routine deallocates a 0-d logical field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! 
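The CPRPGI guards added to mpas_dmpar.F above select one of two !$omp do directives at preprocessing time, with the PGI branch dropping pointer variables from the private clause to avoid an internal compiler error. The toy program below shows only the guard pattern itself (conditionally compiled OpenMP directives); it assumes the file is preprocessed, for example built as a .F file with -DCPRPGI, and the clause difference shown here is illustrative rather than the MPAS one.

    ! Toy illustration of compiler-conditional OpenMP directives (not MPAS code).
    program omp_guard_demo
       implicit none
       integer, parameter :: n = 16
       integer :: i, tmp
       integer :: results(n)

       !$omp parallel
    #ifdef CPRPGI
       ! Alternate branch selected for the PGI build
       !$omp do private(tmp)
    #else
       !$omp do private(tmp) schedule(static)
    #endif
       do i = 1, n
          tmp = i * i
          results(i) = tmp
       end do
       !$omp end do
       !$omp end parallel

       print *, results
    end program omp_guard_demo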
!----------------------------------------------------------------------- subroutine mpas_deallocate_field0d_logical(f)!{{{ + + implicit none + type (field0dLogical), pointer :: f !< Input: Field to deallocate - type (field0dLogical), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() - - if ( threadNum == 0 ) then - f_cursor => f - - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field0dLogical), pointer :: f_cursor, f_next - deallocate(f_cursor) - f_cursor => f - end do + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field0d_logical!}}} @@ -1432,43 +1444,39 @@ end subroutine mpas_deallocate_field0d_logical!}}} !*********************************************************************** ! -! routine mpas_deallocate_field0d_integer +! routine mpas_deallocate_field0D_integer ! -!> \brief MPAS 0D integer deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \brief MPAS 0D int deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 0D integer field. +!> This routine deallocates a 0-d integer field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field0d_integer(f)!{{{ + + implicit none + type (field0dInteger), pointer :: f !< Input: Field to deallocate - type (field0dInteger), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() - - if ( threadNum == 0 ) then - f_cursor => f - - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field0dInteger), pointer :: f_cursor, f_next + + call mpas_deallocate_field_target(f) - deallocate(f_cursor) - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field0d_integer!}}} @@ -1478,45 +1486,37 @@ end subroutine mpas_deallocate_field0d_integer!}}} ! ! routine mpas_deallocate_field1D_integer ! -!> \brief MPAS 1D integer deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \brief MPAS 1D int deallocation routine. +!> \author Michael G. 
Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 1D integer field. +!> This routine deallocates a 1-d integer field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field1d_integer(f)!{{{ + + implicit none + type (field1dInteger), pointer :: f !< Input: Field to deallocate - type (field1dInteger), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + type (field1dInteger), pointer :: f_cursor, f_next - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + call mpas_deallocate_field_target(f) - deallocate(f_cursor) - - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field1d_integer!}}} @@ -1526,45 +1526,37 @@ end subroutine mpas_deallocate_field1d_integer!}}} ! ! routine mpas_deallocate_field2D_integer ! -!> \brief MPAS 2D integer deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \brief MPAS 2D int deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 2D integer field. +!> This routine deallocates a 2-d integer field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! 
!----------------------------------------------------------------------- subroutine mpas_deallocate_field2d_integer(f)!{{{ + + implicit none + type (field2dInteger), pointer :: f !< Input: Field to deallocate - type (field2dInteger), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + type (field2dInteger), pointer :: f_cursor, f_next - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + call mpas_deallocate_field_target(f) - deallocate(f_cursor) - - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field2d_integer!}}} @@ -1574,45 +1566,37 @@ end subroutine mpas_deallocate_field2d_integer!}}} ! ! routine mpas_deallocate_field3D_integer ! -!> \brief MPAS 3D integer deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \brief MPAS 3D int deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 3D integer field. +!> This routine deallocates a 3-d integer field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field3d_integer(f)!{{{ + + implicit none + type (field3dInteger), pointer :: f !< Input: Field to deallocate - type (field3dInteger), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + type (field3dInteger), pointer :: f_cursor, f_next - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + call mpas_deallocate_field_target(f) - deallocate(f_cursor) - - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field3d_integer!}}} @@ -1620,44 +1604,39 @@ end subroutine mpas_deallocate_field3d_integer!}}} !*********************************************************************** ! -! routine mpas_deallocate_field0d_real +! routine mpas_deallocate_field0D_real ! !> \brief MPAS 0D real deallocation routine. 
-!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 0D real field. +!> This routine deallocates a 0-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field0d_real(f)!{{{ - type (field0dReal), pointer :: f !< Input: Field to deallocate - type (field0dReal), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + implicit none - f_cursor => f + type (field0dReal), pointer :: f !< Input: Field to deallocate - if ( threadNum == 0 ) then - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field0dReal), pointer :: f_cursor, f_next - deallocate(f_cursor) - - f_cursor => f - end do + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field0d_real!}}} @@ -1668,44 +1647,36 @@ end subroutine mpas_deallocate_field0d_real!}}} ! routine mpas_deallocate_field1D_real ! !> \brief MPAS 1D real deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 1D real field. +!> This routine deallocates a 1-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! 
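All of the rewritten non-target deallocation routines above reduce to the same two-step shape: free the per-node storage through mpas_deallocate_field_target, then walk the linked list with a cursor/next pair and deallocate each node. Below is a minimal, self-contained sketch of that list-walk idiom using a toy node type (node_type is invented; it is not one of the MPAS field types).

    ! Toy linked-list teardown using the f_cursor / f_next idiom from the
    ! hunks above: save 'next' before deallocating the current node.
    program list_teardown_demo
       implicit none

       type :: node_type
          integer :: value = 0
          real, pointer :: array(:) => null()
          type (node_type), pointer :: next => null()
       end type node_type

       type (node_type), pointer :: head, cursor, next_node
       integer :: i

       ! Build a short list
       nullify(head)
       do i = 3, 1, -1
          allocate(cursor)
          cursor % value = i
          allocate(cursor % array(i))
          cursor % next => head
          head => cursor
       end do

       ! Tear it down: node-owned storage first, then the node itself
       cursor => head
       do while (associated(cursor))
          next_node => cursor % next
          if (associated(cursor % array)) deallocate(cursor % array)
          deallocate(cursor)
          cursor => next_node
       end do
       nullify(head)
    end program list_teardown_demo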
!----------------------------------------------------------------------- subroutine mpas_deallocate_field1d_real(f)!{{{ - type (field1dReal), pointer :: f !< Input: Field to deallocate - type (field1dReal), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + implicit none - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field1dReal), pointer :: f !< Input: Field to deallocate + + type (field1dReal), pointer :: f_cursor, f_next - deallocate(f_cursor) + call mpas_deallocate_field_target(f) - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field1d_real!}}} @@ -1716,44 +1687,36 @@ end subroutine mpas_deallocate_field1d_real!}}} ! routine mpas_deallocate_field2D_real ! !> \brief MPAS 2D real deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 2D real field. +!> This routine deallocates a 2-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field2d_real(f)!{{{ - type (field2dReal), pointer :: f !< Input: Field to deallocate - type (field2dReal), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + implicit none - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field2dReal), pointer :: f !< Input: Field to deallocate - deallocate(f_cursor) + type (field2dReal), pointer :: f_cursor, f_next - f_cursor => f - end do + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field2d_real!}}} @@ -1764,44 +1727,36 @@ end subroutine mpas_deallocate_field2d_real!}}} ! routine mpas_deallocate_field3D_real ! !> \brief MPAS 3D real deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. 
Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 3D real field. +!> This routine deallocates a 3-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field3d_real(f)!{{{ - type (field3dReal), pointer :: f !< Input: Field to deallocate - type (field3dReal), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + implicit none - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field3dReal), pointer :: f !< Input: Field to deallocate + + type (field3dReal), pointer :: f_cursor, f_next - deallocate(f_cursor) + call mpas_deallocate_field_target(f) - f_cursor => f - end do + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field3d_real!}}} @@ -1812,44 +1767,36 @@ end subroutine mpas_deallocate_field3d_real!}}} ! routine mpas_deallocate_field4D_real ! !> \brief MPAS 4D real deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 4D real field. +!> This routine deallocates a 4-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! 
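Each routine above also keeps the guard that only thread 0 performs the teardown (mpas_threading_get_thread_num() == 0), so a deallocate reached from inside a threaded region runs exactly once. The sketch below is a rough standalone analogue of that guard using omp_get_thread_num from omp_lib in place of MPAS's own threading wrapper; compile it with OpenMP enabled.

    ! Rough analogue of the "only thread 0 deallocates" guard.
    program thread_zero_demo
       use omp_lib, only : omp_get_thread_num
       implicit none
       real, pointer :: work(:) => null()

       allocate(work(100))
       work = 1.0

       !$omp parallel
       ! Every thread reaches this point, but only thread 0 frees the storage.
       if (omp_get_thread_num() == 0) then
          if (associated(work)) then
             deallocate(work)
             nullify(work)
          end if
       end if
       !$omp end parallel

       print *, 'work associated after teardown: ', associated(work)
    end program thread_zero_demo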
!----------------------------------------------------------------------- subroutine mpas_deallocate_field4d_real(f)!{{{ - type (field4dReal), pointer :: f !< Input: Field to deallocate - type (field4dReal), pointer :: f_cursor - integer :: threadNum - integer :: i, iErr - threadNum = mpas_threading_get_thread_num() + implicit none - if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + type (field4dReal), pointer :: f !< Input: Field to deallocate - deallocate(f_cursor) + type (field4dReal), pointer :: f_cursor, f_next - f_cursor => f - end do + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do end if end subroutine mpas_deallocate_field4d_real!}}} @@ -1860,139 +1807,753 @@ end subroutine mpas_deallocate_field4d_real!}}} ! routine mpas_deallocate_field5D_real ! !> \brief MPAS 5D real deallocation routine. -!> \author Doug Jacobsen -!> \date 04/02/13 +!> \author Michael G. Duda +!> \date 4 November 2019 !> \details -!> This routine deallocates a 5D real field. +!> This routine deallocates a 5-d real field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. ! !----------------------------------------------------------------------- subroutine mpas_deallocate_field5d_real(f)!{{{ + + implicit none + type (field5dReal), pointer :: f !< Input: Field to deallocate - type (field5dReal), pointer :: f_cursor + + type (field5dReal), pointer :: f_cursor, f_next + + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field5d_real!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field0D_char +! +!> \brief MPAS 0D real deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 +!> \details +!> This routine deallocates a 0-d character field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. +! 
+!----------------------------------------------------------------------- + subroutine mpas_deallocate_field0d_char(f)!{{{ + + implicit none + + type (field0dChar), pointer :: f !< Input: Field to deallocate + + type (field0dChar), pointer :: f_cursor, f_next + + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field0d_char!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field1D_char +! +!> \brief MPAS 1D char deallocation routine. +!> \author Michael G. Duda +!> \date 4 November 2019 +!> \details +!> This routine deallocates a 1-d character field. Upon return, all memory +!> that is uniquely associated with the field pointed to by f +!> (i.e., array storage, attribute lists, constitutent names; *not* sendList, +!> recvList, block, etc. that are potentially referenced by other fields) +!> will have been freed, and the input pointer will be nullified. +!> +!> For fields that are elements of an array of fields, do not use this +!> routine, and instead use mpas_deallocate_field_target. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field1d_char(f)!{{{ + + implicit none + + type (field1dChar), pointer :: f !< Input: Field to deallocate + + type (field1dChar), pointer :: f_cursor, f_next + + call mpas_deallocate_field_target(f) + + if ( mpas_threading_get_thread_num() == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + deallocate(f_cursor) + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field1d_char!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field0D_logical_target +! +!> \brief MPAS 0D int deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 0D logical field. +! 
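The interface block added near the top of mpas_field_routines.F maps the single generic name mpas_deallocate_field_target onto one specific module procedure per field type and rank, and the compiler resolves each call from the declared type of the actual argument. The following is a minimal sketch of that generic-interface pattern with two toy types; demo_generic, field1d, and field2d are invented names, not MPAS types.

    ! Minimal generic-interface sketch: one generic name, one specific
    ! procedure per type, resolved at compile time.
    module demo_generic
       implicit none

       type :: field1d
          real, pointer :: array(:) => null()
       end type field1d

       type :: field2d
          real, pointer :: array(:,:) => null()
       end type field2d

       interface demo_deallocate
          module procedure demo_deallocate_1d
          module procedure demo_deallocate_2d
       end interface demo_deallocate

    contains

       subroutine demo_deallocate_1d(f)
          type (field1d), intent(inout) :: f
          if (associated(f % array)) deallocate(f % array)
       end subroutine demo_deallocate_1d

       subroutine demo_deallocate_2d(f)
          type (field2d), intent(inout) :: f
          if (associated(f % array)) deallocate(f % array)
       end subroutine demo_deallocate_2d

    end module demo_generic

    program demo_generic_use
       use demo_generic
       implicit none
       type (field1d) :: a
       type (field2d) :: b

       allocate(a % array(10), b % array(4, 5))
       call demo_deallocate(a)   ! resolves to demo_deallocate_1d
       call demo_deallocate(b)   ! resolves to demo_deallocate_2d
       print *, associated(a % array), associated(b % array)
    end program demo_generic_use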
+!----------------------------------------------------------------------- + subroutine mpas_deallocate_field0d_logical_target(f)!{{{ + + implicit none + + type (field0dLogical), target :: f !< Input: Field to deallocate + + type (field0dLogical), pointer :: f_cursor, f_next integer :: threadNum integer :: i, iErr threadNum = mpas_threading_get_thread_num() if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if - deallocate(f_cursor) + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if - f_cursor => f - end do + f_cursor => f_next + end do end if - end subroutine mpas_deallocate_field5d_real!}}} + end subroutine mpas_deallocate_field0d_logical_target!}}} !*********************************************************************** ! -! routine mpas_deallocate_field0D_char +! routine mpas_deallocate_field0D_integer_target ! -!> \brief MPAS 0D character deallocation routine. -!> \author Doug Jacobsen +!> \brief MPAS 0D int deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda !> \date 04/02/13 !> \details -!> This routine deallocates a 0D character field. +!> This routine deallocates a 0D int field. ! !----------------------------------------------------------------------- - subroutine mpas_deallocate_field0d_char(f)!{{{ - type (field0dChar), pointer :: f !< Input: Field to deallocate - type (field0dChar), pointer :: f_cursor + subroutine mpas_deallocate_field0d_integer_target(f)!{{{ + + implicit none + + type (field0dInteger), target :: f !< Input: Field to deallocate + + type (field0dInteger), pointer :: f_cursor, f_next integer :: threadNum integer :: i, iErr threadNum = mpas_threading_get_thread_num() if ( threadNum == 0 ) then - f_cursor => f - - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if - deallocate(f_cursor) - f_cursor => f - end do + f_cursor => f_next + end do end if - end subroutine mpas_deallocate_field0d_char!}}} + end subroutine mpas_deallocate_field0d_integer_target!}}} !*********************************************************************** ! -! routine mpas_deallocate_field1D_char +! routine mpas_deallocate_field1D_integer_target ! -!> \brief MPAS 1D character deallocation routine. 
-!> \author Doug Jacobsen +!> \brief MPAS 1D int deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda !> \date 04/02/13 !> \details -!> This routine deallocates a 1D character field. +!> This routine deallocates a 1D int field. ! !----------------------------------------------------------------------- - subroutine mpas_deallocate_field1d_char(f)!{{{ - type (field1dChar), pointer :: f !< Input: Field to deallocate - type (field1dChar), pointer :: f_cursor + subroutine mpas_deallocate_field1d_integer_target(f)!{{{ + + implicit none + + type (field1dInteger), target :: f !< Input: Field to deallocate + + type (field1dInteger), pointer :: f_cursor, f_next integer :: threadNum integer :: i, iErr threadNum = mpas_threading_get_thread_num() if ( threadNum == 0 ) then - f_cursor => f - do while(associated(f_cursor)) - if(associated(f % next)) then - f => f % next - else - nullify(f) - end if - - if(associated(f_cursor % array)) then - deallocate(f_cursor % array) - end if - - if ( associated(f_cursor % attLists) ) then - do i = 1, size(f_cursor % attLists, dim=1) - call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) - end do - deallocate(f_cursor % attLists) - end if + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if - deallocate(f_cursor) + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if - f_cursor => f - end do + f_cursor => f_next + end do end if - end subroutine mpas_deallocate_field1d_char!}}} + end subroutine mpas_deallocate_field1d_integer_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field2D_integer_target +! +!> \brief MPAS 2D int deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 2D int field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field2d_integer_target(f)!{{{ + + implicit none + + type (field2dInteger), target :: f !< Input: Field to deallocate + + type (field2dInteger), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field2d_integer_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field3D_integer_target +! +!> \brief MPAS 3D int deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 3D int field. +! 
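The key difference visible in the _target variants above is that the dummy argument is declared target rather than pointer, so the routine can be applied to a field that is an element of an array of fields, which could not be argument-associated with a pointer dummy. The pointer versions then layer node deallocation on top. A compact sketch of that distinction with a toy type follows; demo_target_dummy and field_type are invented names.

    ! Sketch of why the *_target variants take a 'target' dummy: an element of
    ! an array of derived-type values can be passed to it, but not to a
    ! pointer dummy.
    module demo_target_dummy
       implicit none

       type :: field_type
          real, pointer :: array(:) => null()
       end type field_type

    contains

       subroutine free_field_target(f)
          type (field_type), target :: f
          if (associated(f % array)) deallocate(f % array)
       end subroutine free_field_target

       subroutine free_field_pointer(f)
          type (field_type), pointer :: f
          call free_field_target(f)
          deallocate(f)          ! frees the node itself
          nullify(f)
       end subroutine free_field_pointer

    end module demo_target_dummy

    program demo_target_use
       use demo_target_dummy
       implicit none
       type (field_type) :: fields(3)     ! array of fields: use the target version
       type (field_type), pointer :: lone ! stand-alone field: pointer version also frees the node
       integer :: i

       do i = 1, 3
          allocate(fields(i) % array(i))
       end do
       allocate(lone)
       allocate(lone % array(10))

       do i = 1, 3
          call free_field_target(fields(i))
       end do
       call free_field_pointer(lone)

       print *, 'lone associated: ', associated(lone)
    end program demo_target_use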
+!----------------------------------------------------------------------- + subroutine mpas_deallocate_field3d_integer_target(f)!{{{ + + implicit none + + type (field3dInteger), target :: f !< Input: Field to deallocate + + type (field3dInteger), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field3d_integer_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field0D_real_target +! +!> \brief MPAS 0D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 0D real field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field0d_real_target(f)!{{{ + + implicit none + + type (field0dReal), target :: f !< Input: Field to deallocate + + type (field0dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field0d_real_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field1D_real_target +! +!> \brief MPAS 1D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 1D real field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field1d_real_target(f)!{{{ + + implicit none + + type (field1dReal), target :: f !< Input: Field to deallocate + + type (field1dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field1d_real_target!}}} + + +!*********************************************************************** +! +! 
routine mpas_deallocate_field2D_real_target +! +!> \brief MPAS 2D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 2D real field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field2d_real_target(f)!{{{ + + implicit none + + type (field2dReal), target :: f !< Input: Field to deallocate + + type (field2dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field2d_real_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field3D_real_target +! +!> \brief MPAS 3D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 3D real field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field3d_real_target(f)!{{{ + + implicit none + + type (field3dReal), target :: f !< Input: Field to deallocate + + type (field3dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field3d_real_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field4D_real_target +! +!> \brief MPAS 4D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 4D real field. +! 
+!----------------------------------------------------------------------- + subroutine mpas_deallocate_field4d_real_target(f)!{{{ + + implicit none + + type (field4dReal), target :: f !< Input: Field to deallocate + + type (field4dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field4d_real_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field5D_real_target +! +!> \brief MPAS 5D real deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 5D real field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field5d_real_target(f)!{{{ + + implicit none + + type (field5dReal), target :: f !< Input: Field to deallocate + + type (field5dReal), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field5d_real_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field0D_char_target +! +!> \brief MPAS 0D char deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 0D char field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field0d_char_target(f)!{{{ + + implicit none + + type (field0dChar), target :: f !< Input: Field to deallocate + + type (field0dChar), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field0d_char_target!}}} + + +!*********************************************************************** +! +! routine mpas_deallocate_field1D_char_target +!
+!> \brief MPAS 1D char deallocation routine. +!> \author Doug Jacobsen, Michael G. Duda +!> \date 04/02/13 +!> \details +!> This routine deallocates a 1D char field. +! +!----------------------------------------------------------------------- + subroutine mpas_deallocate_field1d_char_target(f)!{{{ + + implicit none + + type (field1dChar), target :: f !< Input: Field to deallocate + + type (field1dChar), pointer :: f_cursor, f_next + integer :: threadNum + integer :: i, iErr + + threadNum = mpas_threading_get_thread_num() + + if ( threadNum == 0 ) then + f_cursor => f + do while(associated(f_cursor)) + f_next => f_cursor % next + + if (associated(f_cursor % array)) then + deallocate(f_cursor % array) + end if + + if (associated(f_cursor % attLists)) then + do i = 1, size(f_cursor % attLists, dim=1) + call mpas_deallocate_attlist(f_cursor % attLists(i) % attList, iErr) + end do + deallocate(f_cursor % attLists) + end if + + if (associated(f_cursor % constituentNames)) then + deallocate(f_cursor % constituentNames) + end if + + f_cursor => f_next + end do + end if + + end subroutine mpas_deallocate_field1d_char_target!}}} !*********************************************************************** diff --git a/src/framework/mpas_log.F b/src/framework/mpas_log.F index a7391d2314..08404ca4c0 100644 --- a/src/framework/mpas_log.F +++ b/src/framework/mpas_log.F @@ -41,7 +41,7 @@ module mpas_log use mpas_derived_types use mpas_abort, only : mpas_dmpar_global_abort - use mpas_io_units + use mpas_io_units, only : mpas_new_unit, mpas_release_unit implicit none private @@ -176,13 +176,9 @@ subroutine mpas_log_init(coreLogInfo, domain, unitNumbers, err) mpas_log_info % taskID = domain % dminfo % my_proc_id mpas_log_info % nTasks = domain % dminfo % nprocs - ! Store the domain number and initialized_mpi + ! Store the domain number ! This will be used to number the log files mpas_log_info % domainID = domain % domainID - write(domainString, '(i4.4)') mpas_log_info % domainID - - ! This will be used to decide whether to abort MPI - mpas_log_info % initialized_mpi = domain % dminfo % initialized_mpi ! Set log file to be active or not based on master/nonmaster task and optimized/debug build ! * Optimized build: Only master task log is active @@ -215,12 +211,16 @@ subroutine mpas_log_init(coreLogInfo, domain, unitNumbers, err) else write(taskString, '(i9.9)') mpas_log_info % taskID end if - write(proposedLogFileName, fmt='(a, a, a, a, a)') "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".out" - write(proposedErrFileName, fmt='(a, a, a, a, a)') "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".err" if ( mpas_log_info % domainID > 0 ) then - write(proposedLogFileName, fmt='(a, a, a)') trim(proposedLogFileName),".d", trim(domainString) - write(proposedErrFileName, fmt='(a, a, a)') trim(proposedErrFileName),".d", trim(domainString) + write(domainString, '(i4.4)') mpas_log_info % domainID + write(proposedLogFileName, fmt='(a, a, a, a, a, a, a)') & + "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".d", trim(domainString), ".out" + write(proposedErrFileName, fmt='(a, a, a, a, a, a, a)') & + "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".d", trim(domainString), ".err" + else + write(proposedLogFileName, fmt='(a, a, a, a, a)') "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".out" + write(proposedErrFileName, fmt='(a, a, a, a, a)') "log.", trim(mpas_log_info % coreName), ".", trim(taskString), ".err" end if ! 
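(Illustration only, not part of the patch: with the restructured writes above, and assuming the i9.9 task branch applies, a hypothetical core named "atmosphere" running as task 3 with domain ID 2 would now get log files named

    log.atmosphere.000000003.d0002.out
    log.atmosphere.000000003.d0002.err

while a run with domainID == 0 keeps the original log.atmosphere.000000003.out / .err form, since the ".d" suffix is only written inside the new if-branch.)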
Set the log and err file names and unit numbers @@ -284,9 +284,6 @@ subroutine mpas_log_init(coreLogInfo, domain, unitNumbers, err) mpas_log_info % outputLog % unitNum = unitNumber call mpas_new_unit(unitNumber) mpas_log_info % errorLog % unitNum = unitNumber - if ( unitNumber < 0 ) then - call mpas_dmpar_global_abort('ERROR: All file units are taken. Change maxUnits in mpas_io_units.F') - end if endif @@ -437,8 +434,7 @@ subroutine mpas_log_open(openErrorFile, err) write(unitNumber, '(a,a,a,a,a,i7.1,a,i7.1)') 'Beginning MPAS-', trim(mpas_log_info % coreName), ' ', & trim(logTypeString), ' Log File for task ', mpas_log_info % taskID, ' of ', mpas_log_info % nTasks if ( mpas_log_info % domainID > 0 ) then - write(unitNumber, '(a,i7.1)') & - ' for domain number ', mpas_log_info % domainID + write(unitNumber, '(a,i7.1)') ' for domain ID ', mpas_log_info % domainID end if call date_and_time(date,time) write(unitNumber, '(a)') ' Opened at ' // date(1:4)//'/'//date(5:6)//'/'//date(7:8) // & @@ -851,9 +847,7 @@ subroutine log_abort() deallocate(mpas_log_info) #ifdef _MPI - if ( mpas_log_info % initialized_mpi ) then - call MPI_Abort(MPI_COMM_WORLD, mpi_errcode, mpi_ierr) - end if + call MPI_Abort(MPI_COMM_WORLD, mpi_errcode, mpi_ierr) #else stop #endif diff --git a/src/framework/mpas_log_types.inc b/src/framework/mpas_log_types.inc index 9acd32fa7e..34ba091a56 100644 --- a/src/framework/mpas_log_types.inc +++ b/src/framework/mpas_log_types.inc @@ -28,7 +28,6 @@ !< (stored here to eliminate the need for dminfo later) character(len=StrKIND) :: coreName !< name of the core to which this log manager instance belongs integer :: domainID !< domain number for this instance of the log manager - logical :: initialized_mpi integer :: outputMessageCount !< counter for number of output messages printed during the run integer :: warningMessageCount !< counter for number of warning messages printed during the run diff --git a/src/framework/mpas_pool_routines.F b/src/framework/mpas_pool_routines.F index c362c770d8..aab1818c30 100644 --- a/src/framework/mpas_pool_routines.F +++ b/src/framework/mpas_pool_routines.F @@ -209,7 +209,7 @@ recursive subroutine mpas_pool_destroy_pool(inPool)!{{{ integer :: i, j type (mpas_pool_member_type), pointer :: ptr type (mpas_pool_data_type), pointer :: dptr - integer :: local_err, threadNum + integer :: threadNum threadNum = mpas_threading_get_thread_num() @@ -224,9 +224,9 @@ recursive subroutine mpas_pool_destroy_pool(inPool)!{{{ if (ptr % contentsType == MPAS_POOL_DIMENSION) then if (ptr % data % contentsDims > 0) then - deallocate(ptr % data % simple_int_arr, stat=local_err) + deallocate(ptr % data % simple_int_arr) else - deallocate(ptr % data % simple_int, stat=local_err) + deallocate(ptr % data % simple_int) end if else if (ptr % contentsType == MPAS_POOL_CONFIG) then @@ -234,13 +234,13 @@ recursive subroutine mpas_pool_destroy_pool(inPool)!{{{ dptr => ptr % data if (dptr % contentsType == MPAS_POOL_REAL) then - deallocate(dptr % simple_real, stat=local_err) + deallocate(dptr % simple_real) else if (dptr % contentsType == MPAS_POOL_INTEGER) then - deallocate(dptr % simple_int, stat=local_err) + deallocate(dptr % simple_int) else if (dptr % contentsType == MPAS_POOL_CHARACTER) then - deallocate(dptr % simple_char, stat=local_err) + deallocate(dptr % simple_char) else if (dptr % contentsType == MPAS_POOL_LOGICAL) then - deallocate(dptr % simple_logical, stat=local_err) + deallocate(dptr % simple_logical) end if else if (ptr % contentsType == MPAS_POOL_FIELD) then @@ 
-249,138 +249,96 @@ recursive subroutine mpas_pool_destroy_pool(inPool)!{{{ ! Do this through brute force... if (associated(dptr % r0)) then - deallocate(dptr % r0, stat=local_err) + call mpas_deallocate_field(dptr % r0) else if (associated(dptr % r1)) then - if (associated(dptr % r1 % array)) then - deallocate(dptr % r1 % array, stat=local_err) - end if - - deallocate(dptr % r1, stat=local_err) + call mpas_deallocate_field(dptr % r1) else if (associated(dptr % r2)) then - if (associated(dptr % r2 % array)) then - deallocate(dptr % r2 % array, stat=local_err) - end if - - deallocate(dptr % r2, stat=local_err) + call mpas_deallocate_field(dptr % r2) else if (associated(dptr % r3)) then - if (associated(dptr % r3 % array)) then - deallocate(dptr % r3 % array, stat=local_err) - end if - - deallocate(dptr % r3, stat=local_err) + call mpas_deallocate_field(dptr % r3) else if (associated(dptr % r4)) then - if (associated(dptr % r4 % array)) then - deallocate(dptr % r4 % array, stat=local_err) - end if - - deallocate(dptr % r4, stat=local_err) + call mpas_deallocate_field(dptr % r4) else if (associated(dptr % r5)) then - if (associated(dptr % r5 % array)) then - deallocate(dptr % r5 % array, stat=local_err) - end if - - deallocate(dptr % r5, stat=local_err) + call mpas_deallocate_field(dptr % r5) else if (associated(dptr % i0)) then - deallocate(dptr % i0, stat=local_err) + call mpas_deallocate_field(dptr % i0) else if (associated(dptr % i1)) then - if (associated(dptr % i1 % array)) then - deallocate(dptr % i1 % array, stat=local_err) - end if - - deallocate(dptr % i1, stat=local_err) + call mpas_deallocate_field(dptr % i1) else if (associated(dptr % i2)) then - if (associated(dptr % i2 % array)) then - deallocate(dptr % i2 % array, stat=local_err) - end if - - deallocate(dptr % i2, stat=local_err) + call mpas_deallocate_field(dptr % i2) else if (associated(dptr % i3)) then - if (associated(dptr % i3 % array)) then - deallocate(dptr % i3 % array, stat=local_err) - end if - - deallocate(dptr % i3, stat=local_err) + call mpas_deallocate_field(dptr % i3) else if (associated(dptr % c0)) then - deallocate(dptr % c0, stat=local_err) + call mpas_deallocate_field(dptr % c0) else if (associated(dptr % c1)) then - if (associated(dptr % c1 % array)) then - deallocate(dptr % c1 % array, stat=local_err) - end if - - deallocate(dptr % c1, stat=local_err) + call mpas_deallocate_field(dptr % c1) else if (associated(dptr % l0)) then - deallocate(dptr % l0, stat=local_err) + call mpas_deallocate_field(dptr % l0) else if (associated(dptr % r0a)) then - deallocate(dptr % r0a, stat=local_err) + do j=1,dptr % contentsTimeLevs + call mpas_deallocate_field_target(dptr % r0a(j)) + end do + deallocate(dptr % r0a) else if (associated(dptr % r1a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % r1a(j) % array)) then - deallocate(dptr % r1a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % r1a(j)) end do - deallocate(dptr % r1a, stat=local_err) + deallocate(dptr % r1a) else if (associated(dptr % r2a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % r2a(j) % array)) then - deallocate(dptr % r2a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % r2a(j)) end do - deallocate(dptr % r2a, stat=local_err) + deallocate(dptr % r2a) else if (associated(dptr % r3a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % r3a(j) % array)) then - deallocate(dptr % r3a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % 
r3a(j)) end do - deallocate(dptr % r3a, stat=local_err) + deallocate(dptr % r3a) else if (associated(dptr % r4a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % r4a(j) % array)) then - deallocate(dptr % r4a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % r4a(j)) end do - deallocate(dptr % r4a, stat=local_err) + deallocate(dptr % r4a) else if (associated(dptr % r5a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % r5a(j) % array)) then - deallocate(dptr % r5a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % r5a(j)) end do - deallocate(dptr % r5a, stat=local_err) + deallocate(dptr % r5a) else if (associated(dptr % i0a)) then - deallocate(dptr % i0a, stat=local_err) + do j=1,dptr % contentsTimeLevs + call mpas_deallocate_field_target(dptr % i0a(j)) + end do + deallocate(dptr % i0a) else if (associated(dptr % i1a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % i1a(j) % array)) then - deallocate(dptr % i1a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % i1a(j)) end do - deallocate(dptr % i1a, stat=local_err) + deallocate(dptr % i1a) else if (associated(dptr % i2a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % i2a(j) % array)) then - deallocate(dptr % i2a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % i2a(j)) end do - deallocate(dptr % i2a, stat=local_err) + deallocate(dptr % i2a) else if (associated(dptr % i3a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % i3a(j) % array)) then - deallocate(dptr % i3a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % i3a(j)) end do - deallocate(dptr % i3a, stat=local_err) + deallocate(dptr % i3a) else if (associated(dptr % c0a)) then - deallocate(dptr % c0a, stat=local_err) + do j=1,dptr % contentsTimeLevs + call mpas_deallocate_field_target(dptr % c0a(j)) + end do + deallocate(dptr % c0a) else if (associated(dptr % c1a)) then do j=1,dptr % contentsTimeLevs - if (associated(dptr % c1a(j) % array)) then - deallocate(dptr % c1a(j) % array, stat=local_err) - end if + call mpas_deallocate_field_target(dptr % c1a(j)) end do - deallocate(dptr % c1a, stat=local_err) + deallocate(dptr % c1a) else if (associated(dptr % l0a)) then - deallocate(dptr % l0a, stat=local_err) + do j=1,dptr % contentsTimeLevs + call mpas_deallocate_field_target(dptr % l0a(j)) + end do + deallocate(dptr % l0a) else call pool_mesg('While destroying pool, member '//trim(ptr % key)//' has no valid field pointers.') end if @@ -390,14 +348,14 @@ recursive subroutine mpas_pool_destroy_pool(inPool)!{{{ call mpas_pool_destroy_pool(ptr % data % p) end if - deallocate(ptr % data, stat=local_err) - deallocate(ptr, stat=local_err) + deallocate(ptr % data) + deallocate(ptr) end do end do - deallocate(inPool % table, stat=local_err) - deallocate(inPool, stat=local_err) + deallocate(inPool % table) + deallocate(inPool) end if end subroutine mpas_pool_destroy_pool!}}} @@ -1861,7 +1819,7 @@ recursive subroutine mpas_pool_link_parinfo(block, inPool)!{{{ end if else if (poolItr % nDims == 4) then if (poolItr % nTimeLevels > 1) then - decompType = pool_get_member_decomp_type(poolMem % r4 % dimNames(4)) + decompType = pool_get_member_decomp_type(poolMem % r4a(1) % dimNames(4)) if (decompType == MPAS_DECOMP_CELLS) then do i = 1, poolItr % nTimeLevels @@ -5142,7 +5100,7 @@ subroutine mpas_pool_add_subpool(inPool, key, subPool)!{{{ type (mpas_pool_type), intent(inout) :: inPool character 
(len=*), intent(in) :: key - type (mpas_pool_type), intent(in), target :: subPool + type (mpas_pool_type), pointer :: subPool type (mpas_pool_member_type), pointer :: newmem diff --git a/src/framework/mpas_timekeeping.F b/src/framework/mpas_timekeeping.F index c6de8f0aed..1ca9f8bc9c 100644 --- a/src/framework/mpas_timekeeping.F +++ b/src/framework/mpas_timekeeping.F @@ -1334,6 +1334,9 @@ subroutine mpas_set_timeInterval(interval, YY, MM, DD, H, M, S, S_n, S_d, S_i8, character (len=StrKIND) :: timeSubString character (len=StrKIND) :: secDecSubString character(len=StrKIND), pointer, dimension(:) :: subStrings + character(len=16) :: fmtString + integer :: iwidth + integer :: idecimals ! if (present(DD)) then ! days = DD @@ -1403,8 +1406,29 @@ subroutine mpas_set_timeInterval(interval, YY, MM, DD, H, M, S, S_n, S_d, S_i8, if (present(timeString) .or. present(dt)) then - if(present(dt)) then - write (timeString_,*) "00:00:", dt + if (present(dt)) then + ! + ! Before writing dt into a timeString, first construct an appropriate format string + ! + + ! Number of decimal places of precision (9 = nanosecond precision) + idecimals = 9 + + ! Scale total width of representation based on max(log10(dt),0.0) + ! (+2 for at least a leading zero and a '.') + if (dt /= 0.0_RKIND) then + iwidth = int(max(log10(abs(dt)),0.0_RKIND)) + idecimals + 2 + else + iwidth = idecimals + 2 + end if + + ! Add an extra character for a minus sign if needed + if (dt < 0.0_RKIND) then + iwidth = iwidth + 1 + end if + + write(fmtString, '(a,i2.2,a,i2.2,a)') '(a,f', iwidth, '.', idecimals, ')' + write(timeString_,trim(fmtString)) '00:00:', dt else timeString_ = timeString end if diff --git a/src/operators/mpas_geometry_utils.F b/src/operators/mpas_geometry_utils.F index 7ec62be6cb..ba9d49522a 100644 --- a/src/operators/mpas_geometry_utils.F +++ b/src/operators/mpas_geometry_utils.F @@ -1728,4 +1728,142 @@ subroutine mpas_spherical_linear_interp(pInterp, p0, p1, alpha) !{{{ end subroutine mpas_spherical_linear_interp !}}} + +!----------------------------------------------------------------------- +! routine mpas_rotate_about_vector +! +!> \brief Rotates a point about a vector in R3 +!> \author Michael Duda +!> \date 7 March 2019 +!> \details +!> Rotates the point (x,y,z) through an angle theta about the vector +!> originating at (a, b, c) and having direction (u, v, w). +! +!> Reference: https://sites.google.com/site/glennmurray/Home/rotation-matrices-and-formulas/rotation-about-an-arbitrary-axis-in-3-dimensions +! +!----------------------------------------------------------------------- + subroutine mpas_rotate_about_vector(x, y, z, theta, a, b, c, u, v, w, xp, yp, zp) + + implicit none + + real (kind=RKIND), intent(in) :: x, y, z, theta, a, b, c, u, v, w + real (kind=RKIND), intent(out) :: xp, yp, zp + + real (kind=RKIND) :: vw2, uw2, uv2 + real (kind=RKIND) :: m + + vw2 = v**2.0 + w**2.0 + uw2 = u**2.0 + w**2.0 + uv2 = u**2.0 + v**2.0 + m = sqrt(u**2.0 + v**2.0 + w**2.0) + + xp = (a*vw2 + u*(-b*v-c*w+u*x+v*y+w*z) + ((x-a)*vw2+u*(b*v+c*w-v*y-w*z))*cos(theta) + m*(-c*v+b*w-w*y+v*z)*sin(theta))/m**2.0 + yp = (b*uw2 + v*(-a*u-c*w+u*x+v*y+w*z) + ((y-b)*uw2+v*(a*u+c*w-u*x-w*z))*cos(theta) + m*( c*u-a*w+w*x-u*z)*sin(theta))/m**2.0 + zp = (c*uv2 + w*(-a*u-b*v+u*x+v*y+w*z) + ((z-c)*uv2+w*(a*u+b*v-u*x-v*y))*cos(theta) + m*(-b*u+a*v-v*x+u*y)*sin(theta))/m**2.0 + + end subroutine mpas_rotate_about_vector + + +!----------------------------------------------------------------------- +! routine mpas_mirror_point +! 
+!> \brief Finds the "mirror" of a point about a great-circle arc +!> \author Michael Duda +!> \date 7 March 2019 +!> \details +!> Given the endpoints of a great-circle arc (A,B) and a point, computes +!> the location of the point on the opposite side of the arc along a great- +!> circle arc that intersects (A,B) at a right angle, and such that the arc +!> between the point and its mirror is bisected by (A,B). +!> +!> Assumptions: A, B, and the point to be reflected all lie on the surface +!> of the unit sphere. +! +!----------------------------------------------------------------------- + subroutine mpas_mirror_point(xPoint, yPoint, zPoint, xA, yA, zA, xB, yB, zB, xMirror, yMirror, zMirror) + + implicit none + + real(kind=RKIND), intent(in) :: xPoint, yPoint, zPoint + real(kind=RKIND), intent(in) :: xA, yA, zA + real(kind=RKIND), intent(in) :: xB, yB, zB + real(kind=RKIND), intent(out) :: xMirror, yMirror, zMirror + + real(kind=RKIND) :: alpha + + ! + ! Find the spherical angle between arcs AP and AB (where P is the point to be reflected) + ! + alpha = mpas_sphere_angle(xA, yA, zA, xPoint, yPoint, zPoint, xB, yB, zB) + + ! + ! Rotate the point to be reflected by twice alpha about the vector from the origin to A + ! + call mpas_rotate_about_vector(xPoint, yPoint, zPoint, 2.0_RKIND * alpha, 0.0_RKIND, 0.0_RKIND, 0.0_RKIND, & + xA, yA, zA, xMirror, yMirror, zMirror) + + end subroutine mpas_mirror_point + + +!----------------------------------------------------------------------- +! routine mpas_in_cell +! +!> \brief Determines whether a point is within a Voronoi cell +!> \author Michael Duda +!> \date 7 March 2019 +!> \details +!> Given a point on the surface of the sphere, the corner points of a Voronoi +!> cell, and the generating point for that Voronoi cell, determines whether +!> the given point is within the Voronoi cell. +! +!----------------------------------------------------------------------- + logical function mpas_in_cell(xPoint, yPoint, zPoint, xCell, yCell, zCell, & + nEdgesOnCell, verticesOnCell, xVertex, yVertex, zVertex) + + implicit none + + real(kind=RKIND), intent(in) :: xPoint, yPoint, zPoint + real(kind=RKIND), intent(in) :: xCell, yCell, zCell + integer, intent(in) :: nEdgesOnCell + integer, dimension(:), intent(in) :: verticesOnCell + real(kind=RKIND), dimension(:), intent(in) :: xVertex, yVertex, zVertex + + integer :: i + integer :: vtx1, vtx2 + real(kind=RKIND) :: xNeighbor, yNeighbor, zNeighbor + real(kind=RKIND) :: inDist, outDist + real(kind=RKIND) :: radius + real(kind=RKIND) :: radius_inv + + radius = sqrt(xCell * xCell + yCell * yCell + zCell * zCell) + radius_inv = 1.0_RKIND / radius + + inDist = mpas_arc_length(xPoint, yPoint, zPoint, xCell, yCell, zCell) + + mpas_in_cell = .true. + + do i=1,nEdgesOnCell + vtx1 = verticesOnCell(i) + vtx2 = verticesOnCell(mod(i,nEdgesOnCell)+1) + + call mpas_mirror_point(xCell*radius_inv, yCell*radius_inv, zCell*radius_inv, & + xVertex(vtx1)*radius_inv, yVertex(vtx1)*radius_inv, zVertex(vtx1)*radius_inv, & + xVertex(vtx2)*radius_inv, yVertex(vtx2)*radius_inv, zVertex(vtx2)*radius_inv, & + xNeighbor, yNeighbor, zNeighbor) + + xNeighbor = xNeighbor * radius + yNeighbor = yNeighbor * radius + zNeighbor = zNeighbor * radius + + outDist = mpas_arc_length(xPoint, yPoint, zPoint, xNeighbor, yNeighbor, zNeighbor) + + if (outDist < inDist) then + mpas_in_cell = .false. 
+ return + end if + + end do + + end function mpas_in_cell + end module mpas_geometry_utils diff --git a/src/operators/mpas_spline_interpolation.F b/src/operators/mpas_spline_interpolation.F index f7fa682842..6d0d2ffa02 100644 --- a/src/operators/mpas_spline_interpolation.F +++ b/src/operators/mpas_spline_interpolation.F @@ -115,6 +115,10 @@ subroutine mpas_interpolate_cubic_spline( &!{{{ ! INPUT PARAMETERS: + integer, intent(in) :: & + n, &!< Input: number of nodes, input grid + nOut !< Input: number of nodes, output grid + real (kind=RKIND), dimension(n), intent(in) :: & x, &!< Input: node location, input grid y, &!< Input: interpolation variable, input grid @@ -123,10 +127,6 @@ subroutine mpas_interpolate_cubic_spline( &!{{{ real (kind=RKIND), dimension(nOut), intent(in) :: & xOut !< Input: node location, output grid - integer, intent(in) :: & - n, &!< Input: number of nodes, input grid - nOut !< Input: number of nodes, output grid - ! OUTPUT PARAMETERS: real (kind=RKIND), dimension(nOut), intent(out) :: & @@ -359,6 +359,10 @@ subroutine mpas_interpolate_linear( &!{{{ ! !INPUT PARAMETERS: + integer, intent(in) :: & + N, &!< Input: number of nodes, input grid + NOut !< Input: number of nodes, output grid + real (kind=RKIND), dimension(n), intent(in) :: & x, &!< Input: node location, input grid y !< Input: interpolation variable, input grid @@ -366,10 +370,6 @@ subroutine mpas_interpolate_linear( &!{{{ real (kind=RKIND), dimension(nOut), intent(in) :: & xOut !< Input: node location, output grid - integer, intent(in) :: & - N, &!< Input: number of nodes, input grid - NOut !< Input: number of nodes, output grid - ! !OUTPUT PARAMETERS: real (kind=RKIND), dimension(nOut), intent(out) :: & diff --git a/src/operators/mpas_tracer_advection_helpers.F b/src/operators/mpas_tracer_advection_helpers.F index f18570bd79..15c9ec22d4 100644 --- a/src/operators/mpas_tracer_advection_helpers.F +++ b/src/operators/mpas_tracer_advection_helpers.F @@ -188,7 +188,7 @@ subroutine mpas_tracer_advection_coefficients( meshPool, horiz_adv_order, deriv_ sorted_cell_indices(2, n) = cellsOnCell(i, cell1) call mpas_hash_insert(cell_hash, cellsOnCell(i, cell1)) end if - end do ! loop over i + end do do i = 1, nEdgesOnCell(cell2) if(.not. mpas_hash_search(cell_hash, cellsOnCell(i, cell2))) then @@ -198,7 +198,7 @@ subroutine mpas_tracer_advection_coefficients( meshPool, horiz_adv_order, deriv_ sorted_cell_indices(2, n) = cellsOnCell(i, cell2) call mpas_hash_insert(cell_hash, cellsOnCell(i, cell2)) end if - end do ! loop over i + end do call mpas_hash_destroy(cell_hash) @@ -207,11 +207,28 @@ subroutine mpas_tracer_advection_coefficients( meshPool, horiz_adv_order, deriv_ nAdvCellsForEdge(iEdge) = n do iCell = 1, nAdvCellsForEdge(iEdge) advCellsForEdge(iCell, iEdge) = sorted_cell_indices(2, iCell) - end do ! loop over iCell + end do + + ! equation 7 in Skamarock, W. C., & Gassmann, A. (2011): + ! F(u,psi)_{i+1/2} = u_{i+1/2} * + ! [ 1/2 (psi_{i+1} + psi_i) term 1 + ! - 1/12(dx^2psi_{i+1} + dx^2psi_i) term 2 + ! + sign(u) beta/12 (dx^2psi_{i+1} - dx^2psi_i)] term 3 (note minus sign) + ! + ! adv_coefs accounts for terms 1 and 2 in SG11 equation 7. Term 1 is + ! the 2nd-order flux-function term. adv_coefs accounts for this with + ! the "+ 0.5" lines below. In the advection routines that use these + ! coefficients, the 2nd-order flux loop is then skipped. Term 2 is + ! the 4th-order flux-function term. adv_coefs_3rd accounts for term + ! 3, the beta term. beta > 0 corresponds to the third-order flux + ! function. 
The - sign in the deriv_two accumulation is for the i+1 + ! part of term 3, while the + sign is for the i part. adv_coefs(:,iEdge) = 0. adv_coefs_3rd(:,iEdge) = 0. + ! pull together third and fourth order contributions to the flux + ! first from cell1 k = mpas_binary_search(sorted_cell_indices, 2, 1, nAdvCellsForEdge(iEdge), indexToCellID(cell1)) if(k <= nAdvCellsForEdge(iEdge)) then adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + deriv_two(1,1,iEdge) @@ -224,27 +241,30 @@ subroutine mpas_tracer_advection_coefficients( meshPool, horiz_adv_order, deriv_ adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + deriv_two(iCell+1, 1, iEdge) adv_coefs_3rd(k, iEdge) = adv_coefs_3rd(k, iEdge) + deriv_two(iCell+1, 1, iEdge) end if - end do ! loop over iCell + end do + ! pull together third and fourth order contributions to the flux + ! now from cell2 k = mpas_binary_search(sorted_cell_indices, 2, 1, nAdvCellsForEdge(iEdge), indexToCellID(cell2)) if(k <= nAdvCellsForEdge(iEdge)) then adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + deriv_two(1,2,iEdge) - adv_coefs_3rd(k, iEdge) = adv_coefs_3rd(k, iEdge) + deriv_two(1,2,iEdge) + adv_coefs_3rd(k, iEdge) = adv_coefs_3rd(k, iEdge) - deriv_two(1,2,iEdge) end if do iCell = 1, nEdgesOnCell(cell2) k = mpas_binary_search(sorted_cell_indices, 2, 1, nAdvCellsForEdge(iEdge), indexToCellID(cellsOnCell(iCell,cell2))) if(k <= nAdvCellsForEdge(iEdge)) then adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + deriv_two(iCell+1, 2, iEdge) - adv_coefs_3rd(k, iEdge) = adv_coefs_3rd(k, iEdge) + deriv_two(iCell+1, 2, iEdge) + adv_coefs_3rd(k, iEdge) = adv_coefs_3rd(k, iEdge) - deriv_two(iCell+1, 2, iEdge) end if - end do ! loop over iCell + end do do iCell = 1,nAdvCellsForEdge(iEdge) adv_coefs (iCell,iEdge) = - (dcEdge(iEdge) **2) * adv_coefs (iCell,iEdge) / 12. adv_coefs_3rd(iCell,iEdge) = - (dcEdge(iEdge) **2) * adv_coefs_3rd(iCell,iEdge) / 12. - end do ! loop over iCell + end do + ! 2nd order centered contribution - place this in the main flux weights k = mpas_binary_search(sorted_cell_indices, 2, 1, nAdvCellsForEdge(iEdge), indexToCellID(cell1)) if(k <= nAdvCellsForEdge(iEdge)) then adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + 0.5 @@ -255,11 +275,12 @@ subroutine mpas_tracer_advection_coefficients( meshPool, horiz_adv_order, deriv_ adv_coefs(k, iEdge) = adv_coefs(k, iEdge) + 0.5 end if + ! multiply by edge length - thus the flux is just dt*ru times the results of the vector-vector multiply do iCell=1,nAdvCellsForEdge(iEdge) adv_coefs (iCell,iEdge) = dvEdge(iEdge) * adv_coefs (iCell,iEdge) adv_coefs_3rd(iCell,iEdge) = dvEdge(iEdge) * adv_coefs_3rd(iCell,iEdge) - end do ! loop over iCell - end if + end do + end if ! only do for edges of owned-cells end do ! 
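To make the comment block above concrete, here is a minimal sketch (not the actual loops in mpas_tracer_advection_std.F / _mono.F; u_edge, coef_3rd_order, tracer, and the local RKIND are illustrative stand-ins) of how the assembled weights are typically consumed: the SG11 eq. 7 flux at an edge becomes a single dot product over the advCellsForEdge stencil, with the beta term entering through adv_coefs_3rd and the upwind sign through sign(1,u_edge).

function edge_tracer_flux(nAdv, advCells, coefs, coefs_3rd, coef_3rd_order, u_edge, tracer) result(flux)
   implicit none
   integer, parameter :: RKIND = selected_real_kind(12)   ! stand-in for the MPAS real kind
   integer, intent(in) :: nAdv                            ! nAdvCellsForEdge(iEdge)
   integer, dimension(:), intent(in) :: advCells          ! advCellsForEdge(:,iEdge)
   real (kind=RKIND), dimension(:), intent(in) :: coefs, coefs_3rd
   real (kind=RKIND), dimension(:), intent(in) :: tracer  ! cell-centered scalar
   real (kind=RKIND), intent(in) :: coef_3rd_order, u_edge
   real (kind=RKIND) :: flux
   integer :: i

   flux = 0.0_RKIND
   do i = 1, nAdv
      ! terms 1 and 2 of SG11 eq. 7 live in coefs; term 3 is blended in via coefs_3rd
      flux = flux + (coefs(i) + coef_3rd_order * sign(1.0_RKIND, u_edge) * coefs_3rd(i)) * tracer(advCells(i))
   end do
   flux = u_edge * flux   ! edge length (dvEdge) was already folded into the coefficients
end function edge_tracer_flux

Because the 0.5 cell-center weights, the -dcEdge**2/12 scaling, and the dvEdge factor are all folded into the coefficients above, no separate 2nd-order flux loop is needed at evaluation time.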
end loop over edges deallocate(cell_indices) diff --git a/src/operators/operators.cmake b/src/operators/operators.cmake new file mode 100644 index 0000000000..d65c7c661e --- /dev/null +++ b/src/operators/operators.cmake @@ -0,0 +1,13 @@ +# operators +list(APPEND COMMON_RAW_SOURCES + operators/mpas_vector_operations.F + operators/mpas_matrix_operations.F + operators/mpas_tensor_operations.F + operators/mpas_rbf_interpolation.F + operators/mpas_vector_reconstruction.F + operators/mpas_spline_interpolation.F + operators/mpas_tracer_advection_helpers.F + operators/mpas_tracer_advection_mono.F + operators/mpas_tracer_advection_std.F + operators/mpas_geometry_utils.F +) diff --git a/src/tools/CMakeLists.txt b/src/tools/CMakeLists.txt new file mode 100644 index 0000000000..513ae48cf1 --- /dev/null +++ b/src/tools/CMakeLists.txt @@ -0,0 +1,30 @@ + +if (DEFINED ENV{MPAS_TOOL_DIR}) + message(STATUS "*** Using MPAS tools from $ENV{MPAS_TOOL_DIR} ***") + add_custom_target(namelist_gen) + add_custom_command( + TARGET namelist_gen PRE_BUILD + COMMAND ${CMAKE_COMMAND} -E copy $ENV{MPAS_TOOL_DIR}/input_gen/namelist_gen ${CMAKE_CURRENT_BINARY_DIR}/namelist_gen) + add_custom_target(streams_gen) + add_custom_command( + TARGET streams_gen PRE_BUILD + COMMAND ${CMAKE_COMMAND} -E copy $ENV{MPAS_TOOL_DIR}/input_gen/streams_gen ${CMAKE_CURRENT_BINARY_DIR}/streams_gen) + add_custom_target(parse) + add_custom_command( + TARGET parse PRE_BUILD + COMMAND ${CMAKE_COMMAND} -E copy $ENV{MPAS_TOOL_DIR}/input_gen/parse ${CMAKE_CURRENT_BINARY_DIR}/parse) +else() + message(STATUS "*** Building MPAS tools from source ***") + # Make build tools, need to be compiled with serial compiler. + set(CMAKE_C_COMPILER ${SCC}) + + add_executable(streams_gen input_gen/streams_gen.c input_gen/test_functions.c ../external/ezxml/ezxml.c) + add_executable(namelist_gen input_gen/namelist_gen.c input_gen/test_functions.c ../external/ezxml/ezxml.c) + add_executable(parse registry/parse.c registry/dictionary.c registry/gen_inc.c registry/fortprintf.c registry/utility.c ../external/ezxml/ezxml.c) + + foreach(EXEITEM streams_gen namelist_gen parse) + target_compile_definitions(${EXEITEM} PRIVATE ${CPPDEFS}) + target_compile_options(${EXEITEM} PRIVATE "-Uvector") + target_include_directories(${EXEITEM} PRIVATE ${INCLUDES}) + endforeach() +endif() diff --git a/src/tools/registry/gen_inc.c b/src/tools/registry/gen_inc.c index 7b4526a56f..619740002f 100644 --- a/src/tools/registry/gen_inc.c +++ b/src/tools/registry/gen_inc.c @@ -50,7 +50,7 @@ void write_model_variables(ezxml_t registry){/*{{{*/ }/*}}}*/ -int write_field_pointers(FILE* fd){/*{{{*/ +int write_field_pointer_arrays(FILE* fd){/*{{{*/ fortprintf(fd, "\n"); fortprintf(fd, " type (field0DReal), pointer :: r0Ptr\n"); fortprintf(fd, " type (field1DReal), pointer :: r1Ptr\n"); @@ -64,79 +64,80 @@ int write_field_pointers(FILE* fd){/*{{{*/ fortprintf(fd, " type (field3DInteger), pointer :: i3Ptr\n"); fortprintf(fd, " type (field0DChar), pointer :: c0Ptr\n"); fortprintf(fd, " type (field1DChar), pointer :: c1Ptr\n"); + fortprintf(fd, " type (field0DReal), dimension(:), pointer :: r0aPtr\n"); + fortprintf(fd, " type (field1DReal), dimension(:), pointer :: r1aPtr\n"); + fortprintf(fd, " type (field2DReal), dimension(:), pointer :: r2aPtr\n"); + fortprintf(fd, " type (field3DReal), dimension(:), pointer :: r3aPtr\n"); + fortprintf(fd, " type (field4DReal), dimension(:), pointer :: r4aPtr\n"); + fortprintf(fd, " type (field5DReal), dimension(:), pointer :: r5aPtr\n"); + fortprintf(fd, " type 
(field0DInteger), dimension(:), pointer :: i0aPtr\n"); + fortprintf(fd, " type (field1DInteger), dimension(:), pointer :: i1aPtr\n"); + fortprintf(fd, " type (field2DInteger), dimension(:), pointer :: i2aPtr\n"); + fortprintf(fd, " type (field3DInteger), dimension(:), pointer :: i3aPtr\n"); + fortprintf(fd, " type (field0DChar), dimension(:), pointer :: c0aPtr\n"); + fortprintf(fd, " type (field1DChar), dimension(:), pointer :: c1aPtr\n"); fortprintf(fd, "\n"); return 0; }/*}}}*/ -int write_field_pointer_arrays(FILE* fd){/*{{{*/ - fortprintf(fd, "\n"); - fortprintf(fd, " type (field0DReal), dimension(:), pointer :: r0Ptr\n"); - fortprintf(fd, " type (field1DReal), dimension(:), pointer :: r1Ptr\n"); - fortprintf(fd, " type (field2DReal), dimension(:), pointer :: r2Ptr\n"); - fortprintf(fd, " type (field3DReal), dimension(:), pointer :: r3Ptr\n"); - fortprintf(fd, " type (field4DReal), dimension(:), pointer :: r4Ptr\n"); - fortprintf(fd, " type (field5DReal), dimension(:), pointer :: r5Ptr\n"); - fortprintf(fd, " type (field0DInteger), dimension(:), pointer :: i0Ptr\n"); - fortprintf(fd, " type (field1DInteger), dimension(:), pointer :: i1Ptr\n"); - fortprintf(fd, " type (field2DInteger), dimension(:), pointer :: i2Ptr\n"); - fortprintf(fd, " type (field3DInteger), dimension(:), pointer :: i3Ptr\n"); - fortprintf(fd, " type (field0DChar), dimension(:), pointer :: c0Ptr\n"); - fortprintf(fd, " type (field1DChar), dimension(:), pointer :: c1Ptr\n"); - fortprintf(fd, "\n"); +int set_pointer_name(int type, int ndims, char *pointer_name, int time_levs){/*{{{*/ - return 0; -}/*}}}*/ + char suffix[6]; + if (time_levs > 1) { + snprintf(suffix, 6, "aPtr"); + } else { + snprintf(suffix, 6, "Ptr"); + } -int set_pointer_name(int type, int ndims, char *pointer_name){/*{{{*/ if(type == REAL) { switch (ndims){ default: case 0: - snprintf(pointer_name, 1024, "r0Ptr"); + snprintf(pointer_name, 1024, "r0%s", suffix); break; case 1: - snprintf(pointer_name, 1024, "r1Ptr"); + snprintf(pointer_name, 1024, "r1%s", suffix); break; case 2: - snprintf(pointer_name, 1024, "r2Ptr"); + snprintf(pointer_name, 1024, "r2%s", suffix); break; case 3: - snprintf(pointer_name, 1024, "r3Ptr"); + snprintf(pointer_name, 1024, "r3%s", suffix); break; case 4: - snprintf(pointer_name, 1024, "r4Ptr"); + snprintf(pointer_name, 1024, "r4%s", suffix); break; case 5: - snprintf(pointer_name, 1024, "r5Ptr"); + snprintf(pointer_name, 1024, "r5%s", suffix); break; } } else if (type == INTEGER) { switch (ndims){ default: case 0: - snprintf(pointer_name, 1024, "i0Ptr"); + snprintf(pointer_name, 1024, "i0%s", suffix); break; case 1: - snprintf(pointer_name, 1024, "i1Ptr"); + snprintf(pointer_name, 1024, "i1%s", suffix); break; case 2: - snprintf(pointer_name, 1024, "i2Ptr"); + snprintf(pointer_name, 1024, "i2%s", suffix); break; case 3: - snprintf(pointer_name, 1024, "i3Ptr"); + snprintf(pointer_name, 1024, "i3%s", suffix); break; } } else if (type == CHARACTER) { switch (ndims){ default: case 0: - snprintf(pointer_name, 1024, "c0Ptr"); + snprintf(pointer_name, 1024, "c0%s", suffix); break; case 1: - snprintf(pointer_name, 1024, "c1Ptr"); + snprintf(pointer_name, 1024, "c1%s", suffix); break; } } @@ -538,7 +539,7 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ int in_subpool; - FILE *fd, *fd2; + FILE *fd, *fd2, *fcd, *fcg; const_core = ezxml_attr(registry, "core_abbrev"); @@ -546,6 +547,8 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ fd = fopen("namelist_defines.inc", "w+"); fd2 = 
fopen("namelist_call.inc", "w+"); + fcd = fopen("config_declare.inc", "w+"); + fcg = fopen("config_get.inc", "w+"); fortprintf(fd2, " function %s_setup_namelists(configPool, namelistFilename, dminfo) result(iErr)\n", core_string); fortprintf(fd2, " use mpas_derived_types\n"); @@ -607,7 +610,7 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ fortprintf(fd, " integer :: ierr\n"); fortprintf(fd, "\n"); - // Define variable defintions prior to reading the namelist in. + // Define variable definitions prior to reading the namelist in. for (nmlopt_xml = ezxml_child(nmlrecs_xml, "nml_option"); nmlopt_xml; nmlopt_xml = nmlopt_xml->next){ nmloptname = ezxml_attr(nmlopt_xml, "name"); nmlopttype = ezxml_attr(nmlopt_xml, "type"); @@ -619,9 +622,12 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ if(strncmp(nmlopttype, "real", 1024) == 0){ fortprintf(fd, " real (kind=RKIND) :: %s = %lf\n", nmloptname, (double)atof(nmloptval)); + fortprintf(fcd, " real (kind=RKIND), pointer :: %s\n", nmloptname); } else if(strncmp(nmlopttype, "integer", 1024) == 0){ fortprintf(fd, " integer :: %s = %d\n", nmloptname, atoi(nmloptval)); + fortprintf(fcd, " integer, pointer :: %s\n", nmloptname); } else if(strncmp(nmlopttype, "logical", 1024) == 0){ + fortprintf(fcd, " logical, pointer :: %s\n", nmloptname); if(strncmp(nmloptval, "true", 1024) == 0 || strncmp(nmloptval, ".true.", 1024) == 0){ fortprintf(fd, " logical :: %s = .true.\n", nmloptname); } else { @@ -629,9 +635,11 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ } } else if(strncmp(nmlopttype, "character", 1024) == 0){ fortprintf(fd, " character (len=StrKIND) :: %s = '%s'\n", nmloptname, nmloptval); + fortprintf(fcd, " character (len=StrKIND), pointer :: %s\n", nmloptname); } } fortprintf(fd, "\n"); + fortprintf(fcd, "\n"); // Define the namelist block, to read the namelist record in. fortprintf(fd, " namelist /%s/ &\n", nmlrecname); @@ -653,7 +661,11 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ } fortprintf(fd, " if (dminfo %% my_proc_id == IO_NODE) then\n"); + fortprintf(fd, "! Rewinding before each read leads to errors when the code is built with\n"); + fortprintf(fd, "! the NAG Fortran compiler. If building with NAG, be kind and don't rewind.\n"); + fortprintf(fd, "#ifndef NAG_COMPILER\n"); fortprintf(fd, " rewind(unitNumber)\n"); + fortprintf(fd, "#endif\n"); fortprintf(fd, " read(unitNumber, %s, iostat=ierr)\n", nmlrecname); fortprintf(fd, " end if\n"); @@ -712,8 +724,10 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ nmloptname = ezxml_attr(nmlopt_xml, "name"); fortprintf(fd, " call mpas_pool_add_config(%s, '%s', %s)\n", pool_name, nmloptname, nmloptname); + fortprintf(fcg, " call mpas_pool_get_config(configPool, '%s', %s)\n", nmloptname, nmloptname); } fortprintf(fd, "\n"); + fortprintf(fcg, "\n"); // End new subroutine for namelist record. 
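For a concrete picture of the two new include files (illustrative output only; config_dt is a hypothetical real-valued namelist option), the fcd and fcg fortprintf calls above would generate lines of this shape:

! config_declare.inc -- one pointer declaration per namelist option
   real (kind=RKIND), pointer :: config_dt

! config_get.inc -- one retrieval per option, used once the config pool is populated
   call mpas_pool_get_config(configPool, 'config_dt', config_dt)

A core can then include both files to obtain local config variables without hand-writing the declarations and mpas_pool_get_config calls.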
fortprintf(fd, " end subroutine %s_setup_nmlrec_%s\n", core_string, nmlrecname); @@ -724,6 +738,11 @@ int parse_namelist_records_from_registry(ezxml_t registry)/*{{{*/ fortprintf(fd2, " close(unitNumber)\n"); fortprintf(fd2, " end function %s_setup_namelists\n", core_string); + fclose(fd); + fclose(fd2); + fclose(fcd); + fclose(fcg); + return 0; }/*}}}*/ @@ -1023,6 +1042,7 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var char *string, *tofree, *token; char temp_str[1024]; char pointer_name[1024]; + char pointer_name_arr[1024]; char spacing[1024], sub_spacing[1024]; char default_value[1024]; char missing_value[1024]; @@ -1071,13 +1091,23 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var // Determine field type and default value. get_field_information(vararrtype, vararrdefaultval, default_value, vararrmissingval, missing_value, &type); + // If a default_value is not specified, but a missing_value is, then set the + // default_value (defaultValue) to missing_value + if(!vararrdefaultval && vararrmissingval) { + snprintf(default_value, 1024, "%s ! defaultValue taking specified missing_value", missing_value); + } + // Determine ndims, hasTime, and decomp type build_dimension_information(registry, var_arr_xml, &ndims, &hasTime, &decomp); ndims++; // Add a dimension for constituents in var_array // Determine name of pointer for this field. - set_pointer_name(type, ndims, pointer_name); - fortprintf(fd, " allocate(%s(%d))\n", pointer_name, time_levs); + set_pointer_name(type, ndims, pointer_name, time_levs); + if (time_levs > 1) { + fortprintf(fd, " allocate(%s(%d))\n", pointer_name, time_levs); + } else { + fortprintf(fd, " allocate(%s)\n", pointer_name); + } fortprintf(fd, " index_counter = 0\n", spacing); fortprintf(fd, " group_counter = -1\n", spacing); @@ -1253,27 +1283,32 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var fortprintf(fd, " end if\n"); for(time_lev = 1; time_lev <= time_levs; time_lev++){ + if (time_levs > 1) { + snprintf(pointer_name_arr, 1024, "%s(%d)", pointer_name, time_lev); + } else { + snprintf(pointer_name_arr, 1024, "%s", pointer_name); + } fortprintf(fd, "! 
Defining time level %d\n", time_lev); - fortprintf(fd, " allocate( %s(%d) %% constituentNames(numConstituents) )\n", pointer_name, time_lev); - fortprintf(fd, " %s(%d) %% fieldName = '%s'\n", pointer_name, time_lev, vararrname); + fortprintf(fd, " allocate( %s %% constituentNames(numConstituents) )\n", pointer_name_arr); + fortprintf(fd, " %s %% fieldName = '%s'\n", pointer_name_arr , vararrname); if (decomp != -1) { - fortprintf(fd, " %s(%d) %% isDecomposed = .true.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isDecomposed = .true.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% isDecomposed = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isDecomposed = .false.\n", pointer_name_arr); } if (hasTime) { - fortprintf(fd, " %s(%d) %% hasTimeDimension = .true.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% hasTimeDimension = .true.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% hasTimeDimension = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% hasTimeDimension = .false.\n", pointer_name_arr); } - fortprintf(fd, " %s(%d) %% isVarArray = .true.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isVarArray = .true.\n", pointer_name_arr); if(ndims > 0){ if(persistence == SCRATCH){ - fortprintf(fd, " %s(%d) %% isPersistent = .false.\n", pointer_name, time_lev); - fortprintf(fd, " %s(%d) %% isActive = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isPersistent = .false.\n", pointer_name_arr); + fortprintf(fd, " %s %% isActive = .false.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% isPersistent = .true.\n", pointer_name, time_lev); - fortprintf(fd, " %s(%d) %% isActive = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isPersistent = .true.\n", pointer_name_arr); + fortprintf(fd, " %s %% isActive = .false.\n", pointer_name_arr); } } fortprintf(fd, "\n"); @@ -1289,7 +1324,7 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var fortprintf(fd, " call mpas_pool_get_dimension(newSubPool, 'index_%s', const_index)\n", varname_in_code); fortprintf(fd, " end if\n"); fortprintf(fd, " if (const_index > 0) then\n", spacing); - fortprintf(fd, " %s(%d) %% constituentNames(const_index) = '%s'\n", pointer_name, time_lev, varname); + fortprintf(fd, " %s %% constituentNames(const_index) = '%s'\n", pointer_name_arr, varname); fortprintf(fd, " end if\n", spacing); } @@ -1298,7 +1333,7 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var // Setup dimensions fortprintf(fd, "! 
Setup dimensions for \n", vararrname); i = 1; - fortprintf(fd, " %s(%d) %% dimNames(%d) = 'num_%s'\n", pointer_name, time_lev, i, vararrname); + fortprintf(fd, " %s %% dimNames(%d) = 'num_%s'\n", pointer_name_arr, i, vararrname); string = strdup(vararrdims); tofree = string; @@ -1307,18 +1342,18 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var if(strncmp(token, "Time", 1024) != 0){ i++; if(strncmp(token, "nCells", 1024) == 0 || strncmp(token, "nEdges", 1024) == 0 || strncmp(token, "nVertices", 1024) == 0){ - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); } else { - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); } } while( (token = strsep(&string, " ")) != NULL){ if(strncmp(token, "Time", 1024) != 0){ i++; if(strncmp(token, "nCells", 1024) == 0 || strncmp(token, "nEdges", 1024) == 0 || strncmp(token, "nVertices", 1024) == 0){ - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); } else { - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); } } } @@ -1327,13 +1362,13 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var fortprintf(fd, "\n"); if ( ndims == 0 ) { - fortprintf(fd, " %s(%d) %% scalar = %s\n", pointer_name, time_lev, default_value); + fortprintf(fd, " %s %% scalar = %s\n", pointer_name_arr, default_value); } - fortprintf(fd, " %s(%d) %% defaultValue = %s\n", pointer_name, time_lev, default_value); - fortprintf(fd, " allocate(%s(%d) %% attLists(size(%s(%d) %% constituentNames, dim=1)))\n", pointer_name, time_lev, pointer_name, time_lev); + fortprintf(fd, " %s %% defaultValue = %s\n", pointer_name_arr, default_value); + fortprintf(fd, " allocate(%s %% attLists(size(%s %% constituentNames, dim=1)))\n", pointer_name_arr, pointer_name_arr); - fortprintf(fd, " do index_counter = 1, size(%s(%d) %% constituentNames, dim=1)\n", pointer_name, time_lev); - fortprintf(fd, " allocate(%s(%d) %% attLists(index_counter) %% attList)\n", pointer_name, time_lev); + fortprintf(fd, " do index_counter = 1, size(%s %% constituentNames, dim=1)\n", pointer_name_arr); + fortprintf(fd, " allocate(%s %% attLists(index_counter) %% attList)\n", pointer_name_arr); fortprintf(fd, " end do\n"); for(var_xml = ezxml_child(var_arr_xml, "var"); var_xml; var_xml = var_xml->next){ @@ -1364,7 +1399,7 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var free(tofree); - fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(const_index) %% attList, 'long_name', '%s')\n", pointer_name, time_lev, temp_str); + fortprintf(fd, " call mpas_add_att(%s %% attLists(const_index) %% attList, 'long_name', '%s')\n", pointer_name_arr, temp_str); } if ( varunits != NULL ) { @@ -1381,21 +1416,19 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var free(tofree); - fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(const_index) %% attList, 'units', '%s')\n", pointer_name, time_lev, temp_str); + fortprintf(fd, " call mpas_add_att(%s %% attLists(const_index) %% attList, 'units', '%s')\n", pointer_name_arr, temp_str); } if ( vararrmissingval ) { - fortprintf(fd, " 
call mpas_add_att(%s(%d) %% attLists(const_index) %% attList, 'missing_value', %s)\n", pointer_name, time_lev, missing_value); - // Uncomment to add _FillValue to match missing_value - // fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(const_index) %% attList, '_FillValue', %s)\n", pointer_name, time_lev, missing_value); + fortprintf(fd, " call mpas_add_att(%s %% attLists(const_index) %% attList, '_FillValue', %s)\n", pointer_name_arr, missing_value); } - fortprintf(fd, " %s(%d) %% missingValue = %s\n", pointer_name, time_lev, missing_value); - fortprintf(fd, " %s(%d) %% constituentNames(const_index) = '%s'\n", pointer_name, time_lev, varname); + fortprintf(fd, " %s %% missingValue = %s\n", pointer_name_arr, missing_value); + fortprintf(fd, " %s %% constituentNames(const_index) = '%s'\n", pointer_name_arr, varname); fortprintf(fd, " end if\n", spacing); } - fortprintf(fd, " %s(%d) %% block => block\n", pointer_name, time_lev); + fortprintf(fd, " %s %% block => block\n", pointer_name_arr); } // Parse packages if they are defined @@ -1417,7 +1450,12 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var } for(time_lev = 1; time_lev <= time_levs; time_lev++){ - fortprintf(fd, " %s%s(%d) %% isActive = .true.\n", spacing, pointer_name, time_lev); + if (time_levs > 1) { + snprintf(pointer_name_arr, 1024, "%s(%d)", pointer_name, time_lev); + } else { + snprintf(pointer_name_arr, 1024, "%s", pointer_name); + } + fortprintf(fd, " %s%s %% isActive = .true.\n", spacing, pointer_name_arr); } if (!no_packages) { @@ -1456,6 +1494,7 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa char *string, *tofree, *token; char temp_str[1024]; char pointer_name[1024]; + char pointer_name_arr[1024]; char package_spacing[1024]; char default_value[1024]; char missing_value[1024]; @@ -1502,38 +1541,52 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa // Determine field type and default value. get_field_information(vartype, vardefaultval, default_value, varmissingval, missing_value, &type); + // If a default_value is not specified, but a missing_value is, then set the + // default_value (defaultValue) to missing_value + if(!vardefaultval && varmissingval) { + snprintf(default_value, 1024, "%s ! defaultValue taking specified missing_value", missing_value); + } + // Determine ndims, hasTime, and decomp type build_dimension_information(registry, var_xml, &ndims, &hasTime, &decomp); // Determine name of pointer for this field. - set_pointer_name(type, ndims, pointer_name); - - fortprintf(fd, " allocate(%s(%d))\n", pointer_name, time_levs); + set_pointer_name(type, ndims, pointer_name, time_levs); + if (time_levs > 1) { + fortprintf(fd, " allocate(%s(%d))\n", pointer_name, time_levs); + } else { + fortprintf(fd, " allocate(%s)\n", pointer_name); + } for(time_lev = 1; time_lev <= time_levs; time_lev++){ + if (time_levs > 1) { + snprintf(pointer_name_arr, 1024, "%s(%d)", pointer_name, time_lev); + } else { + snprintf(pointer_name_arr, 1024, "%s", pointer_name); + } fortprintf(fd, "\n"); fortprintf(fd, "! 
Setting up time level %d\n", time_lev); - fortprintf(fd, " %s(%d) %% fieldName = '%s'\n", pointer_name, time_lev, varname); - fortprintf(fd, " %s(%d) %% isVarArray = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% fieldName = '%s'\n", pointer_name_arr, varname); + fortprintf(fd, " %s %% isVarArray = .false.\n", pointer_name_arr); if (decomp != -1) { - fortprintf(fd, " %s(%d) %% isDecomposed = .true.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isDecomposed = .true.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% isDecomposed = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isDecomposed = .false.\n", pointer_name_arr); } if(hasTime) { - fortprintf(fd, " %s(%d) %% hasTimeDimension = .true.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% hasTimeDimension = .true.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% hasTimeDimension = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% hasTimeDimension = .false.\n", pointer_name_arr); } if(ndims > 0){ if(persistence == SCRATCH){ - fortprintf(fd, " %s(%d) %% isPersistent = .false.\n", pointer_name, time_lev); - fortprintf(fd, " %s(%d) %% isActive = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isPersistent = .false.\n", pointer_name_arr); + fortprintf(fd, " %s %% isActive = .false.\n", pointer_name_arr); } else { - fortprintf(fd, " %s(%d) %% isPersistent = .true.\n", pointer_name, time_lev); - fortprintf(fd, " %s(%d) %% isActive = .false.\n", pointer_name, time_lev); + fortprintf(fd, " %s %% isPersistent = .true.\n", pointer_name_arr); + fortprintf(fd, " %s %% isActive = .false.\n", pointer_name_arr); } // Setup dimensions @@ -1543,24 +1596,24 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa i = 1; token = strsep(&string, " "); if(strncmp(token, "Time", 1024) != 0){ - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); i++; } while( (token = strsep(&string, " ")) != NULL){ if(strncmp(token, "Time", 1024) != 0){ - fortprintf(fd, " %s(%d) %% dimNames(%d) = '%s'\n", pointer_name, time_lev, i, token); + fortprintf(fd, " %s %% dimNames(%d) = '%s'\n", pointer_name_arr, i, token); i++; } } free(tofree); } - fortprintf(fd, " %s(%d) %% defaultValue = %s\n", pointer_name, time_lev, default_value); + fortprintf(fd, " %s %% defaultValue = %s\n", pointer_name_arr, default_value); if ( ndims == 0 ) { - fortprintf(fd, " %s(%d) %% scalar = %s\n", pointer_name, time_lev, default_value); + fortprintf(fd, " %s %% scalar = %s\n", pointer_name_arr, default_value); } - fortprintf(fd, " allocate(%s(%d) %% attLists(1))\n", pointer_name, time_lev); - fortprintf(fd, " allocate(%s(%d) %% attLists(1) %% attList)\n", pointer_name, time_lev); + fortprintf(fd, " allocate(%s %% attLists(1))\n", pointer_name_arr); + fortprintf(fd, " allocate(%s %% attLists(1) %% attList)\n", pointer_name_arr); if ( varunits != NULL ) { string = strdup(varunits); @@ -1576,7 +1629,7 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa free(tofree); - fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(1) %% attList, 'units', '%s')\n", pointer_name, time_lev, temp_str); + fortprintf(fd, " call mpas_add_att(%s %% attLists(1) %% attList, 'units', '%s')\n", pointer_name_arr, temp_str); } if ( vardesc != NULL ) { @@ -1593,17 +1646,15 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa free(tofree); - 
fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(1) %% attList, 'long_name', '%s')\n", pointer_name, time_lev, temp_str); + fortprintf(fd, " call mpas_add_att(%s %% attLists(1) %% attList, 'long_name', '%s')\n", pointer_name_arr, temp_str); } if ( varmissingval != NULL ) { - fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(1) %% attList, 'missing_value', %s)\n", pointer_name, time_lev, missing_value); - // Uncomment to add _FillValue to match missing_value - // fortprintf(fd, " call mpas_add_att(%s(%d) %% attLists(1) %% attList, '_FillValue', %s)\n", pointer_name, time_lev, missing_value); + fortprintf(fd, " call mpas_add_att(%s %% attLists(1) %% attList, '_FillValue', %s)\n", pointer_name_arr, missing_value); } - fortprintf(fd, " %s(%d) %% missingValue = %s\n", pointer_name, time_lev, missing_value); + fortprintf(fd, " %s %% missingValue = %s\n", pointer_name_arr, missing_value); - fortprintf(fd, " %s(%d) %% block => block\n", pointer_name, time_lev); + fortprintf(fd, " %s %% block => block\n", pointer_name_arr); } @@ -1627,7 +1678,12 @@ int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVa } for(time_lev = 1; time_lev <= time_levs; time_lev++){ - fortprintf(fd, " %s%s(%d) %% isActive = .true.\n", package_spacing, pointer_name, time_lev); + if (time_levs > 1) { + snprintf(pointer_name_arr, 1024, "%s(%d)", pointer_name, time_lev); + } else { + snprintf(pointer_name_arr, 1024, "%s", pointer_name); + } + fortprintf(fd, " %s%s %% isActive = .true.\n", package_spacing, pointer_name_arr); } if(varpackages != NULL){ @@ -1800,211 +1856,6 @@ int determine_struct_depth(int curLevel, ezxml_t superStruct){/*{{{*/ }/*}}}*/ -int generate_struct_links(FILE *fd, int curLevel, ezxml_t superStruct, ezxml_t registry){/*{{{*/ - ezxml_t subStruct; - ezxml_t var_arr_xml, var_xml; - const char *structname; - const char *vartimelevs; - const char *varname, *vardims, *vartype; - const char *vardefaultval, *varmissingval; - const char *varname_in_code; - int depth; - int err; - int has_time; - int time_lev, time_levs; - int ndims, type; - int decomp; - char *string, *tofree, *token; - char pointer_name[1024]; - char default_value[1024]; - char missing_value[1024]; - - depth = curLevel + 1; - - for(subStruct = ezxml_child(superStruct, "var_struct"); subStruct; subStruct = subStruct->next){ - structname = ezxml_attr(subStruct, "name"); - fortprintf(fd, "! ----------- NEW STRUCT ---------\n"); - fortprintf(fd, "! Get pointers to pools for struct %s\n", structname); - fortprintf(fd, "! 
--------------------------------\n"); - if(curLevel == 0){ - fortprintf(fd, " call mpas_pool_get_subpool(currentBlock %% structs, '%s', poolLevel%d)\n", structname, curLevel+1); - fortprintf(fd, " if(associated(prevBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_subpool(prevBlock %% structs, '%s', prevPoolLevel%d)\n", structname, curLevel+1); - fortprintf(fd, " else\n"); - fortprintf(fd, " nullify(prevPoolLevel%d)\n", curLevel+1); - fortprintf(fd, " end if\n"); - fortprintf(fd, " if(associated(nextBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_subpool(nextBlock %% structs, '%s', nextPoolLevel%d)\n", structname, curLevel+1); - fortprintf(fd, " else\n"); - fortprintf(fd, " nullify(nextPoolLevel%d)\n", curLevel+1); - fortprintf(fd, " end if\n"); - } else { - fortprintf(fd, " call mpas_pool_get_subpool(poolLevel%d, '%s', poolLevel%d)\n", curLevel, structname, curLevel+1); - fortprintf(fd, " if(associated(prevBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_subpool(prevPoolLevel%d, '%s', prevPoolLevel%d)\n", curLevel, structname, curLevel+1); - fortprintf(fd, " else\n"); - fortprintf(fd, " nullify(prevPoolLevel%d)\n", curLevel+1); - fortprintf(fd, " end if\n"); - fortprintf(fd, " if(associated(nextBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_subpool(nextPoolLevel%d, '%s', nextPoolLevel%d)\n", curLevel, structname, curLevel+1); - fortprintf(fd, " else\n"); - fortprintf(fd, " nullify(nextPoolLevel%d)\n", curLevel+1); - fortprintf(fd, " end if\n"); - } - - fortprintf(fd, "\n"); - // Link var arrays - for(var_arr_xml = ezxml_child(subStruct, "var_array"); var_arr_xml; var_arr_xml = var_arr_xml->next){/*{{{*/ - varname = ezxml_attr(var_arr_xml, "name"); - vardims = ezxml_attr(var_arr_xml, "dimensions"); - vartimelevs = ezxml_attr(var_arr_xml, "time_levs"); - vartype = ezxml_attr(var_arr_xml, "type"); - vardefaultval = ezxml_attr(var_arr_xml, "default_value"); - varmissingval = ezxml_attr(var_arr_xml, "missing_value"); - - if(!vartimelevs){ - vartimelevs = ezxml_attr(subStruct, "time_levs"); - } - - if(vartimelevs){ - time_levs = atoi(vartimelevs); - if(time_levs < 1){ - time_levs = 1; - } - } else { - time_levs = 1; - } - - if(!varmissingval){ - varmissingval = vardefaultval; - } - - // Determine field type and default value. - get_field_information(vartype, vardefaultval, default_value, varmissingval, missing_value, &type); - - // Determine number of dimensions - // and decomp type - build_dimension_information(registry, var_arr_xml, &ndims, &has_time, &decomp); - ndims++; // Add a dimension for var_arrays - - // Using type and ndims, determine name of pointer for field. - set_pointer_name(type, ndims, pointer_name); - - for(time_lev = 1; time_lev <= time_levs; time_lev++){ - fortprintf(fd, "! 
Linking %s for time level %d\n", varname, time_lev); - fortprintf(fd, " call mpas_pool_get_field(poolLevel%d, '%s', %s, %d)\n", curLevel+1, varname, pointer_name, time_lev); - fortprintf(fd, " if(associated(%s)) then\n", pointer_name); - fortprintf(fd, "#ifdef MPAS_DEBUG\n"); - fortprintf(fd, " call mpas_log_write('Linking %s')\n", varname); - fortprintf(fd, "#endif\n"); - fortprintf(fd, " if(associated(prevBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_field(prevPoolLevel%d, '%s', %s %% prev, %d)\n", curLevel+1, varname, pointer_name, time_lev); - fortprintf(fd, " end if\n"); - fortprintf(fd, " if(associated(nextBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_field(nextPoolLevel%d, '%s', %s %% next, %d)\n", curLevel+1, varname, pointer_name, time_lev); - fortprintf(fd, " end if\n"); - - if(decomp == CELLS){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% cellsToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% cellsToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% cellsToCopy\n", pointer_name); - } else if(decomp == EDGES){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% edgesToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% edgesToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% edgesToCopy\n", pointer_name); - } else if(decomp == VERTICES){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% verticesToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% verticesToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% verticesToCopy\n", pointer_name); - } - - fortprintf(fd, " end if\n"); - } - - fortprintf(fd, "\n"); - }/*}}}*/ - - // Link independent vars - for(var_xml = ezxml_child(subStruct, "var"); var_xml; var_xml = var_xml->next){/*{{{*/ - varname = ezxml_attr(var_xml, "name"); - vardims = ezxml_attr(var_xml, "dimensions"); - vartimelevs = ezxml_attr(var_xml, "time_levs"); - vartype = ezxml_attr(var_xml, "type"); - vardefaultval = ezxml_attr(var_xml, "default_value"); - varmissingval = ezxml_attr(var_xml, "missing_value"); - varname_in_code = ezxml_attr(var_xml, "name_in_code"); - - if(!vartimelevs){ - vartimelevs = ezxml_attr(subStruct, "time_levs"); - } - - if(vartimelevs){ - time_levs = atoi(vartimelevs); - if(time_levs < 1){ - time_levs = 1; - } - } else { - time_levs = 1; - } - - if(!varname_in_code){ - varname_in_code = ezxml_attr(var_xml, "name"); - } - - if(!varmissingval){ - varmissingval = vardefaultval; - } - - // Determine field type and default value. - get_field_information(vartype, vardefaultval, default_value, varmissingval, missing_value, &type); - - // Determine number of dimensions - // and decomp type - build_dimension_information(registry, var_xml, &ndims, &has_time, &decomp); - - // Using type and ndims, determine name of pointer for field. - set_pointer_name(type, ndims, pointer_name); - - for(time_lev = 1; time_lev <= time_levs; time_lev++){ - fortprintf(fd, "! 
Linking %s for time level %d with name\n", varname, time_lev, varname_in_code); - fortprintf(fd, "#ifdef MPAS_DEBUG\n"); - fortprintf(fd, " call mpas_log_write('Linking %s with name %s')\n", varname, varname_in_code); - fortprintf(fd, "#endif\n"); - fortprintf(fd, " call mpas_pool_get_field(poolLevel%d, '%s', %s, %d)\n", curLevel+1, varname_in_code, pointer_name, time_lev); - fortprintf(fd, " if(associated(%s)) then\n", pointer_name); - fortprintf(fd, " if(associated(prevBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_field(prevPoolLevel%d, '%s', %s %% prev, %d)\n", curLevel+1, varname_in_code, pointer_name, time_lev); - fortprintf(fd, " end if\n"); - fortprintf(fd, " if(associated(nextBlock)) then\n"); - fortprintf(fd, " call mpas_pool_get_field(nextPoolLevel%d, '%s', %s %% next, %d)\n", curLevel+1, varname_in_code, pointer_name, time_lev); - fortprintf(fd, " end if\n"); - - if(decomp == CELLS){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% cellsToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% cellsToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% cellsToCopy\n", pointer_name); - } else if(decomp == EDGES){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% edgesToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% edgesToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% edgesToCopy\n", pointer_name); - } else if(decomp == VERTICES){ - fortprintf(fd, " %s %% sendList => currentBlock %% parinfo %% verticesToSend\n", pointer_name); - fortprintf(fd, " %s %% recvList => currentBlock %% parinfo %% verticesToRecv\n", pointer_name); - fortprintf(fd, " %s %% copyList => currentBlock %% parinfo %% verticesToCopy\n", pointer_name); - } - fortprintf(fd, " end if\n"); - - fortprintf(fd, "\n"); - } - }/*}}}*/ - - err = generate_struct_links(fd, curLevel+1, subStruct, registry); - } - - return 0; -}/*}}}*/ - - int generate_immutable_struct_contents(FILE *fd, const char *streamname, ezxml_t varstruct_xml){/*{{{*/ ezxml_t var_xml, vararr_xml, substruct_xml; diff --git a/src/tools/registry/gen_inc.h b/src/tools/registry/gen_inc.h index 0859823fb9..96db3de8b3 100644 --- a/src/tools/registry/gen_inc.h +++ b/src/tools/registry/gen_inc.h @@ -10,9 +10,8 @@ #include "ezxml.h" void write_model_variables(ezxml_t registry); -int write_field_pointers(FILE* fd); int write_field_pointer_arrays(FILE* fd); -int set_pointer_name(int type, int ndims, char *pointer_name); +int set_pointer_name(int type, int ndims, char *pointer_name, int time_levs); int add_package_to_list(const char * package, const char * package_list); int build_struct_package_lists(ezxml_t currentPosition, char * out_packages); int get_dimension_information(ezxml_t registry, const char *test_dimname, int *has_time, int *decomp); @@ -27,7 +26,6 @@ int parse_var_array(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t var int parse_var(FILE *fd, ezxml_t registry, ezxml_t superStruct, ezxml_t currentVar, const char * corename); int parse_struct(FILE *fd, ezxml_t registry, ezxml_t superStruct, int subpool, const char *parentname, const char * corename); int determine_struct_depth(int curLevel, ezxml_t superStruct); -int generate_struct_links(FILE *fd, int curLevel, ezxml_t superStruct, ezxml_t registry); int generate_field_exchanges(FILE *fd, int curLevel, ezxml_t superStruct); int generate_field_halo_exchanges_and_copies(ezxml_t registry); int 
generate_field_inputs(FILE *fd, int curLevel, ezxml_t superStruct);
diff --git a/testing_and_setup/atmosphere/setup_atm_run_dir b/testing_and_setup/atmosphere/setup_atm_run_dir
new file mode 100755
index 0000000000..b0734c2f5f
--- /dev/null
+++ b/testing_and_setup/atmosphere/setup_atm_run_dir
@@ -0,0 +1,174 @@
+#! /bin/sh
+
+# Set up a run directory for the MPAS init_atmosphere and atmosphere cores.
+
+######################################################################
+# usage() - Display the usage message
+######################################################################
+usage()
+{
+	printf "Usage: setup_atm_run_dir setup-dir\n"
+}
+
+######################################################################
+# init_atmosphere_setup()
+#
+# $1 - Run directory
+# $2 - MPAS-Model Source Code Directory
+# Set up the init_atmosphere core in the run directory ($1) by linking the
+# init_atmosphere executable and copying the init_atmosphere namelist and
+# streams from the default_inputs/ directory.
+#
+# On error, this function will return 1; otherwise, 0 will be
+# returned.
+#
+######################################################################
+init_atmosphere_setup()
+{
+	printf "Setting up the init_atmosphere core ...\n"
+	rundir=$1
+	mpasdir=$2
+
+	# See if the init_atmosphere_model is compiled
+	if ! [ -f "${mpasdir}/init_atmosphere_model" ]; then
+		printf "The MPAS directory does not appear to have the init_atmosphere core compiled!\n"
+		return 1
+	fi
+
+	ln -s "${mpasdir}/init_atmosphere_model" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to link 'init_atmosphere_model' from %s\n" $mpasdir
+		return 1
+	fi
+
+	cp "${mpasdir}/default_inputs/namelist.init_atmosphere" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to copy 'namelist.init_atmosphere' from %s\n" "${mpasdir}/default_inputs"
+		return 1
+	fi
+
+	cp "${mpasdir}/default_inputs/streams.init_atmosphere" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to copy 'streams.init_atmosphere' from %s\n" "${mpasdir}/default_inputs"
+		return 1
+	fi
+
+	printf "Successfully set up the run directory for the init_atmosphere model\n"
+	return 0
+}
+
+######################################################################
+# atmosphere_setup()
+#
+# $1 - Run directory
+# $2 - MPAS-Model Source Code Directory
+# Set up the atmosphere core in the run directory ($1) by linking the
+# atmosphere_model executable, physics lookup tables, and by copying
+# the needed namelist, streams, and stream_lists from the default_inputs/
+# directory.
+#
+# On error, this function will return 1; otherwise, 0 will be
+# returned.
+#
+######################################################################
+atmosphere_setup()
+{
+	printf "Setting up the atmosphere core ...\n"
+	rundir=$1
+	mpasdir=$2
+
+	# See if the atmosphere core is compiled
+	if ! [ -f "${mpasdir}/atmosphere_model" ]; then
+		printf "The MPAS directory does not appear to have the atmosphere core compiled!\n"
+		return 1
+	fi
+
+	ln -s "${mpasdir}/atmosphere_model" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to link 'atmosphere_model' from %s\n" $mpasdir
+		return 1
+	fi
+
+	cp "${mpasdir}/default_inputs/namelist.atmosphere" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to copy 'namelist.atmosphere' from %s\n" "${mpasdir}/default_inputs"
+		return 1
+	fi
+
+	cp "${mpasdir}/default_inputs/streams.atmosphere" $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to copy 'streams.atmosphere' from %s\n" "${mpasdir}/default_inputs"
+		return 1
+	fi
+
+	cp ${mpasdir}/default_inputs/stream_list.atmosphere.* $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to copy 'stream_list.atmosphere.*' from %s\n" "${mpasdir}/default_inputs"
+		return 1
+	fi
+
+	ln -s ${mpasdir}/src/core_atmosphere/physics/physics_wrf/files/* $rundir
+	if [ $? -ne 0 ]; then
+		printf "Failed to link physics files from %s\n" "${mpasdir}/src/core_atmosphere/physics/physics_wrf/files"
+		return 1
+	fi
+
+	printf "Successfully set up the run directory for the atmosphere model\n"
+	return 0
+}
+
+
+########################################################
+#
+# setup_atm_run_dir
+#
+# \brief Copy and link the needed files for running the init_atmosphere and
+# atmosphere cores.
+# \details
+# Given a directory, copy or link all the executables, namelists,
+# streams, stream_lists, and physics lookup tables needed for both the
+# init_atmosphere and atmosphere cores.
+#
+# Currently, this script needs to be in the
+# testing_and_setup/atmosphere directory of the MPAS-Model repository that is
+# to be set up. If either the init_atmosphere or atmosphere core is
+# compiled in the MPAS-Model directory, it will be set up in the run
+# directory. If a core is not compiled, it will be skipped.
+#
+########################################################
+
+if [ $# -ne 1 ]; then
+	printf "Please provide a directory in which to set up an MPAS run\n"
+	usage
+	exit 1
+fi
+
+rundir=$1
+
+if ! [ -d $rundir ]; then
+	printf "The given path does not appear to be a directory\n"
+	exit 1
+fi
+
+# Find the location of this script, which will be used to find the needed
+# MPAS files. Note: $0 may fail here with some shells and some uncommon
+# executions, see: http://mywiki.wooledge.org/BashFAQ/028
+cwd=`pwd`
+cd `dirname $0`
+this_script=`pwd`
+cd $cwd
+mpasdir=`dirname "$this_script"`
+mpasdir=`dirname "$mpasdir"`
+
+# See if this is an MPAS directory (Check for src/core_atmosphere,
+# and src/core_init_atmosphere)
+if ! [ -d "${mpasdir}/src/core_atmosphere" ] || !
[ -d "${mpasdir}/src/core_init_atmosphere" ]; then + printf "ERROR: Can't seem to locate MPAS-Model directory!\n" + printf "ERROR: Please ensure that this script is in the testing_and_setup/atmosphere directory of\n" + printf "ERROR: the MPAS-Model you want to setup\n" + exit 1 +fi + +init_atmosphere_setup $rundir $mpasdir + +atmosphere_setup $rundir $mpasdir diff --git a/testing_and_setup/compass/manage_regression_suite.py b/testing_and_setup/compass/manage_regression_suite.py index a8ef17551a..a64e708305 100755 --- a/testing_and_setup/compass/manage_regression_suite.py +++ b/testing_and_setup/compass/manage_regression_suite.py @@ -147,7 +147,7 @@ def process_test_setup(test_tag, config_file, work_dir, model_runtime, # }}} -def process_test_clean(test_tag, work_dir, suite_script): # {{{ +def process_test_clean(test_tag, work_dir): # {{{ dev_null = open('/dev/null', 'a') # Process test attributes @@ -304,15 +304,13 @@ def clean_suite(suite_tag, work_dir): # {{{ for child in suite_tag: # Process <test> children within the <regression_suite> if child.tag == 'test': - process_test_clean(child, work_dir, regression_script) + process_test_clean(child, work_dir) # }}} -def summarize_suite(suite_tag): # {{{ +def get_test_case_procs(suite_tag): # {{{ - max_procs = 1 - max_threads = 1 - max_cores = 1 + testcases = {} for child in suite_tag: if child.tag == 'test': @@ -367,9 +365,36 @@ def summarize_suite(suite_tag): # {{{ name = case.attrib['name'] cases.append(name) + prereqs = list() + for config_prereq in config_root.iter('prerequisite'): + prereq = dict() + for tag in ['core', 'configuration', 'resolution', 'test']: + prereq[tag] = config_prereq.attrib[tag] + + # Make sure the prerequisite is already in the test suite + found = False + for other_name, other_test in testcases.items(): + match = [prereq[tag] == other_test[tag] for tag in + ['core', 'configuration', 'resolution', 'test']] + if all(match): + found = True + prereq['name'] = other_name + break + + if not found: + raise ValueError( + 'Prerequisite of {} does not precede it in the test ' + 'suite: {} {} {} {}'.format( + test_name, prereq['core'], prereq['configuration'], + prereq['resolution'], prereq['test'])) + + prereqs.append(prereq) + del config_root del config_tree + procs = 1 + threads = 1 # Loop over all files in test_path that have the .xml extension. 
for file in os.listdir('{}'.format(test_path)): if fnmatch.fnmatch(file, '*.xml'): @@ -395,19 +420,33 @@ def summarize_suite(suite_tag): # {{{ except (KeyError, ValueError): threads = 1 - cores = threads * procs + del config_root + del config_tree + testcases[test_name] = {'core': test_core, + 'configuration': test_configuration, + 'resolution': test_resolution, + 'test': test_test, + 'path': test_path, + 'procs': procs, + 'threads': threads, + 'prereqs': prereqs} - if procs > max_procs: - max_procs = procs + return testcases # }}} - if threads > max_threads: - max_threads = threads - if cores > max_cores: - max_cores = cores +def summarize_suite(testcases): # {{{ - del config_root - del config_tree + max_procs = 1 + max_threads = 1 + max_cores = 1 + for name in testcases: + procs = testcases[name]['procs'] + threads = testcases[name]['threads'] + cores = threads * procs + + max_procs = max(max_procs, procs) + max_threads = max(max_threads, threads) + max_cores = max(max_cores, cores) print("\n") print(" Summary of test cases:") @@ -417,7 +456,7 @@ def summarize_suite(suite_tag): # {{{ # }}} -if __name__ == "__main__": +def main (): # {{{ # Define and process input arguments parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawTextHelpFormatter) @@ -495,14 +534,15 @@ def summarize_suite(suite_tag): # {{{ if args.setup: print("\n") print("Setting Up Test Cases:") + testcases = get_test_case_procs(suite_root) setup_suite(suite_root, args.work_dir, args.model_runtime, args.config_file, args.baseline_dir, args.verbose) - summarize_suite(suite_root) + summarize_suite(testcases) if args.verbose: cmd = ['cat', args.work_dir + '/manage_regression_suite.py.out'] print('\nCase setup output:') - print(subprocess.check_output(cmd)) + print(subprocess.check_output(cmd).decode('utf-8')) write_history = True # Write the history of this command to the command_history file, for @@ -533,5 +573,10 @@ def summarize_suite(suite_tag): # {{{ history_file.write('**************************************************' '*********************\n') history_file.close() +# }}} + + +if __name__ == "__main__": + main() # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/testing_and_setup/compass/setup_testcase.py b/testing_and_setup/compass/setup_testcase.py index d0ed603ed3..ed3538d707 100755 --- a/testing_and_setup/compass/setup_testcase.py +++ b/testing_and_setup/compass/setup_testcase.py @@ -25,6 +25,8 @@ from six.moves import configparser import textwrap import netCDF4 +import shutil +import errno try: from collections import defaultdict @@ -516,6 +518,8 @@ def generate_driver_scripts(config_file, configs): # {{{ if not os.path.exists(init_path): os.makedirs(init_path) + link_load_compass_env(init_path, configs) + # Create script file script = open('{}/{}'.format(init_path, name), 'w') @@ -1176,61 +1180,7 @@ def add_links(config_file, configs): # {{{ for child in config_root: # Process an <add_link> tag if child.tag == 'add_link': - try: - source = child.attrib['source'] - except KeyError: - print(" add_link tag missing a 'source' attribute.") - print(" Exiting...") - sys.exit(1) - - try: - source_path_name = child.attrib['source_path'] - - keyword_path = False - if source_path_name.find('work_') >= 0: - keyword_path = True - elif source_path_name.find('script_') >= 0: - keyword_path = True - - if not keyword_path: - if configs.has_option('paths', source_path_name): - source_path = configs.get('paths', source_path_name) - else: - source_path = 'NONE' - - if source_path == 
'NONE': - if configs.has_option('script_paths', - source_path_name): - source_path = configs.get('script_paths', - source_path_name) - else: - source_path = 'NONE' - - if source_path == 'NONE': - print("ERROR: source_path on <add_link> tag is '{}' " - "which is not defined".format(source_path_name)) - print("Exiting...") - sys.exit(1) - - else: - source_arr = source_path_name.split('_') - base_name = source_arr[0] - subname = '{}_{}'.format(source_arr[1], source_arr[2]) - - if base_name == 'work': - file_base_path = 'work_dir' - elif base_name == 'script': - file_base_path = 'script_path' - - if subname in {'core_dir', 'configuration_dir', - 'resolution_dir', 'test_dir', 'case_dir'}: - source_path = '{}/{}'.format( - configs.get('script_paths', file_base_path), - configs.get('script_paths', subname)) - - source_file = '{}/{}'.format(source_path, source) - except KeyError: - source_file = '{}'.format(source) + source_file = get_source_file(child, configs) dest = child.attrib['dest'] old_cwd = os.getcwd() @@ -1240,32 +1190,95 @@ def add_links(config_file, configs): # {{{ '{}'.format(dest)], stdout=dev_null, stderr=dev_null) os.chdir(old_cwd) - del source - del dest # Process an <add_executable> tag elif child.tag == 'add_executable': source_attr = child.attrib['source'] dest = child.attrib['dest'] if not configs.has_option("executables", source_attr): - print('ERROR: Configuration {} requires a definition of ' + raise ValueError('Configuration {} requires a definition of ' '{}.'.format(config_file, source_attr)) - sys.exit(1) - else: - source = configs.get("executables", source_attr) + source = configs.get("executables", source_attr) subprocess.check_call(['ln', '-sf', '{}'.format(source), '{}/{}'.format(base_path, dest)], stdout=dev_null, stderr=dev_null) - del source_attr - del source - del dest - del config_tree - del config_root dev_null.close() # }}} +def get_source_file(child, config): # {{{ + try: + source = child.attrib['source'] + except KeyError: + raise KeyError("{} tag missing a 'source' attribute.".format( + child.tag)) + + try: + source_path_name = child.attrib['source_path'] + except KeyError: + return source + + keyword_path = any(substring in source_path_name for + substring in ['work_', 'script_']) + + if keyword_path: + source_arr = source_path_name.split('_') + base_name = source_arr[0] + if base_name == 'work': + file_base_path = 'work_dir' + elif base_name == 'script': + file_base_path = 'script_path' + else: + raise ValueError('Unexpected source prefix {} in {} tag'.format( + base_name, child.tag)) + + subname = '{}_{}'.format(source_arr[1], source_arr[2]) + if subname not in ['core_dir', 'configuration_dir', + 'resolution_dir', 'test_dir', 'case_dir']: + raise ValueError('Unexpected source suffix {} in {} tag'.format( + subname, child.tag)) + + source_path = '{}/{}'.format( + config.get('script_paths', file_base_path), + config.get('script_paths', subname)) + else: + if config.has_option('paths', source_path_name): + source_path = config.get('paths', source_path_name) + else: + if not config.has_option('script_paths', source_path_name): + raise ValueError('Undefined source_path on {} tag: {}'.format( + child.tag, source_path_name)) + source_path = config.get('script_paths', source_path_name) + + source_file = '{}/{}'.format(source_path, source) + return source_file +# }}} + + +def copy_files(config_file, config): # {{{ + config_tree = ET.parse(config_file) + config_root = config_tree.getroot() + + case = config_root.attrib['case'] + + # Determine the path for the case 
directory + test_path = '{}/{}'.format(config.get('script_paths', 'test_dir'), case) + base_path = '{}/{}'.format(config.get('script_paths', 'work_dir'), + test_path) + + # Process all children tags + for child in config_root: + # Process an <copy_file> tag + if child.tag == 'copy_file': + source = get_source_file(child, config) + + dest = '{}/{}'.format(base_path, child.attrib['dest']) + + shutil.copy(source, dest) +# }}} + + def make_case_dir(config_file, base_path): # {{{ config_tree = ET.parse(config_file) config_root = config_tree.getroot() @@ -1539,6 +1552,24 @@ def get_case_name(config_file): # {{{ return name # }}} + +def link_load_compass_env(init_path, configs): # {{{ + + if configs.getboolean('conda', 'link_load_compass'): + target = '{}/{}/load_compass_env.sh'.format( + configs.get('script_paths', 'script_path'), + configs.get('script_paths', 'core_dir')) + + link_name = '{}/load_compass_env.sh'.format(init_path) + try: + os.symlink(target, link_name) + except OSError as e: + if e.errno == errno.EEXIST: + os.remove(link_name) + os.symlink(target, link_name) + else: + raise e +# }}} # }}} @@ -1580,6 +1611,10 @@ def get_case_name(config_file): # {{{ help="If set, script will create case directories in " "work_dir rather than the current directory.", metavar="PATH") + parser.add_argument("--link_load_compass", dest="link_load_compass", + action="store_true", + help="If set, a link to <core>/load_compass_env.sh is " + "included with each test case") args = parser.parse_args() @@ -1668,6 +1703,15 @@ def get_case_name(config_file): # {{{ config.set('script_input_arguments', 'model_runtime', args.model_runtime) + if not config.has_section('conda'): + config.add_section('conda') + + if not config.has_option('conda', 'link_load_compass'): + config.set('conda', 'link_load_compass', 'False') + + if args.link_load_compass: + config.set('conda', 'link_load_compass', 'True') + # Build variables for history output old_dir = os.getcwd() os.chdir(config.get('script_paths', 'script_path')) @@ -1765,6 +1809,8 @@ def get_case_name(config_file): # {{{ # Process all links for this case add_links(config_file, config) + copy_files(config_file, config) + # Generate run scripts for this case. generate_run_scripts(config_file, '{}'.format(case_path), config)