diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 4548c2e25..000000000 --- a/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -# -# .coveragerc to control coverage.py -# - -[run] -branch = True -omit = - setup.py - iris_grib/tests/* - .eggs/* - - -[report] -exclude_lines = - pragma: no cover - def __repr__ - if __name__ == .__main__.: diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 2a930a038..000000000 --- a/.flake8 +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -exclude = - # Auto-generated file - iris_grib/_grib_cf_map.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..773fd4644 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,19 @@ +# Reference: +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot +# - https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#groups + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" + groups: + dependencies: + patterns: + - "*" + labels: + - "New: Pull Request" + - "Bot" diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml new file mode 100644 index 000000000..2c9e24673 --- /dev/null +++ b/.github/workflows/ci-manifest.yml @@ -0,0 +1,23 @@ +name: ci-manifest + +on: + pull_request: + branches: + - "*" + + push: + branches-ignore: + - "auto-update-lockfiles" + - "pre-commit-ci-update-config" + - "dependabot/*" + + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + manifest: + name: "check-manifest" + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.04.2 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml 
index e4a6c80dd..7f356b4d4 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -10,5 +10,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.03.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.04.2 secrets: inherit diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5032e66dd..9786d478c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,15 +29,14 @@ repos: # Don't commit to main branch. - id: no-commit-to-branch -# TODO: pending the addition of ruff, including config file(s) - iris-grib#384 -#- repo: https://github.com/astral-sh/ruff-pre-commit -# rev: "v0.3.4" -# hooks: -# - id: ruff -# types: [file, python] -# args: [--fix, --show-fixes] -# - id: ruff-format -# types: [file, python] +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.3.4" + hooks: + - id: ruff + types: [file, python] + args: [--fix, --show-fixes] + - id: ruff-format + types: [file, python] - repo: https://github.com/codespell-project/codespell rev: "v2.2.6" @@ -46,12 +45,6 @@ repos: types_or: [asciidoc, python, markdown, rst] additional_dependencies: [tomli] -- repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 - hooks: - - id: flake8 - types: [file, python] - - repo: https://github.com/asottile/blacken-docs rev: 1.16.0 hooks: diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 000000000..a976ca9f3 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,207 @@ +extend = "pyproject.toml" + +lint.ignore = [ + # pydocstyle (D) + # https://docs.astral.sh/ruff/rules/#pydocstyle-d + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D103", # Missing docstring in public function + "D105", # Missing docstring in magic method + "D200", # One-line docstring should fit on one line + "D202", # No blank lines allowed after function docstring + 
"D205", # 1 blank line required between summary line and description + "D212", # Multi-line docstring summary should start at the second line + "D300", # Use triple double quotes """ + "D301", # Use r""" if any backslashes in a docstring + "D400", # First line should end with a period + "D401", # First line of docstring should be in imperative mood + "D406", # Section name should end with a newline + "D407", # Missing dashed underline after section + + # isort (I) + # https://docs.astral.sh/ruff/rules/#isort-i + "I001", # Import block is un-sorted or un-formatted + + # Numpy-specific rules (NPY) + # https://docs.astral.sh/ruff/rules/#numpy-specific-rules-npy + "NPY002", + + # flake8-builtins (A) + # https://docs.astral.sh/ruff/rules/#flake8-builtins-a + "A001", + "A002", + + # flake8-annotations (ANN) + "ANN001", + "ANN002", + "ANN003", + "ANN101", + "ANN102", + "ANN201", + "ANN202", + "ANN204", + "ANN205", + "ANN206", + + # flake8-unused-arguments (ARG) + # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg + "ARG001", + "ARG002", + "ARG005", + + # flake8-bugbear (B) + # https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "B007", + "B018", + "B028", + "B904", + + # flake8-blind-except (BLE) + # https://docs.astral.sh/ruff/rules/#flake8-blind-except-ble + "BLE001", + + # flake8-comprehensions (C4) + # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "C408", + "C901", + + # flake8-datetimez (DTZ) + # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz + "DTZ001", + "DTZ006", + + # flake8-errmsg (EM) + "EM101", + "EM102", + "EM103", + + # eradicate (ERA) + # https://docs.astral.sh/ruff/rules/#eradicate-era + "ERA001", + + # flake8-boolean-trap (FBT) + # https://docs.astral.sh/ruff/rules/#flake8-boolean-trap-fbt + "FBT001", + "FBT002", + "FBT003", + + # flake8-fixme (FIX) + # https://docs.astral.sh/ruff/rules/#flake8-fixme-fix + "FIX002", + "FIX003", + + # pep8-naming (N) + # https://docs.astral.sh/ruff/rules/#pep8-naming-n + "N801", + 
"N802", + "N803", + "N806", + "N999", + + # Perflint (PERF) + # https://docs.astral.sh/ruff/rules/#perflint-perf + "PERF203", + "PERF401", + + # Refactor (R) + # https://docs.astral.sh/ruff/rules/#refactor-r + "PLR0402", + "PLR0912", + "PLR0913", + "PLR0915", + "PLR1714", + "PLR1722", + "PLR2004", + "PLR5501", + + # Warning (W) + # https://docs.astral.sh/ruff/rules/#warning-w + "PLW0602", + "PLW2901", + + # flake8-pytest-style (PT) + "PT009", + "PT027", + + # flake8-use-pathlib (PTH) + # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth + "PTH100", + "PTH107", + "PTH110", + "PTH111", + "PTH112", + "PTH113", + "PTH118", + "PTH120", + "PTH122", + "PTH123", + + # flake8-pyi (PYI) + # https://docs.astral.sh/ruff/rules/#flake8-pyi-pyi + "PYI024", + + # flake8-return (RET) + # https://docs.astral.sh/ruff/rules/#flake8-return-ret + "RET503", + "RET504", + "RET505", + "RET506", + + # flake8-raise (RSE) + # https://docs.astral.sh/ruff/rules/#flake8-raise-rse + "RSE102", + + # Ruff-specific rules (RUF) + # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf + "RUF005", + "RUF012", + "RUF015", + + # flake8-bandit (S) + # https://docs.astral.sh/ruff/rules/#flake8-bandit-s + "S101", + "S110", + "S603", + "S607", + + # flake8-simplify (SIM) + # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim + "SIM102", + "SIM108", + "SIM115", + "SIM117", + "SIM118", + + # flake8-self (SLF) + # https://docs.astral.sh/ruff/rules/#flake8-self-slf + "SLF001", + + # flake8-print (T20) + # https://docs.astral.sh/ruff/rules/#flake8-print-t20 + "T201", + + # flake8-todos (TD) + # https://docs.astral.sh/ruff/rules/#flake8-todos-td + "TD001", + "TD002", + "TD003", + "TD004", + "TD005", + "TD006", + + # tryceratops (TRY) + # https://docs.astral.sh/ruff/rules/#tryceratops-try + "TRY003", + "TRY004", + "TRY301", + + # pyupgrade (UP) + # https://docs.astral.sh/ruff/rules/#pyupgrade-up + "UP008", + "UP009", + "UP018", + "UP027", + "UP031", + "UP032", +] \ No newline at end of file diff 
--git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..f0382ab23 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,15 @@ +prune .github +prune .nox +prune docs +prune requirements +recursive-include requirements *.txt +recursive-include iris_grib *.cml *.grib2 *.json *.nc *.py + +exclude .coveragerc +exclude .flake8 +exclude .gitignore +exclude .pre-commit-config.yaml +exclude .readthedocs.yml +exclude .ruff.toml +include *.md +include *.py diff --git a/docs/conf.py b/docs/conf.py index eaad2e0c8..d70991dbb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,7 +19,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('..')) +sys.path.append(os.path.abspath("..")) # -- General configuration ------------------------------------------------ @@ -30,29 +30,29 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = u'iris-grib' -copyright = u'2022, Met Office' -author = u'Met Office' +project = "iris-grib" +copyright = "2022, Met Office" +author = "Met Office" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -76,7 +76,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -94,7 +94,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -116,15 +116,15 @@ # -- Autodoc ------------------------------------------------------------------ -autodoc_member_order = 'groupwise' -autodoc_default_flags = ['show-inheritance'] +autodoc_member_order = "groupwise" +autodoc_default_flags = ["show-inheritance"] # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -218,20 +218,17 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'iris-gribdoc' +htmlhelp_basename = "iris-gribdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 
# 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', - # Latex figure (float) alignment # 'figure_align': 'htbp', } @@ -240,8 +237,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'iris-grib.tex', u'iris-grib Documentation', - u'Met Office', 'manual'), + (master_doc, "iris-grib.tex", "iris-grib Documentation", "Met Office", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -269,10 +265,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'iris-grib', u'iris-grib Documentation', - [author], 1) -] +man_pages = [(master_doc, "iris-grib", "iris-grib Documentation", [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False @@ -284,9 +277,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'iris-grib', u'iris-grib Documentation', - author, 'iris-grib', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "iris-grib", + "iris-grib Documentation", + author, + "iris-grib", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. @@ -304,6 +303,6 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'python': ('https://docs.python.org/', None), - 'iris': ('https://scitools-iris.readthedocs.io/en/latest/', None), + "python": ("https://docs.python.org/", None), + "iris": ("https://scitools-iris.readthedocs.io/en/stable/", None), } diff --git a/docs/index.rst b/docs/index.rst index a0914cdc9..d021c57d5 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -3,92 +3,210 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Iris-grib v0.19 -=============== +Iris-grib v0.20 (unreleased) +============================ The library ``iris-grib`` provides functionality for converting between weather and -climate datasets that are stored as GRIB files and :class:`Iris cubes `. +climate datasets that are stored as GRIB files and Iris :class:`~iris.cube.Cube`\s. GRIB files can be loaded as Iris cubes using ``iris-grib`` so that you can use Iris -for analysing and visualising the contents of the GRIB files. Iris cubes can be saved to -GRIB files using ``iris-grib``. +for analysing and visualising the contents of the GRIB files. Iris cubes can also be +saved to GRIB edition-2 files using ``iris-grib``. + + +Simple GRIB Loading and Saving with Iris +---------------------------------------- +You can use the functionality provided by ``iris-grib`` directly within Iris +without having to explicitly import ``iris-grib``, as long as you have both Iris +and ``iris-grib`` installed in your Python environment. + +**This is the preferred route if no special control is required.** + +.. testsetup:: + + import iris + import iris_grib + import warnings + warnings.simplefilter('ignore') + cube = iris.load_cube(iris.sample_data_path("rotated_pole.nc")) + iris.save(cube, 'testfile.grib', saver='grib2') + +For example, to load GRIB data : + + >>> cube = iris.load_cube('testfile.grib') + +Similarly, you can save cubes to a GRIB file directly from Iris : + + >>> iris.save(cube, 'my_file.grib2') + +.. 
note:: + As the filename suggests, **only saving to GRIB2 is currently supported**. + + +Phenomenon translation +---------------------- +``iris-grib`` attempts to translate between CF phenomenon identities +(i.e. 'standard_name' and possibly 'long_name' attributes), and GRIB parameter codes, +when converting cubes to or from the GRIB format. + +A set of tables define known CF translations for GRIB1 and GRIB2 parameters, and can be +interrogated with the functions in :mod:`iris_grib.grib_phenom_translation`. + + +Parameter loading record +^^^^^^^^^^^^^^^^^^^^^^^^ +All cubes loaded from GRIB have a ``GRIB_PARAM`` attribute, which records the parameter +encodings present in the original file message. + +Examples : + +* ``"GRIB2:d000c003n005"`` represents GRIB2, discipline=0 ("Meteorological products"), + category=3 ("Mass") and indicatorOfParameter=5 ("Geopotential height (gpm)"). -The contents of ``iris-grib`` represent the former grib loading and saving capabilities -of :mod:`Iris ` itself. These capabilities have been separated into a discrete library -so that Iris becomes less monolithic as a library. + * This translates to a standard_name and units of "geopotential_height / m" +* ``"GRIB1:t002c007n033"`` is GRIB1 with table2Version=2, centre=7 + ("US National Weather Service - NCEP (WMC)"), and indicatorOfParameter=33 + ("U-component of wind m s**-1"). -Loading -------- + * This translates to a standard_name and units of "x_wind / m s-1". -To use ``iris-grib`` to load existing GRIB files we can make use of the -:func:`~iris_grib.load_cubes` function:: +Parameter saving control +^^^^^^^^^^^^^^^^^^^^^^^^ +When a cube has a ``GRIB_PARAM`` attribute, as described above, this controls what the +relevant message keys are set to on saving. +(N.B. 
at present applies only to GRIB2, since we don't support GRIB1 saving) - >>> import os - >>> import iris_sample_data - >>> import iris_grib - >>> cubes = iris_grib.load_cubes(os.path.join(iris_sample_data.path, - 'polar_stereo.grib2')) - >>> print cubes - + +Iris-grib Load and Save API +--------------------------- +In addition to direct load and save with Iris, as described above, +it is also possible to load and save GRIB data using iris-grib functions. + +Loading and saving Cubes +^^^^^^^^^^^^^^^^^^^^^^^^ +Load +~~~~ +To load from a GRIB file with ``iris-grib``, you can call the +:func:`~iris_grib.load_cubes` function : + + >>> cubes_iter = iris_grib.load_cubes('testfile.grib') + >>> print(cubes_iter) + As we can see, this returns a generator object. The generator object may be iterated over to access all the Iris cubes loaded from the GRIB file, or converted directly to a list:: - >>> cubes = list(cubes) - >>> print cubes + >>> cubes = list(cubes_iter) + >>> print(cubes) [] -.. note:: - There is no functionality in iris-grib that directly replicates - ``iris.load_cube`` (that is, load a single cube directly rather than returning - a length-one `CubeList`. Instead you could use the following, assuming that the - GRIB file you have loaded contains data that can be loaded to a single cube:: +In effect, this is the same as using ``iris.load_raw(...)``. +So, in most cases, **that is preferable.** - >>> cube, = list(cubes) - >>> print cube - air_temperature / (K) (projection_y_coordinate: 200; projection_x_coordinate: 247) - Dimension coordinates: - projection_y_coordinate x - - projection_x_coordinate - x - Scalar coordinates: - forecast_period: 6 hours - forecast_reference_time: 2013-05-20 00:00:00 - pressure: 101500.0 Pa - time: 2013-05-20 06:00:00 +Save +~~~~ +To use ``iris-grib`` to save Iris cubes to a GRIB file we can make use of the +:func:`~iris_grib.save_grib2` function : - This makes use of an idiom known as variable unpacking. 
+ >>> iris_grib.save_grib2(cube, 'my_file.grib2') +In effect, this is the same as using ``iris.save(cube, ...)``. +So, in most cases, **that is preferable.** -Saving ------- -To use ``iris-grib`` to save Iris cubes to a GRIB file we can make use of the -:func:`~iris_grib.save_grib2` function:: +Working with GRIB messages +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Iris-grib also provides lower-level functions which allow the user to inspect and +adjust actual GRIB encoding details, for precise custom control of loading and saving. + +These functions use intermediate objects which represent individual GRIB file +"messages", with all the GRIB metadata. + +For example: - >>> iris_grib.save_grib2(my_cube, 'my_file.grib2') +* correct loading of some messages with incorrectly encoded parameter number +* save messages with adjusted parameter encodings +* load messages with an unsupported parameter definition template : adjust them to + mimic a similar type which *is* supported by cube translation, and post-modify the + resulting cubes to correct the Iris metadata + +You can load and save messages to and from files, and convert them to and from Cubes. .. note:: - As the function name suggests, only saving to GRIB2 is supported. + at present this only works with GRIB2 data. +.. note:: + Messages are not represented in the same way for loading and saving : the messages + generated by loading *from* files are represented by + :class:`iris_grib.message.GribMessage` objects, whereas messages generated from + cubes, for saving *to* files, are represented as message handles from the + `Python eccodes library `_ . -Interconnectivity with Iris ---------------------------- +Load +~~~~ +The key functions are :func:`~iris_grib.load_pairs_from_fields` and +:func:`~iris_grib.message.GribMessage.messages_from_filename`. +See those for more detail. 
-You can use the functionality provided by ``iris-grib`` directly within Iris -without having to explicitly import ``iris-grib``, as long as you have both Iris -and ``iris-grib`` available to your Python interpreter. +You can load data to 'messages', and filter or modify them to enable or correct +how Iris converts them to 'raw' cubes (i.e. individual 2-dimensional fields). + +For example: + + >>> from iris_grib.message import GribMessage + >>> fields_iter = GribMessage.messages_from_filename('testfile.grib') + >>> # select only wanted data + >>> selected_fields = [ + ... field + ... for field in fields_iter + ... if field.sections[4]['parameterNumber'] == 33 + ... ] + >>> cube_field_pairs = iris_grib.load_pairs_from_fields(selected_fields) -For example:: +Filtering fields can be very useful to speed up loading, since otherwise all data must +be converted to Iris *before* selection with constraints, which can be quite costly. - >>> import iris - >>> import iris_sample_data - >>> cube = iris.load_cube(iris.sample_data_path('polar_stereo.grib2')) -Similarly, you can save your cubes to a GRIB file directly from Iris -using ``iris-grib``:: +Save +~~~~ +The key functions are :func:`~iris_grib.save_pairs_from_cube` and +:func:`~iris_grib.save_messages`. +See those for more detail. - >>> iris.save(my_cube, 'my_file.grib2') +You can convert Iris cubes to eccodes messages, and modify or filter them before saving. + +.. note:: + The messages here are eccodes message "ids", essentially integers, and *not* + :class:`~iris_grib.message.GribMessages`. Thus, they must be inspected and + manipulated using the eccodes library functions. + +..
testsetup:: + + from iris.coords import DimCoord + import eccodes + cube_height_2m5 = iris.load_cube(iris.sample_data_path("rotated_pole.nc")) + cube_height_2m5.add_aux_coord(DimCoord([2.5], standard_name="height", units="m"), ()) + +For example: + + >>> # translate data to grib2 fields + >>> cube_field_pairs = list(iris_grib.save_pairs_from_cube(cube_height_2m5)) + >>> # adjust some of them + >>> for cube, field in cube_field_pairs: + ... if cube.coords('height') and cube.coord('height').points[0] == 2.5: + ... # we know this will have been rounded, badly, so needs re-scaling. + ... assert eccodes.codes_get_long(field, 'scaleFactorOfFirstFixedSurface') == 0 + ... assert eccodes.codes_get_long(field, 'scaledValueOfFirstFixedSurface') == 2 + ... eccodes.codes_set_long(field, 'scaleFactorOfFirstFixedSurface', 1) + ... eccodes.codes_set_long(field, 'scaledValueOfFirstFixedSurface', 25) + ... + >>> # save to file + >>> messages = [msg for (cube, msg) in cube_field_pairs] + >>> iris_grib.save_messages(messages, 'temp.grib2') + >>> # check result + >>> print(iris.load_cube('temp.grib2').coord('height').points) + [2.5] Getting Started @@ -99,11 +217,17 @@ To ensure all ``iris-grib`` dependencies, it is sufficient to have installed `ecCodes `_ . The simplest way to install is with -`conda `_ , -using the `conda-forge channel `_ , +`conda `_ , using the +`package on conda-forge `_ , +with the command + + $ conda install -c conda-forge iris-grib + +Pip can also be used, to install from the +`package on PyPI `_ , with the command - $ conda install -c conda-forge iris-grib + $ pip install iris-grib Development sources are hosted at ``_ . diff --git a/docs/ref/release_notes.rst b/docs/ref/release_notes.rst index b099775a2..cd93f5e4d 100644 --- a/docs/ref/release_notes.rst +++ b/docs/ref/release_notes.rst @@ -18,6 +18,19 @@ Features 4.6, i.e. percentile forecasts. 
`(PR#401) `_ +* `@pp-mo `_ expanded the use of the "GRIB_PARAM" + attributes to GRIB1 loading, and document it more thoroughly. + `(ISSUE#330) `_, + `(PR#402) `_ + +Documentation +^^^^^^^^^^^^^ +* `@pp-mo `_ reworked the main docs page to : + headline basic load + save with Iris, rather than lower-level functions; + better explain load-pairs and save-pairs usage; make all usage examples into + doctests. + `(ISSUE#398) `_ + Dependencies ^^^^^^^^^^^^ * `@bjlittle `_ migrated to ``pytest``. diff --git a/iris_grib/__init__.py b/iris_grib/__init__.py index 8cb57e548..6b7ca90a6 100644 --- a/iris_grib/__init__.py +++ b/iris_grib/__init__.py @@ -30,7 +30,7 @@ from .message import GribMessage -__version__ = '0.20.dev0' +__version__ = "0.20.dev0" __all__ = [ "load_cubes", @@ -41,31 +41,53 @@ ] -CENTRE_TITLES = {'egrr': 'U.K. Met Office - Exeter', - 'ecmf': 'European Centre for Medium Range Weather Forecasts', - 'rjtd': 'Tokyo, Japan Meteorological Agency', - '55': 'San Francisco', - 'kwbc': ('US National Weather Service, National Centres for ' - 'Environmental Prediction')} +CENTRE_TITLES = { + "egrr": "U.K. 
Met Office - Exeter", + "ecmf": "European Centre for Medium Range Weather Forecasts", + "rjtd": "Tokyo, Japan Meteorological Agency", + "55": "San Francisco", + "kwbc": ( + "US National Weather Service, National Centres for " "Environmental Prediction" + ), +} -TIME_RANGE_INDICATORS = {0: 'none', 1: 'none', 3: 'time mean', 4: 'time sum', - 5: 'time _difference', 10: 'none', - # TODO #567 Further exploration of following mappings - 51: 'time mean', 113: 'time mean', 114: 'time sum', - 115: 'time mean', 116: 'time sum', 117: 'time mean', - 118: 'time _covariance', 123: 'time mean', - 124: 'time sum', 125: 'time standard_deviation'} +TIME_RANGE_INDICATORS = { + 0: "none", + 1: "none", + 3: "time mean", + 4: "time sum", + 5: "time _difference", + 10: "none", + # TODO #567 Further exploration of following mappings + 51: "time mean", + 113: "time mean", + 114: "time sum", + 115: "time mean", + 116: "time sum", + 117: "time mean", + 118: "time _covariance", + 123: "time mean", + 124: "time sum", + 125: "time standard_deviation", +} -PROCESSING_TYPES = {0: 'time mean', 1: 'time sum', 2: 'time maximum', - 3: 'time minimum', 4: 'time _difference', - 5: 'time _root mean square', 6: 'time standard_deviation', - 7: 'time _convariance', 8: 'time _difference', - 9: 'time _ratio'} +PROCESSING_TYPES = { + 0: "time mean", + 1: "time sum", + 2: "time maximum", + 3: "time minimum", + 4: "time _difference", + 5: "time _root mean square", + 6: "time standard_deviation", + 7: "time _convariance", + 8: "time _difference", + 9: "time _ratio", +} TIME_CODES_EDITION1 = { - 0: ('minutes', 60), - 1: ('hours', 60*60), - 2: ('days', 24*60*60), + 0: ("minutes", 60), + 1: ("hours", 60 * 60), + 2: ("days", 24 * 60 * 60), # NOTE: do *not* support calendar-dependent units at all. 
# So the following possible keys remain unsupported: # 3: 'months', @@ -73,12 +95,12 @@ # 5: 'decades', # 6: '30 years', # 7: 'century', - 10: ('3 hours', 3*60*60), - 11: ('6 hours', 6*60*60), - 12: ('12 hours', 12*60*60), - 13: ('15 minutes', 15*60), - 14: ('30 minutes', 30*60), - 254: ('seconds', 1), + 10: ("3 hours", 3 * 60 * 60), + 11: ("6 hours", 6 * 60 * 60), + 12: ("12 hours", 12 * 60 * 60), + 13: ("15 minutes", 15 * 60), + 14: ("30 minutes", 30 * 60), + 254: ("seconds", 1), } unknown_string = "???" @@ -87,7 +109,7 @@ class GribDataProxy: """A reference to the data payload of a single Grib message.""" - __slots__ = ('shape', 'dtype', 'path', 'offset') + __slots__ = ("shape", "dtype", "path", "offset") def __init__(self, shape, dtype, path, offset): self.shape = shape @@ -100,7 +122,7 @@ def ndim(self): return len(self.shape) def __getitem__(self, keys): - with open(self.path, 'rb') as grib_fh: + with open(self.path, "rb") as grib_fh: grib_fh.seek(self.offset) grib_message = eccodes.codes_new_from_file( grib_fh, eccodes.CODES_PRODUCT_GRIB @@ -113,9 +135,11 @@ def __getitem__(self, keys): return result def __repr__(self): - msg = '<{self.__class__.__name__} shape={self.shape} ' \ - 'dtype={self.dtype!r} ' \ - 'path={self.path!r} offset={self.offset}>' + msg = ( + "<{self.__class__.__name__} shape={self.shape} " + "dtype={self.dtype!r} " + "path={self.path!r} offset={self.offset}>" + ) return msg.format(self=self) def __getstate__(self): @@ -140,9 +164,8 @@ def __init__(self, grib_message, grib_fh=None): self.grib_message = grib_message if self.edition != 1: - emsg = 'GRIB edition {} is not supported by {!r}.' - raise TranslationError(emsg.format(self.edition, - type(self).__name__)) + emsg = "GRIB edition {} is not supported by {!r}." 
+ raise TranslationError(emsg.format(self.edition, type(self).__name__)) deferred = grib_fh is not None @@ -159,19 +182,17 @@ def __init__(self, grib_message, grib_fh=None): self._compute_extra_keys() # Calculate the data payload shape. - shape = (eccodes.codes_get_long(grib_message, 'numberOfValues'),) + shape = (eccodes.codes_get_long(grib_message, "numberOfValues"),) - if not self.gridType.startswith('reduced'): + if not self.gridType.startswith("reduced"): ni, nj = self.Ni, self.Nj - j_fast = eccodes.codes_get_long(grib_message, - 'jPointsAreConsecutive') + j_fast = eccodes.codes_get_long(grib_message, "jPointsAreConsecutive") shape = (nj, ni) if j_fast == 0 else (ni, nj) if deferred: # Wrap the reference to the data payload within the data proxy # in order to support deferred data loading. - proxy = GribDataProxy(shape, np.array([0.]).dtype, grib_fh.name, - offset) + proxy = GribDataProxy(shape, np.array([0.0]).dtype, grib_fh.name, offset) self._data = as_lazy_data(proxy) else: self.data = _message_values(grib_message, shape) @@ -193,23 +214,17 @@ def __getattr__(self, key): # array...special case here... if key in ["values", "pv", "latitudes", "longitudes"]: res = eccodes.codes_get_double_array(self.grib_message, key) - elif key in ('typeOfFirstFixedSurface', - 'typeOfSecondFixedSurface'): + elif key in ("typeOfFirstFixedSurface", "typeOfSecondFixedSurface"): res = np.int32(eccodes.codes_get_long(self.grib_message, key)) else: - key_type = eccodes.codes_get_native_type( - self.grib_message, key) + key_type = eccodes.codes_get_native_type(self.grib_message, key) if key_type == int: - res = np.int32(eccodes.codes_get_long(self.grib_message, - key)) + res = np.int32(eccodes.codes_get_long(self.grib_message, key)) elif key_type == float: # Because some computer keys are floats, like # longitudeOfFirstGridPointInDegrees, a float32 # is not always enough... 
- res = np.float64(eccodes.codes_get_double( - self.grib_message, key - ) - ) + res = np.float64(eccodes.codes_get_double(self.grib_message, key)) elif key_type == str: res = eccodes.codes_get_string(self.grib_message, key) else: @@ -232,8 +247,10 @@ def _timeunit_detail(self): """Return the (string, seconds) describing the message time unit.""" unit_code = self.indicatorOfUnitOfTimeRange if unit_code not in TIME_CODES_EDITION1: - message = 'Unhandled time unit for forecast ' \ - 'indicatorOfUnitOfTimeRange : ' + str(unit_code) + message = ( + "Unhandled time unit for forecast " + "indicatorOfUnitOfTimeRange : " + str(unit_code) + ) raise NotYetImplementedError(message) return TIME_CODES_EDITION1[unit_code] @@ -254,10 +271,8 @@ def _compute_extra_keys(self): # regular or rotated grid? try: - longitudeOfSouthernPoleInDegrees = \ - self.longitudeOfSouthernPoleInDegrees - latitudeOfSouthernPoleInDegrees = \ - self.latitudeOfSouthernPoleInDegrees + longitudeOfSouthernPoleInDegrees = self.longitudeOfSouthernPoleInDegrees + latitudeOfSouthernPoleInDegrees = self.latitudeOfSouthernPoleInDegrees except AttributeError: longitudeOfSouthernPoleInDegrees = 0.0 latitudeOfSouthernPoleInDegrees = 90.0 @@ -265,29 +280,32 @@ def _compute_extra_keys(self): centre = eccodes.codes_get_string(self.grib_message, "centre") # default values - self.extra_keys = {'_referenceDateTime': -1.0, - '_phenomenonDateTime': -1.0, - '_periodStartDateTime': -1.0, - '_periodEndDateTime': -1.0, - '_levelTypeName': unknown_string, - '_levelTypeUnits': unknown_string, - '_firstLevelTypeName': unknown_string, - '_firstLevelTypeUnits': unknown_string, - '_firstLevel': -1.0, - '_secondLevelTypeName': unknown_string, - '_secondLevel': -1.0, - '_originatingCentre': unknown_string, - '_forecastTime': None, - '_forecastTimeUnit': unknown_string, - '_coord_system': None, - '_x_circular': False, - '_x_coord_name': unknown_string, - '_y_coord_name': unknown_string, - # These are here to avoid repetition in the 
rules - # files, and reduce the very long line lengths. - '_x_points': None, - '_y_points': None, - '_cf_data': None} + self.extra_keys = { + "_referenceDateTime": -1.0, + "_phenomenonDateTime": -1.0, + "_periodStartDateTime": -1.0, + "_periodEndDateTime": -1.0, + "_levelTypeName": unknown_string, + "_levelTypeUnits": unknown_string, + "_firstLevelTypeName": unknown_string, + "_firstLevelTypeUnits": unknown_string, + "_firstLevel": -1.0, + "_secondLevelTypeName": unknown_string, + "_secondLevel": -1.0, + "_originatingCentre": unknown_string, + "_forecastTime": None, + "_forecastTimeUnit": unknown_string, + "_coord_system": None, + "_x_circular": False, + "_x_coord_name": unknown_string, + "_y_coord_name": unknown_string, + # These are here to avoid repetition in the rules + # files, and reduce the very long line lengths. + "_x_points": None, + "_y_points": None, + "_cf_data": None, + "_grib_code": None, + } # cf phenomenon translation # Get centre code (N.B. self.centre has default type = string) @@ -296,16 +314,29 @@ def _compute_extra_keys(self): cf_data = gptx.grib1_phenom_to_cf_info( table2_version=self.table2Version, centre_number=centre_number, - param_number=self.indicatorOfParameter) - self.extra_keys['_cf_data'] = cf_data + param_number=self.indicatorOfParameter, + ) + self.extra_keys["_cf_data"] = cf_data + + # Record the original parameter encoding + self.extra_keys["_grib_code"] = gptx.GRIBCode( + edition=1, + table_version=self.table2Version, + centre_number=centre_number, + number=self.indicatorOfParameter, + ) # reference date - self.extra_keys['_referenceDateTime'] = \ - datetime.datetime(int(self.year), int(self.month), int(self.day), - int(self.hour), int(self.minute)) + self.extra_keys["_referenceDateTime"] = datetime.datetime( + int(self.year), + int(self.month), + int(self.day), + int(self.hour), + int(self.minute), + ) # forecast time with workarounds - self.extra_keys['_forecastTime'] = forecastTime + self.extra_keys["_forecastTime"] = 
forecastTime # verification date processingDone = self._get_processing_done() @@ -320,24 +351,24 @@ def _compute_extra_keys(self): endMinute = int(validityTime[2:4]) # fixed forecastTime in hours - self.extra_keys['_periodStartDateTime'] = \ - (self.extra_keys['_referenceDateTime'] + - datetime.timedelta(hours=int(forecastTime))) - self.extra_keys['_periodEndDateTime'] = \ - datetime.datetime(endYear, endMonth, endDay, endHour, - endMinute) + self.extra_keys["_periodStartDateTime"] = self.extra_keys[ + "_referenceDateTime" + ] + datetime.timedelta(hours=int(forecastTime)) + self.extra_keys["_periodEndDateTime"] = datetime.datetime( + endYear, endMonth, endDay, endHour, endMinute + ) else: - self.extra_keys['_phenomenonDateTime'] = \ - self._get_verification_date() + self.extra_keys["_phenomenonDateTime"] = self._get_verification_date() # originating centre # TODO #574 Expand to include sub-centre - self.extra_keys['_originatingCentre'] = CENTRE_TITLES.get( - centre, "unknown centre %s" % centre) + self.extra_keys["_originatingCentre"] = CENTRE_TITLES.get( + centre, "unknown centre %s" % centre + ) # forecast time unit as a cm string # TODO #575 Do we want PP or GRIB style forecast delta? 
- self.extra_keys['_forecastTimeUnit'] = self._timeunit_string() + self.extra_keys["_forecastTimeUnit"] = self._timeunit_string() # shape of the Earth oblate_Earth = self.resolutionAndComponentFlags & 0b0100000 @@ -351,26 +382,26 @@ def _compute_extra_keys(self): gridType = eccodes.codes_get_string(self.grib_message, "gridType") - if gridType in ["regular_ll", "regular_gg", "reduced_ll", - "reduced_gg"]: - self.extra_keys['_x_coord_name'] = "longitude" - self.extra_keys['_y_coord_name'] = "latitude" - self.extra_keys['_coord_system'] = geoid - elif gridType == 'rotated_ll': + if gridType in ["regular_ll", "regular_gg", "reduced_ll", "reduced_gg"]: + self.extra_keys["_x_coord_name"] = "longitude" + self.extra_keys["_y_coord_name"] = "latitude" + self.extra_keys["_coord_system"] = geoid + elif gridType == "rotated_ll": # TODO: Confirm the translation from angleOfRotation to # north_pole_lon (usually 0 for both) - self.extra_keys['_x_coord_name'] = "grid_longitude" - self.extra_keys['_y_coord_name'] = "grid_latitude" + self.extra_keys["_x_coord_name"] = "grid_longitude" + self.extra_keys["_y_coord_name"] = "grid_latitude" southPoleLon = longitudeOfSouthernPoleInDegrees southPoleLat = latitudeOfSouthernPoleInDegrees - self.extra_keys['_coord_system'] = \ - coord_systems.RotatedGeogCS( - -southPoleLat, - math.fmod(southPoleLon + 180.0, 360.0), - self.angleOfRotation, geoid) - elif gridType == 'polar_stereographic': - self.extra_keys['_x_coord_name'] = "projection_x_coordinate" - self.extra_keys['_y_coord_name'] = "projection_y_coordinate" + self.extra_keys["_coord_system"] = coord_systems.RotatedGeogCS( + -southPoleLat, + math.fmod(southPoleLon + 180.0, 360.0), + self.angleOfRotation, + geoid, + ) + elif gridType == "polar_stereographic": + self.extra_keys["_x_coord_name"] = "projection_x_coordinate" + self.extra_keys["_y_coord_name"] = "projection_y_coordinate" if self.projectionCentreFlag == 0: pole_lat = 90 @@ -380,14 +411,18 @@ def _compute_extra_keys(self): raise 
TranslationError("Unhandled projectionCentreFlag") # Note: I think the grib api defaults LaDInDegrees to 60 for grib1. - self.extra_keys['_coord_system'] = \ - coord_systems.Stereographic( - pole_lat, self.orientationOfTheGridInDegrees, 0, 0, - self.LaDInDegrees, ellipsoid=geoid) + self.extra_keys["_coord_system"] = coord_systems.Stereographic( + pole_lat, + self.orientationOfTheGridInDegrees, + 0, + 0, + self.LaDInDegrees, + ellipsoid=geoid, + ) - elif gridType == 'lambert': - self.extra_keys['_x_coord_name'] = "projection_x_coordinate" - self.extra_keys['_y_coord_name'] = "projection_y_coordinate" + elif gridType == "lambert": + self.extra_keys["_x_coord_name"] = "projection_x_coordinate" + self.extra_keys["_y_coord_name"] = "projection_y_coordinate" flag_name = "projectionCenterFlag" @@ -399,10 +434,14 @@ def _compute_extra_keys(self): raise TranslationError("Unhandled projectionCentreFlag") LambertConformal = coord_systems.LambertConformal - self.extra_keys['_coord_system'] = LambertConformal( - self.LaDInDegrees, self.LoVInDegrees, 0, 0, + self.extra_keys["_coord_system"] = LambertConformal( + self.LaDInDegrees, + self.LoVInDegrees, + 0, + 0, secant_latitudes=(self.Latin1InDegrees, self.Latin2InDegrees), - ellipsoid=geoid) + ellipsoid=geoid, + ) else: raise TranslationError("unhandled grid type: {}".format(gridType)) @@ -411,17 +450,20 @@ def _compute_extra_keys(self): j_step = self.jDirectionIncrementInDegrees if not self.jScansPositively: j_step = -j_step - self._y_points = (np.arange(self.Nj, dtype=np.float64) * j_step + - self.latitudeOfFirstGridPointInDegrees) + self._y_points = ( + np.arange(self.Nj, dtype=np.float64) * j_step + + self.latitudeOfFirstGridPointInDegrees + ) - elif gridType in ['regular_gg']: + elif gridType in ["regular_gg"]: # longitude coordinate is straight-forward self._regular_longitude_common() # get the distinct latitudes, and make sure they are sorted # (south-to-north) and then put them in the right direction # depending on the 
scan direction latitude_points = eccodes.codes_get_double_array( - self.grib_message, 'distinctLatitudes').astype(np.float64) + self.grib_message, "distinctLatitudes" + ).astype(np.float64) latitude_points.sort() if not self.jScansPositively: # we require latitudes north-to-south @@ -431,20 +473,21 @@ def _compute_extra_keys(self): elif gridType in ["polar_stereographic", "lambert"]: # convert the starting latlon into meters - cartopy_crs = self.extra_keys['_coord_system'].as_cartopy_crs() + cartopy_crs = self.extra_keys["_coord_system"].as_cartopy_crs() x1, y1 = cartopy_crs.transform_point( self.longitudeOfFirstGridPointInDegrees, self.latitudeOfFirstGridPointInDegrees, - ccrs.Geodetic()) + ccrs.Geodetic(), + ) if not np.all(np.isfinite([x1, y1])): - raise TranslationError("Could not determine the first latitude" - " and/or longitude grid point.") + raise TranslationError( + "Could not determine the first latitude" + " and/or longitude grid point." + ) - self._x_points = x1 + self.DxInMetres * np.arange(self.Nx, - dtype=np.float64) - self._y_points = y1 + self.DyInMetres * np.arange(self.Ny, - dtype=np.float64) + self._x_points = x1 + self.DxInMetres * np.arange(self.Nx, dtype=np.float64) + self._y_points = y1 + self.DyInMetres * np.arange(self.Ny, dtype=np.float64) elif gridType in ["reduced_ll", "reduced_gg"]: self._x_points = self.longitudes @@ -458,18 +501,20 @@ def _regular_longitude_common(self): i_step = self.iDirectionIncrementInDegrees if self.iScansNegatively: i_step = -i_step - self._x_points = (np.arange(self.Ni, dtype=np.float64) * i_step + - self.longitudeOfFirstGridPointInDegrees) - if "longitude" in self.extra_keys['_x_coord_name'] and self.Ni > 1: + self._x_points = ( + np.arange(self.Ni, dtype=np.float64) * i_step + + self.longitudeOfFirstGridPointInDegrees + ) + if "longitude" in self.extra_keys["_x_coord_name"] and self.Ni > 1: if _longitude_is_cyclic(self._x_points): - self.extra_keys['_x_circular'] = True + self.extra_keys["_x_circular"] = 
True def _get_processing_done(self): """Determine the type of processing that was done on the data.""" - processingDone = 'unknown' + processingDone = "unknown" timeRangeIndicator = self.timeRangeIndicator - default = 'time _grib1_process_unknown_%i' % timeRangeIndicator + default = "time _grib1_process_unknown_%i" % timeRangeIndicator processingDone = TIME_RANGE_INDICATORS.get(timeRangeIndicator, default) return processingDone @@ -526,8 +571,9 @@ def _get_verification_date(self): # only at the time (hour, minute) given in the reference time, # for all the days included in the P2 period. The units of P2 # are given by the contents of octet 18 and Table 4. - raise TranslationError("unhandled grib1 timeRangeIndicator " - "= 51 (avg of avgs)") + raise TranslationError( + "unhandled grib1 timeRangeIndicator " "= 51 (avg of avgs)" + ) elif time_range_indicator == 113: # Average of N forecasts (or initialized analyses); each # product has forecast period of P1 (P1=0 for initialized @@ -574,16 +620,16 @@ def _get_verification_date(self): # the reference time, at intervals of P2. time_diff = P1 else: - raise TranslationError("unhandled grib1 timeRangeIndicator " - "= %i" % time_range_indicator) + raise TranslationError( + "unhandled grib1 timeRangeIndicator " "= %i" % time_range_indicator + ) # Get the timeunit interval. interval_secs = self._timeunit_seconds() # Multiply by start-offset and convert to a timedelta. # NOTE: a 'float' conversion is required here, as time_diff may be # a numpy scalar, which timedelta will not accept. - interval_delta = datetime.timedelta( - seconds=float(time_diff * interval_secs)) + interval_delta = datetime.timedelta(seconds=float(time_diff * interval_secs)) # Return validity_time = (reference_time + start_offset*time_unit). return reference_date_time + interval_delta @@ -606,11 +652,10 @@ def phenomenon_points(self, time_unit): measured in the appropriate time units. 
""" - time_reference = '%s since epoch' % time_unit + time_reference = "%s since epoch" % time_unit return float( cf_units.date2num( - self._phenomenonDateTime, time_reference, - cf_units.CALENDAR_GREGORIAN + self._phenomenonDateTime, time_reference, cf_units.CALENDAR_GREGORIAN ) ) @@ -621,10 +666,12 @@ def phenomenon_bounds(self, time_unit): """ # TODO #576 Investigate when it's valid to get phenomenon_bounds - time_reference = '%s since epoch' % time_unit + time_reference = "%s since epoch" % time_unit unit = cf_units.Unit(time_reference, cf_units.CALENDAR_GREGORIAN) - return [float(unit.date2num(self._periodStartDateTime)), - float(unit.date2num(self._periodEndDateTime))] + return [ + float(unit.date2num(self._periodStartDateTime)), + float(unit.date2num(self._periodEndDateTime)), + ] def _longitude_is_cyclic(points): @@ -643,8 +690,8 @@ def _longitude_is_cyclic(points): def _message_values(grib_message, shape): - eccodes.codes_set_double(grib_message, 'missingValue', np.nan) - data = eccodes.codes_get_double_array(grib_message, 'values') + eccodes.codes_set_double(grib_message, "missingValue", np.nan) + data = eccodes.codes_get_double_array(grib_message, "values") data = data.reshape(shape) # Handle missing values in a sensible way. @@ -657,21 +704,20 @@ def _message_values(grib_message, shape): def _load_generate(filename): messages = GribMessage.messages_from_filename(filename) for message in messages: - editionNumber = message.sections[0]['editionNumber'] + editionNumber = message.sections[0]["editionNumber"] if editionNumber == 1: message_id = message._raw_message._message_id grib_fh = message._file_ref.open_file message = GribWrapper(message_id, grib_fh=grib_fh) elif editionNumber != 2: - emsg = 'GRIB edition {} is not supported by {!r}.' - raise TranslationError(emsg.format(editionNumber, - type(message).__name__)) + emsg = "GRIB edition {} is not supported by {!r}." 
+ raise TranslationError(emsg.format(editionNumber, type(message).__name__)) yield message def load_cubes(filenames, callback=None): """ - Returns a generator of cubes from the given list of filenames. + Returns an iterator over cubes from the given list of filenames. Args: @@ -684,13 +730,12 @@ def load_cubes(filenames, callback=None): Function which can be passed on to :func:`iris.io.run_callback`. Returns: - A generator containing Iris cubes loaded from the GRIB files. + An iterator returning Iris cubes loaded from the GRIB files. """ import iris.fileformats.rules as iris_rules - grib_loader = iris_rules.Loader(_load_generate, - {}, - load_convert) + + grib_loader = iris_rules.Loader(_load_generate, {}, load_convert) return iris_rules.load_cubes(filenames, callback, grib_loader) @@ -708,16 +753,16 @@ def load_pairs_from_fields(grib_messages): >>> import iris >>> from iris_grib import load_pairs_from_fields >>> from iris_grib.message import GribMessage - >>> filename = iris.sample_data_path('polar_stereo.grib2') + >>> filename = iris.sample_data_path("polar_stereo.grib2") >>> filtered_messages = [] >>> for message in GribMessage.messages_from_filename(filename): - ... if message.sections[1]['productionStatusOfProcessedData'] == 0: + ... if message.sections[1]["productionStatusOfProcessedData"] == 0: ... filtered_messages.append(message) >>> cubes_messages = load_pairs_from_fields(filtered_messages) >>> for cube, msg in cubes_messages: - ... prod_stat = msg.sections[1]['productionStatusOfProcessedData'] - ... cube.attributes['productionStatusOfProcessedData'] = prod_stat - >>> print(cube.attributes['productionStatusOfProcessedData']) + ... prod_stat = msg.sections[1]["productionStatusOfProcessedData"] + ... 
cube.attributes["productionStatusOfProcessedData"] = prod_stat + >>> print(cube.attributes["productionStatusOfProcessedData"]) 0 This capability can also be used to alter fields before they are passed to @@ -727,8 +772,8 @@ def load_pairs_from_fields(grib_messages): >>> from iris_grib import load_pairs_from_fields >>> cleaned_messages = GribMessage.messages_from_filename(filename) >>> for message in cleaned_messages: - ... if message.sections[1]['productionStatusOfProcessedData'] == 0: - ... message.sections[1]['productionStatusOfProcessedData'] = 4 + ... if message.sections[1]["productionStatusOfProcessedData"] == 0: + ... message.sections[1]["productionStatusOfProcessedData"] = 4 >>> cubes = load_pairs_from_fields(cleaned_messages) Args: @@ -742,6 +787,7 @@ def load_pairs_from_fields(grib_messages): """ import iris.fileformats.rules as iris_rules + return iris_rules.load_pairs_from_fields(grib_messages, load_convert) @@ -772,10 +818,10 @@ def save_grib2(cube, target, append=False): def save_pairs_from_cube(cube): """ - Convert one or more cubes to (2D cube, GRIB message) pairs. - Returns an iterable of tuples each consisting of one 2D cube and - one GRIB message ID, the result of the 2D cube being processed by the GRIB - save rules. + Convert one or more cubes to (2D cube, GRIB-message-id) pairs. + + Produces pairs of 2D cubes and GRIB messages, the result of the 2D cube + being processed by the GRIB save rules. Args: @@ -783,9 +829,13 @@ def save_pairs_from_cube(cube): A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes. + Returns: + a iterator returning (cube, field) pairs, where each ``cube`` is a 2d + slice of the input and each``field`` is an eccodes message "id". + N.B. the message "id"s are integer handles. 
""" - x_coords = cube.coords(axis='x', dim_coords=True) - y_coords = cube.coords(axis='y', dim_coords=True) + x_coords = cube.coords(axis="x", dim_coords=True) + y_coords = cube.coords(axis="y", dim_coords=True) if len(x_coords) != 1 or len(y_coords) != 1: raise TranslationError("Did not find one (and only one) x or y coord") diff --git a/iris_grib/_grib1_load_rules.py b/iris_grib/_grib1_load_rules.py index 73afe5aa5..0b670ad96 100644 --- a/iris_grib/_grib1_load_rules.py +++ b/iris_grib/_grib1_load_rules.py @@ -12,7 +12,7 @@ from iris.aux_factory import HybridPressureFactory from iris.coords import AuxCoord, CellMethod, DimCoord from iris.exceptions import TranslationError -from iris.fileformats.rules import (ConversionMetadata, Factory, Reference) +from iris.fileformats.rules import ConversionMetadata, Factory, Reference def grib1_convert(grib): @@ -29,9 +29,8 @@ def grib1_convert(grib): """ if grib.edition != 1: - emsg = 'GRIB edition {} is not supported by {!r}.' - raise TranslationError(emsg.format(grib.edition, - type(grib).__name__)) + emsg = "GRIB edition {} is not supported by {!r}." 
+ raise TranslationError(emsg.format(grib.edition, type(grib).__name__)) factories = [] references = [] @@ -49,10 +48,10 @@ def grib1_convert(grib): AuxCoord( grib._y_points, grib._y_coord_name, - units='degrees', + units="degrees", coord_system=grib._coord_system, ), - 0 + 0, ) ) aux_coords_and_dims.append( @@ -60,10 +59,10 @@ def grib1_convert(grib): AuxCoord( grib._x_points, grib._x_coord_name, - units='degrees', + units="degrees", coord_system=grib._coord_system, ), - 0 + 0, ) ) elif grib.gridType in ("regular_ll", "rotated_ll", "regular_gg"): @@ -77,10 +76,10 @@ def grib1_convert(grib): DimCoord( grib._y_points, grib._y_coord_name, - units='degrees', + units="degrees", coord_system=grib._coord_system, ), - j_points_are_consecutive + j_points_are_consecutive, ) ) dim_coords_and_dims.append( @@ -88,11 +87,11 @@ def grib1_convert(grib): DimCoord( grib._x_points, grib._x_coord_name, - units='degrees', + units="degrees", coord_system=grib._coord_system, circular=grib._x_circular, ), - int(not j_points_are_consecutive) + int(not j_points_are_consecutive), ) ) @@ -105,7 +104,7 @@ def grib1_convert(grib): units="m", coord_system=grib._coord_system, ), - 0 + 0, ) ) dim_coords_and_dims.append( @@ -116,49 +115,53 @@ def grib1_convert(grib): units="m", coord_system=grib._coord_system, ), - 1 + 1, ) ) - if \ - (grib.table2Version < 128) and \ - (grib.indicatorOfParameter == 11) and \ - (grib._cf_data is None): + if ( + (grib.table2Version < 128) + and (grib.indicatorOfParameter == 11) + and (grib._cf_data is None) + ): standard_name = "air_temperature" units = "kelvin" - if \ - (grib.table2Version < 128) and \ - (grib.indicatorOfParameter == 33) and \ - (grib._cf_data is None): + if ( + (grib.table2Version < 128) + and (grib.indicatorOfParameter == 33) + and (grib._cf_data is None) + ): standard_name = "x_wind" units = "m s-1" - if \ - (grib.table2Version < 128) and \ - (grib.indicatorOfParameter == 34) and \ - (grib._cf_data is None): + if ( + (grib.table2Version < 128) + 
and (grib.indicatorOfParameter == 34) + and (grib._cf_data is None) + ): standard_name = "y_wind" units = "m s-1" - if \ - (grib._cf_data is not None): + if grib._cf_data is not None: standard_name = grib._cf_data.standard_name long_name = grib._cf_data.standard_name or grib._cf_data.long_name units = grib._cf_data.units - if \ - (grib.table2Version >= 128) and \ - (grib._cf_data is None): - long_name = f"UNKNOWN LOCAL PARAM {grib.indicatorOfParameter}"\ - f".{grib.table2Version}" - units = "???" - - if \ - (grib.table2Version == 1) and \ - (grib.indicatorOfParameter >= 128): - long_name = f"UNKNOWN LOCAL PARAM {grib.indicatorOfParameter}"\ - f".{grib.table2Version}" + # N.B. in addition to the previous cf translated phenomenon info, + # **always** add a GRIB_PARAM attribute to identify the input phenomenon + # identity. + attributes["GRIB_PARAM"] = grib._grib_code + + if ( + (grib.table2Version >= 128) + and (grib._cf_data is None) + or (grib.table2Version == 1) + and (grib.indicatorOfParameter >= 128) + ): + long_name = ( + f"UNKNOWN LOCAL PARAM {grib.indicatorOfParameter}" f".{grib.table2Version}" + ) units = "???" 
if grib._phenomenonDateTime != -1.0: @@ -166,119 +169,110 @@ def grib1_convert(grib): ( DimCoord( points=grib.startStep, - standard_name='forecast_period', + standard_name="forecast_period", units=grib._forecastTimeUnit, ), - None + None, ) ) aux_coords_and_dims.append( ( DimCoord( - points=grib.phenomenon_points('hours'), - standard_name='time', - units=Unit('hours since epoch', CALENDAR_GREGORIAN), + points=grib.phenomenon_points("hours"), + standard_name="time", + units=Unit("hours since epoch", CALENDAR_GREGORIAN), ), - None + None, ) ) def add_bounded_time_coords(aux_coords_and_dims, grib): - t_bounds = grib.phenomenon_bounds('hours') + t_bounds = grib.phenomenon_bounds("hours") period = t_bounds[1] - t_bounds[0] - aux_coords_and_dims.append(( - DimCoord(standard_name='forecast_period', - units="hours", - points=grib._forecastTime + 0.5 * period, - bounds=[grib._forecastTime, grib._forecastTime + period]), - None)) - aux_coords_and_dims.append(( - DimCoord(standard_name='time', - units=Unit('hours since epoch', CALENDAR_GREGORIAN), - points=0.5 * (t_bounds[0] + t_bounds[1]), - bounds=t_bounds), - None)) - - if \ - (grib.timeRangeIndicator == 2): + aux_coords_and_dims.append( + ( + DimCoord( + standard_name="forecast_period", + units="hours", + points=grib._forecastTime + 0.5 * period, + bounds=[grib._forecastTime, grib._forecastTime + period], + ), + None, + ) + ) + aux_coords_and_dims.append( + ( + DimCoord( + standard_name="time", + units=Unit("hours since epoch", CALENDAR_GREGORIAN), + points=0.5 * (t_bounds[0] + t_bounds[1]), + bounds=t_bounds, + ), + None, + ) + ) + + if grib.timeRangeIndicator == 2: add_bounded_time_coords(aux_coords_and_dims, grib) - if \ - (grib.timeRangeIndicator == 3): + if grib.timeRangeIndicator == 3: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 4): + if grib.timeRangeIndicator == 4: add_bounded_time_coords(aux_coords_and_dims, grib) 
cell_methods.append(CellMethod("sum", coords="time")) - if \ - (grib.timeRangeIndicator == 5): + if grib.timeRangeIndicator == 5: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("_difference", coords="time")) - if \ - (grib.timeRangeIndicator == 51): + if grib.timeRangeIndicator == 51: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 113): + if grib.timeRangeIndicator == 113: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 114): + if grib.timeRangeIndicator == 114: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("sum", coords="time")) - if \ - (grib.timeRangeIndicator == 115): + if grib.timeRangeIndicator == 115: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 116): + if grib.timeRangeIndicator == 116: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("sum", coords="time")) - if \ - (grib.timeRangeIndicator == 117): + if grib.timeRangeIndicator == 117: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 118): + if grib.timeRangeIndicator == 118: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("_covariance", coords="time")) - if \ - (grib.timeRangeIndicator == 123): + if grib.timeRangeIndicator == 123: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("mean", coords="time")) - if \ - (grib.timeRangeIndicator == 124): + if grib.timeRangeIndicator == 124: add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("sum", coords="time")) - if \ - (grib.timeRangeIndicator == 125): + if grib.timeRangeIndicator == 125: 
add_bounded_time_coords(aux_coords_and_dims, grib) cell_methods.append(CellMethod("standard_deviation", coords="time")) - if grib.levelType == 'pl': + if grib.levelType == "pl": aux_coords_and_dims.append( - ( - DimCoord( - points=grib.level, long_name="pressure", units="hPa" - ), - None - ) + (DimCoord(points=grib.level, long_name="pressure", units="hPa"), None) ) - if grib.levelType == 'sfc': + if grib.levelType == "sfc": if grib._cf_data is not None and grib._cf_data.set_height is not None: aux_coords_and_dims.append( ( @@ -286,77 +280,83 @@ def add_bounded_time_coords(aux_coords_and_dims, grib): points=grib._cf_data.set_height, long_name="height", units="m", - attributes={'positive': 'up'}, + attributes={"positive": "up"}, ), - None + None, ) ) - elif grib.typeOfLevel == 'heightAboveGround': # required for NCAR + elif grib.typeOfLevel == "heightAboveGround": # required for NCAR aux_coords_and_dims.append( ( DimCoord( points=grib.level, long_name="height", units="m", - attributes={'positive': 'up'}, + attributes={"positive": "up"}, ), - None + None, ) ) - if grib.levelType == 'ml' and hasattr(grib, 'pv'): + if grib.levelType == "ml" and hasattr(grib, "pv"): aux_coords_and_dims.append( ( AuxCoord( grib.level, - standard_name='model_level_number', + standard_name="model_level_number", units=1, - attributes={'positive': 'up'}, + attributes={"positive": "up"}, ), - None + None, ) ) aux_coords_and_dims.append( ( - DimCoord( - grib.pv[grib.level], long_name='level_pressure', units='Pa' - ), - None + DimCoord(grib.pv[grib.level], long_name="level_pressure", units="Pa"), + None, ) ) aux_coords_and_dims.append( ( AuxCoord( - grib.pv[grib.numberOfCoordinatesValues//2 + grib.level], - long_name='sigma', + grib.pv[grib.numberOfCoordinatesValues // 2 + grib.level], + long_name="sigma", units=1, ), - None + None, ) ) factories.append( Factory( HybridPressureFactory, [ - {'long_name': 'level_pressure'}, - {'long_name': 'sigma'}, - Reference('surface_pressure'), - ] + 
{"long_name": "level_pressure"}, + {"long_name": "sigma"}, + Reference("surface_pressure"), + ], ) ) - if grib._originatingCentre != 'unknown': + if grib._originatingCentre != "unknown": aux_coords_and_dims.append( ( AuxCoord( points=grib._originatingCentre, - long_name='originating_centre', - units='no_unit', + long_name="originating_centre", + units="no_unit", ), - None + None, ) ) - return ConversionMetadata(factories, references, standard_name, long_name, - units, attributes, cell_methods, - dim_coords_and_dims, aux_coords_and_dims) + return ConversionMetadata( + factories, + references, + standard_name, + long_name, + units, + attributes, + cell_methods, + dim_coords_and_dims, + aux_coords_and_dims, + ) diff --git a/iris_grib/_grib_cf_map.py b/iris_grib/_grib_cf_map.py index 554be6ea3..bb2714e16 100644 --- a/iris_grib/_grib_cf_map.py +++ b/iris_grib/_grib_cf_map.py @@ -11,235 +11,322 @@ from collections import namedtuple -CFName = namedtuple('CFName', 'standard_name long_name units') +CFName = namedtuple("CFName", "standard_name long_name units") -DimensionCoordinate = namedtuple('DimensionCoordinate', - 'standard_name units points') +DimensionCoordinate = namedtuple("DimensionCoordinate", "standard_name units points") -G1LocalParam = namedtuple('G1LocalParam', 'edition t2version centre iParam') -G2Param = namedtuple('G2Param', 'edition discipline category number') +G1LocalParam = namedtuple("G1LocalParam", "edition t2version centre iParam") +G2Param = namedtuple("G2Param", "edition discipline category number") GRIB1_LOCAL_TO_CF_CONSTRAINED = { - G1LocalParam(1, 128, 98, 165): (CFName('x_wind', None, 'm s-1'), DimensionCoordinate('height', 'm', (10,))), - G1LocalParam(1, 128, 98, 166): (CFName('y_wind', None, 'm s-1'), DimensionCoordinate('height', 'm', (10,))), - G1LocalParam(1, 128, 98, 167): (CFName('air_temperature', None, 'K'), DimensionCoordinate('height', 'm', (2,))), - G1LocalParam(1, 128, 98, 168): (CFName('dew_point_temperature', None, 'K'), 
DimensionCoordinate('height', 'm', (2,))), - } + G1LocalParam(1, 128, 98, 165): ( + CFName("x_wind", None, "m s-1"), + DimensionCoordinate("height", "m", (10,)), + ), + G1LocalParam(1, 128, 98, 166): ( + CFName("y_wind", None, "m s-1"), + DimensionCoordinate("height", "m", (10,)), + ), + G1LocalParam(1, 128, 98, 167): ( + CFName("air_temperature", None, "K"), + DimensionCoordinate("height", "m", (2,)), + ), + G1LocalParam(1, 128, 98, 168): ( + CFName("dew_point_temperature", None, "K"), + DimensionCoordinate("height", "m", (2,)), + ), +} GRIB1_LOCAL_TO_CF = { - G1LocalParam(1, 128, 98, 31): CFName('sea_ice_area_fraction', None, '1'), - G1LocalParam(1, 128, 98, 34): CFName('sea_surface_temperature', None, 'K'), - G1LocalParam(1, 128, 98, 59): CFName('atmosphere_specific_convective_available_potential_energy', None, 'J kg-1'), - G1LocalParam(1, 128, 98, 129): CFName('geopotential', None, 'm2 s-2'), - G1LocalParam(1, 128, 98, 130): CFName('air_temperature', None, 'K'), - G1LocalParam(1, 128, 98, 131): CFName('x_wind', None, 'm s-1'), - G1LocalParam(1, 128, 98, 132): CFName('y_wind', None, 'm s-1'), - G1LocalParam(1, 128, 98, 135): CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'), - G1LocalParam(1, 128, 98, 141): CFName('thickness_of_snowfall_amount', None, 'm'), - G1LocalParam(1, 128, 98, 151): CFName('air_pressure_at_sea_level', None, 'Pa'), - G1LocalParam(1, 128, 98, 157): CFName('relative_humidity', None, '%'), - G1LocalParam(1, 128, 98, 164): CFName('cloud_area_fraction', None, '1'), - G1LocalParam(1, 128, 98, 173): CFName('surface_roughness_length', None, 'm'), - G1LocalParam(1, 128, 98, 174): CFName(None, 'grib_physical_atmosphere_albedo', '1'), - G1LocalParam(1, 128, 98, 186): CFName('low_type_cloud_area_fraction', None, '1'), - G1LocalParam(1, 128, 98, 187): CFName('medium_type_cloud_area_fraction', None, '1'), - G1LocalParam(1, 128, 98, 188): CFName('high_type_cloud_area_fraction', None, '1'), - G1LocalParam(1, 128, 98, 235): CFName(None, 
'grib_skin_temperature', 'K'), - } + G1LocalParam(1, 128, 98, 31): CFName("sea_ice_area_fraction", None, "1"), + G1LocalParam(1, 128, 98, 34): CFName("sea_surface_temperature", None, "K"), + G1LocalParam(1, 128, 98, 59): CFName( + "atmosphere_specific_convective_available_potential_energy", None, "J kg-1" + ), + G1LocalParam(1, 128, 98, 129): CFName("geopotential", None, "m2 s-2"), + G1LocalParam(1, 128, 98, 130): CFName("air_temperature", None, "K"), + G1LocalParam(1, 128, 98, 131): CFName("x_wind", None, "m s-1"), + G1LocalParam(1, 128, 98, 132): CFName("y_wind", None, "m s-1"), + G1LocalParam(1, 128, 98, 135): CFName( + "lagrangian_tendency_of_air_pressure", None, "Pa s-1" + ), + G1LocalParam(1, 128, 98, 141): CFName("thickness_of_snowfall_amount", None, "m"), + G1LocalParam(1, 128, 98, 151): CFName("air_pressure_at_sea_level", None, "Pa"), + G1LocalParam(1, 128, 98, 157): CFName("relative_humidity", None, "%"), + G1LocalParam(1, 128, 98, 164): CFName("cloud_area_fraction", None, "1"), + G1LocalParam(1, 128, 98, 173): CFName("surface_roughness_length", None, "m"), + G1LocalParam(1, 128, 98, 174): CFName(None, "grib_physical_atmosphere_albedo", "1"), + G1LocalParam(1, 128, 98, 186): CFName("low_type_cloud_area_fraction", None, "1"), + G1LocalParam(1, 128, 98, 187): CFName("medium_type_cloud_area_fraction", None, "1"), + G1LocalParam(1, 128, 98, 188): CFName("high_type_cloud_area_fraction", None, "1"), + G1LocalParam(1, 128, 98, 235): CFName(None, "grib_skin_temperature", "K"), +} GRIB2_TO_CF = { - G2Param(2, 0, 0, 0): CFName('air_temperature', None, 'K'), - G2Param(2, 0, 0, 2): CFName('air_potential_temperature', None, 'K'), - G2Param(2, 0, 0, 6): CFName('dew_point_temperature', None, 'K'), - G2Param(2, 0, 0, 10): CFName('surface_upward_latent_heat_flux', None, 'W m-2'), - G2Param(2, 0, 0, 11): CFName('surface_upward_sensible_heat_flux', None, 'W m-2'), - G2Param(2, 0, 0, 17): CFName('surface_temperature', None, 'K'), - G2Param(2, 0, 0, 32): 
CFName('wet_bulb_potential_temperature', None, 'K'), - G2Param(2, 0, 1, 0): CFName('specific_humidity', None, 'kg kg-1'), - G2Param(2, 0, 1, 1): CFName('relative_humidity', None, '%'), - G2Param(2, 0, 1, 2): CFName('humidity_mixing_ratio', None, 'kg kg-1'), - G2Param(2, 0, 1, 3): CFName(None, 'precipitable_water', 'kg m-2'), - G2Param(2, 0, 1, 7): CFName('precipitation_flux', None, 'kg m-2 s-1'), - G2Param(2, 0, 1, 9): CFName('stratiform_rainfall_amount', 'Large-scale precipitation (non-convective)', 'kg m-2'), - G2Param(2, 0, 1, 10): CFName('convective_rainfall_amount', 'Convective precipitation', 'kg m-2'), - G2Param(2, 0, 1, 11): CFName('thickness_of_snowfall_amount', None, 'm'), - G2Param(2, 0, 1, 13): CFName('liquid_water_content_of_surface_snow', None, 'kg m-2'), - G2Param(2, 0, 1, 15): CFName('stratiform_snowfall_amount', 'Large-scale snow', 'kg m-2'), - G2Param(2, 0, 1, 22): CFName(None, 'cloud_mixing_ratio', 'kg kg-1'), - G2Param(2, 0, 1, 37): CFName('convective_rainfall_flux', 'Convective precipitation rate', 'kg m-2 s-1'), - G2Param(2, 0, 1, 49): CFName('precipitation_amount', None, 'kg m-2'), - G2Param(2, 0, 1, 51): CFName('atmosphere_mass_content_of_water', None, 'kg m-2'), - G2Param(2, 0, 1, 53): CFName('snowfall_flux', None, 'kg m-2 s-1'), - G2Param(2, 0, 1, 58): CFName('convective_snowfall_flux', 'Convective snowfall rate', 'kg m-2 s-1'), - G2Param(2, 0, 1, 59): CFName('stratiform_snowfall_flux', 'Large scale snowfall rate', 'kg m-2 s-1'), - G2Param(2, 0, 1, 60): CFName('snowfall_amount', None, 'kg m-2'), - G2Param(2, 0, 1, 64): CFName('atmosphere_mass_content_of_water_vapor', None, 'kg m-2'), - G2Param(2, 0, 1, 77): CFName('stratiform_rainfall_flux', 'Large scale rain rate', 'kg m-2 s-1'), - G2Param(2, 0, 1, 83): CFName('mass_fraction_of_cloud_liquid_water_in_air', None, 'kg kg-1'), - G2Param(2, 0, 1, 84): CFName('mass_fraction_of_cloud_ice_in_air', None, 'kg kg-1'), - G2Param(2, 0, 2, 0): CFName('wind_from_direction', None, 'degrees'), - 
G2Param(2, 0, 2, 1): CFName('wind_speed', None, 'm s-1'), - G2Param(2, 0, 2, 2): CFName('x_wind', None, 'm s-1'), - G2Param(2, 0, 2, 3): CFName('y_wind', None, 'm s-1'), - G2Param(2, 0, 2, 8): CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'), - G2Param(2, 0, 2, 9): CFName('upward_air_velocity', 'Vertical velocity (geometric)', 'm s-1'), - G2Param(2, 0, 2, 10): CFName('atmosphere_absolute_vorticity', None, 's-1'), - G2Param(2, 0, 2, 12): CFName('atmosphere_relative_vorticity', 'Relative vorticity', 's-1'), - G2Param(2, 0, 2, 14): CFName(None, 'ertel_potential_velocity', 'K m2 kg-1 s-1'), - G2Param(2, 0, 2, 22): CFName('wind_speed_of_gust', None, 'm s-1'), - G2Param(2, 0, 3, 0): CFName('air_pressure', None, 'Pa'), - G2Param(2, 0, 3, 1): CFName('air_pressure_at_sea_level', None, 'Pa'), - G2Param(2, 0, 3, 3): CFName(None, 'icao_standard_atmosphere_reference_height', 'm'), - G2Param(2, 0, 3, 4): CFName('geopotential', None, 'm2 s-2'), - G2Param(2, 0, 3, 5): CFName('geopotential_height', None, 'm'), - G2Param(2, 0, 3, 6): CFName('altitude', None, 'm'), - G2Param(2, 0, 3, 9): CFName('geopotential_height_anomaly', None, 'm'), - G2Param(2, 0, 4, 7): CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'), - G2Param(2, 0, 4, 9): CFName('surface_net_downward_shortwave_flux', None, 'W m-2'), - G2Param(2, 0, 5, 3): CFName('surface_downwelling_longwave_flux_in_air', None, 'W m-2'), - G2Param(2, 0, 5, 4): CFName('toa_outgoing_longwave_flux', 'Upward long-wave radiation flux', 'W m-2'), - G2Param(2, 0, 5, 5): CFName('surface_net_downward_longwave_flux', None, 'W m-2'), - G2Param(2, 0, 6, 1): CFName(None, 'cloud_area_fraction_assuming_maximum_random_overlap', '1'), - G2Param(2, 0, 6, 3): CFName('low_type_cloud_area_fraction', None, '%'), - G2Param(2, 0, 6, 4): CFName('medium_type_cloud_area_fraction', None, '%'), - G2Param(2, 0, 6, 5): CFName('high_type_cloud_area_fraction', None, '%'), - G2Param(2, 0, 6, 6): 
CFName('atmosphere_mass_content_of_cloud_liquid_water', None, 'kg m-2'), - G2Param(2, 0, 6, 7): CFName('cloud_area_fraction_in_atmosphere_layer', None, '%'), - G2Param(2, 0, 6, 25): CFName(None, 'WAFC_CB_horizontal_extent', '1'), - G2Param(2, 0, 6, 26): CFName(None, 'WAFC_ICAO_height_at_cloud_base', 'm'), - G2Param(2, 0, 6, 27): CFName(None, 'WAFC_ICAO_height_at_cloud_top', 'm'), - G2Param(2, 0, 7, 6): CFName('atmosphere_specific_convective_available_potential_energy', None, 'J kg-1'), - G2Param(2, 0, 7, 7): CFName(None, 'convective_inhibition', 'J kg-1'), - G2Param(2, 0, 7, 8): CFName(None, 'storm_relative_helicity', 'J kg-1'), - G2Param(2, 0, 14, 0): CFName('atmosphere_mole_content_of_ozone', None, 'Dobson'), - G2Param(2, 0, 19, 1): CFName(None, 'grib_physical_atmosphere_albedo', '%'), - G2Param(2, 0, 19, 20): CFName(None, 'WAFC_icing_potential', '1'), - G2Param(2, 0, 19, 21): CFName(None, 'WAFC_in-cloud_turb_potential', '1'), - G2Param(2, 0, 19, 22): CFName(None, 'WAFC_CAT_potential', '1'), - G2Param(2, 2, 0, 0): CFName('land_binary_mask', None, '1'), - G2Param(2, 2, 0, 0): CFName('land_area_fraction', None, '1'), - G2Param(2, 2, 0, 1): CFName('surface_roughness_length', None, 'm'), - G2Param(2, 2, 0, 2): CFName('soil_temperature', None, 'K'), - G2Param(2, 2, 0, 3): CFName('soil_moisture_content', 'Soil moisture content', 'kg m-2'), - G2Param(2, 2, 0, 7): CFName('surface_altitude', None, 'm'), - G2Param(2, 2, 0, 22): CFName('moisture_content_of_soil_layer', None, 'kg m-2'), - G2Param(2, 2, 0, 34): CFName('surface_runoff_flux', None, 'kg m-2 s-1'), - G2Param(2, 10, 1, 2): CFName('sea_water_x_velocity', None, 'm s-1'), - G2Param(2, 10, 1, 3): CFName('sea_water_y_velocity', None, 'm s-1'), - G2Param(2, 10, 2, 0): CFName('sea_ice_area_fraction', None, '1'), - G2Param(2, 10, 3, 0): CFName('sea_surface_temperature', None, 'K'), - } + G2Param(2, 0, 0, 0): CFName("air_temperature", None, "K"), + G2Param(2, 0, 0, 2): CFName("air_potential_temperature", None, "K"), + 
G2Param(2, 0, 0, 6): CFName("dew_point_temperature", None, "K"), + G2Param(2, 0, 0, 10): CFName("surface_upward_latent_heat_flux", None, "W m-2"), + G2Param(2, 0, 0, 11): CFName("surface_upward_sensible_heat_flux", None, "W m-2"), + G2Param(2, 0, 0, 17): CFName("surface_temperature", None, "K"), + G2Param(2, 0, 0, 32): CFName("wet_bulb_potential_temperature", None, "K"), + G2Param(2, 0, 1, 0): CFName("specific_humidity", None, "kg kg-1"), + G2Param(2, 0, 1, 1): CFName("relative_humidity", None, "%"), + G2Param(2, 0, 1, 2): CFName("humidity_mixing_ratio", None, "kg kg-1"), + G2Param(2, 0, 1, 3): CFName(None, "precipitable_water", "kg m-2"), + G2Param(2, 0, 1, 7): CFName("precipitation_flux", None, "kg m-2 s-1"), + G2Param(2, 0, 1, 9): CFName( + "stratiform_rainfall_amount", + "Large-scale precipitation (non-convective)", + "kg m-2", + ), + G2Param(2, 0, 1, 10): CFName( + "convective_rainfall_amount", "Convective precipitation", "kg m-2" + ), + G2Param(2, 0, 1, 11): CFName("thickness_of_snowfall_amount", None, "m"), + G2Param(2, 0, 1, 13): CFName( + "liquid_water_content_of_surface_snow", None, "kg m-2" + ), + G2Param(2, 0, 1, 15): CFName( + "stratiform_snowfall_amount", "Large-scale snow", "kg m-2" + ), + G2Param(2, 0, 1, 22): CFName(None, "cloud_mixing_ratio", "kg kg-1"), + G2Param(2, 0, 1, 37): CFName( + "convective_rainfall_flux", "Convective precipitation rate", "kg m-2 s-1" + ), + G2Param(2, 0, 1, 49): CFName("precipitation_amount", None, "kg m-2"), + G2Param(2, 0, 1, 51): CFName("atmosphere_mass_content_of_water", None, "kg m-2"), + G2Param(2, 0, 1, 53): CFName("snowfall_flux", None, "kg m-2 s-1"), + G2Param(2, 0, 1, 58): CFName( + "convective_snowfall_flux", "Convective snowfall rate", "kg m-2 s-1" + ), + G2Param(2, 0, 1, 59): CFName( + "stratiform_snowfall_flux", "Large scale snowfall rate", "kg m-2 s-1" + ), + G2Param(2, 0, 1, 60): CFName("snowfall_amount", None, "kg m-2"), + G2Param(2, 0, 1, 64): CFName( + "atmosphere_mass_content_of_water_vapor", None, 
"kg m-2" + ), + G2Param(2, 0, 1, 77): CFName( + "stratiform_rainfall_flux", "Large scale rain rate", "kg m-2 s-1" + ), + G2Param(2, 0, 1, 83): CFName( + "mass_fraction_of_cloud_liquid_water_in_air", None, "kg kg-1" + ), + G2Param(2, 0, 1, 84): CFName("mass_fraction_of_cloud_ice_in_air", None, "kg kg-1"), + G2Param(2, 0, 2, 0): CFName("wind_from_direction", None, "degrees"), + G2Param(2, 0, 2, 1): CFName("wind_speed", None, "m s-1"), + G2Param(2, 0, 2, 2): CFName("x_wind", None, "m s-1"), + G2Param(2, 0, 2, 3): CFName("y_wind", None, "m s-1"), + G2Param(2, 0, 2, 8): CFName("lagrangian_tendency_of_air_pressure", None, "Pa s-1"), + G2Param(2, 0, 2, 9): CFName( + "upward_air_velocity", "Vertical velocity (geometric)", "m s-1" + ), + G2Param(2, 0, 2, 10): CFName("atmosphere_absolute_vorticity", None, "s-1"), + G2Param(2, 0, 2, 12): CFName( + "atmosphere_relative_vorticity", "Relative vorticity", "s-1" + ), + G2Param(2, 0, 2, 14): CFName(None, "ertel_potential_velocity", "K m2 kg-1 s-1"), + G2Param(2, 0, 2, 22): CFName("wind_speed_of_gust", None, "m s-1"), + G2Param(2, 0, 3, 0): CFName("air_pressure", None, "Pa"), + G2Param(2, 0, 3, 1): CFName("air_pressure_at_sea_level", None, "Pa"), + G2Param(2, 0, 3, 3): CFName(None, "icao_standard_atmosphere_reference_height", "m"), + G2Param(2, 0, 3, 4): CFName("geopotential", None, "m2 s-2"), + G2Param(2, 0, 3, 5): CFName("geopotential_height", None, "m"), + G2Param(2, 0, 3, 6): CFName("altitude", None, "m"), + G2Param(2, 0, 3, 9): CFName("geopotential_height_anomaly", None, "m"), + G2Param(2, 0, 4, 7): CFName( + "surface_downwelling_shortwave_flux_in_air", None, "W m-2" + ), + G2Param(2, 0, 4, 9): CFName("surface_net_downward_shortwave_flux", None, "W m-2"), + G2Param(2, 0, 5, 3): CFName( + "surface_downwelling_longwave_flux_in_air", None, "W m-2" + ), + G2Param(2, 0, 5, 4): CFName( + "toa_outgoing_longwave_flux", "Upward long-wave radiation flux", "W m-2" + ), + G2Param(2, 0, 5, 5): CFName("surface_net_downward_longwave_flux", 
None, "W m-2"), + G2Param(2, 0, 6, 1): CFName( + None, "cloud_area_fraction_assuming_maximum_random_overlap", "1" + ), + G2Param(2, 0, 6, 3): CFName("low_type_cloud_area_fraction", None, "%"), + G2Param(2, 0, 6, 4): CFName("medium_type_cloud_area_fraction", None, "%"), + G2Param(2, 0, 6, 5): CFName("high_type_cloud_area_fraction", None, "%"), + G2Param(2, 0, 6, 6): CFName( + "atmosphere_mass_content_of_cloud_liquid_water", None, "kg m-2" + ), + G2Param(2, 0, 6, 7): CFName("cloud_area_fraction_in_atmosphere_layer", None, "%"), + G2Param(2, 0, 6, 25): CFName(None, "WAFC_CB_horizontal_extent", "1"), + G2Param(2, 0, 6, 26): CFName(None, "WAFC_ICAO_height_at_cloud_base", "m"), + G2Param(2, 0, 6, 27): CFName(None, "WAFC_ICAO_height_at_cloud_top", "m"), + G2Param(2, 0, 7, 6): CFName( + "atmosphere_specific_convective_available_potential_energy", None, "J kg-1" + ), + G2Param(2, 0, 7, 7): CFName(None, "convective_inhibition", "J kg-1"), + G2Param(2, 0, 7, 8): CFName(None, "storm_relative_helicity", "J kg-1"), + G2Param(2, 0, 14, 0): CFName("atmosphere_mole_content_of_ozone", None, "Dobson"), + G2Param(2, 0, 19, 1): CFName(None, "grib_physical_atmosphere_albedo", "%"), + G2Param(2, 0, 19, 20): CFName(None, "WAFC_icing_potential", "1"), + G2Param(2, 0, 19, 21): CFName(None, "WAFC_in-cloud_turb_potential", "1"), + G2Param(2, 0, 19, 22): CFName(None, "WAFC_CAT_potential", "1"), + G2Param(2, 2, 0, 0): CFName("land_binary_mask", None, "1"), + G2Param(2, 2, 0, 0): CFName("land_area_fraction", None, "1"), + G2Param(2, 2, 0, 1): CFName("surface_roughness_length", None, "m"), + G2Param(2, 2, 0, 2): CFName("soil_temperature", None, "K"), + G2Param(2, 2, 0, 3): CFName( + "soil_moisture_content", "Soil moisture content", "kg m-2" + ), + G2Param(2, 2, 0, 7): CFName("surface_altitude", None, "m"), + G2Param(2, 2, 0, 22): CFName("moisture_content_of_soil_layer", None, "kg m-2"), + G2Param(2, 2, 0, 34): CFName("surface_runoff_flux", None, "kg m-2 s-1"), + G2Param(2, 10, 1, 2): 
CFName("sea_water_x_velocity", None, "m s-1"), + G2Param(2, 10, 1, 3): CFName("sea_water_y_velocity", None, "m s-1"), + G2Param(2, 10, 2, 0): CFName("sea_ice_area_fraction", None, "1"), + G2Param(2, 10, 3, 0): CFName("sea_surface_temperature", None, "K"), +} CF_CONSTRAINED_TO_GRIB1_LOCAL = { - (CFName('air_temperature', None, 'K'), DimensionCoordinate('height', 'm', (2,))): G1LocalParam(1, 128, 98, 167), - (CFName('dew_point_temperature', None, 'K'), DimensionCoordinate('height', 'm', (2,))): G1LocalParam(1, 128, 98, 168), - (CFName('x_wind', None, 'm s-1'), DimensionCoordinate('height', 'm', (10,))): G1LocalParam(1, 128, 98, 165), - (CFName('y_wind', None, 'm s-1'), DimensionCoordinate('height', 'm', (10,))): G1LocalParam(1, 128, 98, 166), - } + ( + CFName("air_temperature", None, "K"), + DimensionCoordinate("height", "m", (2,)), + ): G1LocalParam(1, 128, 98, 167), + ( + CFName("dew_point_temperature", None, "K"), + DimensionCoordinate("height", "m", (2,)), + ): G1LocalParam(1, 128, 98, 168), + ( + CFName("x_wind", None, "m s-1"), + DimensionCoordinate("height", "m", (10,)), + ): G1LocalParam(1, 128, 98, 165), + ( + CFName("y_wind", None, "m s-1"), + DimensionCoordinate("height", "m", (10,)), + ): G1LocalParam(1, 128, 98, 166), +} CF_TO_GRIB1_LOCAL = { - CFName(None, 'grib_physical_atmosphere_albedo', '1'): G1LocalParam(1, 128, 98, 174), - CFName(None, 'grib_skin_temperature', 'K'): G1LocalParam(1, 128, 98, 235), - CFName('air_pressure_at_sea_level', None, 'Pa'): G1LocalParam(1, 128, 98, 151), - CFName('air_temperature', None, 'K'): G1LocalParam(1, 128, 98, 130), - CFName('atmosphere_specific_convective_available_potential_energy', None, 'J kg-1'): G1LocalParam(1, 128, 98, 59), - CFName('cloud_area_fraction', None, '1'): G1LocalParam(1, 128, 98, 164), - CFName('geopotential', None, 'm2 s-2'): G1LocalParam(1, 128, 98, 129), - CFName('high_type_cloud_area_fraction', None, '1'): G1LocalParam(1, 128, 98, 188), - CFName('lagrangian_tendency_of_air_pressure', None, 'Pa 
s-1'): G1LocalParam(1, 128, 98, 135), - CFName('low_type_cloud_area_fraction', None, '1'): G1LocalParam(1, 128, 98, 186), - CFName('medium_type_cloud_area_fraction', None, '1'): G1LocalParam(1, 128, 98, 187), - CFName('relative_humidity', None, '%'): G1LocalParam(1, 128, 98, 157), - CFName('sea_ice_area_fraction', None, '1'): G1LocalParam(1, 128, 98, 31), - CFName('sea_surface_temperature', None, 'K'): G1LocalParam(1, 128, 98, 34), - CFName('surface_roughness_length', None, 'm'): G1LocalParam(1, 128, 98, 173), - CFName('thickness_of_snowfall_amount', None, 'm'): G1LocalParam(1, 128, 98, 141), - CFName('x_wind', None, 'm s-1'): G1LocalParam(1, 128, 98, 131), - CFName('y_wind', None, 'm s-1'): G1LocalParam(1, 128, 98, 132), - } + CFName(None, "grib_physical_atmosphere_albedo", "1"): G1LocalParam(1, 128, 98, 174), + CFName(None, "grib_skin_temperature", "K"): G1LocalParam(1, 128, 98, 235), + CFName("air_pressure_at_sea_level", None, "Pa"): G1LocalParam(1, 128, 98, 151), + CFName("air_temperature", None, "K"): G1LocalParam(1, 128, 98, 130), + CFName( + "atmosphere_specific_convective_available_potential_energy", None, "J kg-1" + ): G1LocalParam(1, 128, 98, 59), + CFName("cloud_area_fraction", None, "1"): G1LocalParam(1, 128, 98, 164), + CFName("geopotential", None, "m2 s-2"): G1LocalParam(1, 128, 98, 129), + CFName("high_type_cloud_area_fraction", None, "1"): G1LocalParam(1, 128, 98, 188), + CFName("lagrangian_tendency_of_air_pressure", None, "Pa s-1"): G1LocalParam( + 1, 128, 98, 135 + ), + CFName("low_type_cloud_area_fraction", None, "1"): G1LocalParam(1, 128, 98, 186), + CFName("medium_type_cloud_area_fraction", None, "1"): G1LocalParam(1, 128, 98, 187), + CFName("relative_humidity", None, "%"): G1LocalParam(1, 128, 98, 157), + CFName("sea_ice_area_fraction", None, "1"): G1LocalParam(1, 128, 98, 31), + CFName("sea_surface_temperature", None, "K"): G1LocalParam(1, 128, 98, 34), + CFName("surface_roughness_length", None, "m"): G1LocalParam(1, 128, 98, 173), + 
CFName("thickness_of_snowfall_amount", None, "m"): G1LocalParam(1, 128, 98, 141), + CFName("x_wind", None, "m s-1"): G1LocalParam(1, 128, 98, 131), + CFName("y_wind", None, "m s-1"): G1LocalParam(1, 128, 98, 132), +} CF_TO_GRIB2 = { - CFName(None, 'WAFC_CAT_potential', '1'): G2Param(2, 0, 19, 22), - CFName(None, 'WAFC_CB_horizontal_extent', '1'): G2Param(2, 0, 6, 25), - CFName(None, 'WAFC_ICAO_height_at_cloud_base', 'm'): G2Param(2, 0, 6, 26), - CFName(None, 'WAFC_ICAO_height_at_cloud_top', 'm'): G2Param(2, 0, 6, 27), - CFName(None, 'WAFC_icing_potential', '1'): G2Param(2, 0, 19, 20), - CFName(None, 'WAFC_in-cloud_turb_potential', '1'): G2Param(2, 0, 19, 21), - CFName(None, 'cloud_area_fraction_assuming_maximum_random_overlap', '1'): G2Param(2, 0, 6, 1), - CFName(None, 'cloud_mixing_ratio', 'kg kg-1'): G2Param(2, 0, 1, 22), - CFName(None, 'convective_inhibition', 'J kg-1'): G2Param(2, 0, 7, 7), - CFName(None, 'ertel_potential_velocity', 'K m2 kg-1 s-1'): G2Param(2, 0, 2, 14), - CFName(None, 'grib_physical_atmosphere_albedo', '%'): G2Param(2, 0, 19, 1), - CFName(None, 'icao_standard_atmosphere_reference_height', 'm'): G2Param(2, 0, 3, 3), - CFName(None, 'precipitable_water', 'kg m-2'): G2Param(2, 0, 1, 3), - CFName(None, 'storm_relative_helicity', 'J kg-1'): G2Param(2, 0, 7, 8), - CFName('air_potential_temperature', None, 'K'): G2Param(2, 0, 0, 2), - CFName('air_pressure', None, 'Pa'): G2Param(2, 0, 3, 0), - CFName('air_pressure_at_sea_level', None, 'Pa'): G2Param(2, 0, 3, 0), - CFName('air_pressure_at_sea_level', None, 'Pa'): G2Param(2, 0, 3, 1), - CFName('air_temperature', None, 'K'): G2Param(2, 0, 0, 0), - CFName('altitude', None, 'm'): G2Param(2, 0, 3, 6), - CFName('atmosphere_absolute_vorticity', None, 's-1'): G2Param(2, 0, 2, 10), - CFName('atmosphere_mass_content_of_cloud_liquid_water', None, 'kg m-2'): G2Param(2, 0, 6, 6), - CFName('atmosphere_mass_content_of_water', None, 'kg m-2'): G2Param(2, 0, 1, 51), - CFName('atmosphere_mass_content_of_water_vapor', 
None, 'kg m-2'): G2Param(2, 0, 1, 64), - CFName('atmosphere_mole_content_of_ozone', None, 'Dobson'): G2Param(2, 0, 14, 0), - CFName('atmosphere_relative_vorticity', None, 's-1'): G2Param(2, 0, 2, 12), - CFName('atmosphere_specific_convective_available_potential_energy', None, 'J kg-1'): G2Param(2, 0, 7, 6), - CFName('convective_rainfall_amount', None, 'kg m-2'): G2Param(2, 0, 1, 10), - CFName('convective_rainfall_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 37), - CFName('convective_snowfall_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 58), - CFName('cloud_area_fraction_in_atmosphere_layer', None, '%'): G2Param(2, 0, 6, 7), - CFName('dew_point_temperature', None, 'K'): G2Param(2, 0, 0, 6), - CFName('geopotential', None, 'm2 s-2'): G2Param(2, 0, 3, 4), - CFName('geopotential_height', None, 'm'): G2Param(2, 0, 3, 5), - CFName('geopotential_height_anomaly', None, 'm'): G2Param(2, 0, 3, 9), - CFName('high_type_cloud_area_fraction', None, '%'): G2Param(2, 0, 6, 5), - CFName('humidity_mixing_ratio', None, 'kg kg-1'): G2Param(2, 0, 1, 2), - CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'): G2Param(2, 0, 2, 8), - CFName('land_area_fraction', None, '1'): G2Param(2, 2, 0, 0), - CFName('land_binary_mask', None, '1'): G2Param(2, 2, 0, 0), - CFName('liquid_water_content_of_surface_snow', None, 'kg m-2'): G2Param(2, 0, 1, 13), - CFName('low_type_cloud_area_fraction', None, '%'): G2Param(2, 0, 6, 3), - CFName('mass_fraction_of_cloud_ice_in_air', None, 'kg kg-1'): G2Param(2, 0, 1, 84), - CFName('mass_fraction_of_cloud_liquid_water_in_air', None, 'kg kg-1'): G2Param(2, 0, 1, 83), - CFName('medium_type_cloud_area_fraction', None, '%'): G2Param(2, 0, 6, 4), - CFName('moisture_content_of_soil_layer', None, 'kg m-2'): G2Param(2, 2, 0, 22), - CFName('precipitation_amount', None, 'kg m-2'): G2Param(2, 0, 1, 49), - CFName('precipitation_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 7), - CFName('relative_humidity', None, '%'): G2Param(2, 0, 1, 1), - 
CFName('sea_ice_area_fraction', None, '1'): G2Param(2, 10, 2, 0), - CFName('sea_surface_temperature', None, 'K'): G2Param(2, 10, 3, 0), - CFName('sea_water_x_velocity', None, 'm s-1'): G2Param(2, 10, 1, 2), - CFName('sea_water_y_velocity', None, 'm s-1'): G2Param(2, 10, 1, 3), - CFName('snowfall_amount', None, 'kg m-2'): G2Param(2, 0, 1, 60), - CFName('snowfall_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 53), - CFName('soil_moisture_content', None, 'kg m-2'): G2Param(2, 2, 0, 3), - CFName('soil_temperature', None, 'K'): G2Param(2, 2, 0, 2), - CFName('specific_humidity', None, 'kg kg-1'): G2Param(2, 0, 1, 0), - CFName('stratiform_rainfall_amount', None, 'kg m-2'): G2Param(2, 0, 1, 9), - CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 77), - CFName('stratiform_snowfall_amount', None, 'kg m-2'): G2Param(2, 0, 1, 15), - CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'): G2Param(2, 0, 1, 59), - CFName('surface_air_pressure', None, 'Pa'): G2Param(2, 0, 3, 0), - CFName('surface_altitude', None, 'm'): G2Param(2, 2, 0, 7), - CFName('surface_downwelling_longwave_flux_in_air', None, 'W m-2'): G2Param(2, 0, 5, 3), - CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'): G2Param(2, 0, 4, 7), - CFName('surface_net_downward_longwave_flux', None, 'W m-2'): G2Param(2, 0, 5, 5), - CFName('surface_net_downward_shortwave_flux', None, 'W m-2'): G2Param(2, 0, 4, 9), - CFName('surface_roughness_length', None, 'm'): G2Param(2, 2, 0, 1), - CFName('surface_runoff_flux', None, 'kg m-2 s-1'): G2Param(2, 2, 0, 34), - CFName('surface_temperature', None, 'K'): G2Param(2, 0, 0, 17), - CFName('surface_upward_latent_heat_flux', None, 'W m-2'): G2Param(2, 0, 0, 10), - CFName('surface_upward_sensible_heat_flux', None, 'W m-2'): G2Param(2, 0, 0, 11), - CFName('toa_outgoing_longwave_flux', None, 'W m-2'): G2Param(2, 0, 5, 4), - CFName('thickness_of_snowfall_amount', None, 'm'): G2Param(2, 0, 1, 11), - CFName('upward_air_velocity', None, 'm s-1'): G2Param(2, 
0, 2, 9), - CFName('wet_bulb_potential_temperature', None, 'K'): G2Param(2, 0, 0, 32), - CFName('wind_from_direction', None, 'degrees'): G2Param(2, 0, 2, 0), - CFName('wind_speed', None, 'm s-1'): G2Param(2, 0, 2, 1), - CFName('wind_speed_of_gust', None, 'm s-1'): G2Param(2, 0, 2, 22), - CFName('x_wind', None, 'm s-1'): G2Param(2, 0, 2, 2), - CFName('y_wind', None, 'm s-1'): G2Param(2, 0, 2, 3), - } + CFName(None, "WAFC_CAT_potential", "1"): G2Param(2, 0, 19, 22), + CFName(None, "WAFC_CB_horizontal_extent", "1"): G2Param(2, 0, 6, 25), + CFName(None, "WAFC_ICAO_height_at_cloud_base", "m"): G2Param(2, 0, 6, 26), + CFName(None, "WAFC_ICAO_height_at_cloud_top", "m"): G2Param(2, 0, 6, 27), + CFName(None, "WAFC_icing_potential", "1"): G2Param(2, 0, 19, 20), + CFName(None, "WAFC_in-cloud_turb_potential", "1"): G2Param(2, 0, 19, 21), + CFName(None, "cloud_area_fraction_assuming_maximum_random_overlap", "1"): G2Param( + 2, 0, 6, 1 + ), + CFName(None, "cloud_mixing_ratio", "kg kg-1"): G2Param(2, 0, 1, 22), + CFName(None, "convective_inhibition", "J kg-1"): G2Param(2, 0, 7, 7), + CFName(None, "ertel_potential_velocity", "K m2 kg-1 s-1"): G2Param(2, 0, 2, 14), + CFName(None, "grib_physical_atmosphere_albedo", "%"): G2Param(2, 0, 19, 1), + CFName(None, "icao_standard_atmosphere_reference_height", "m"): G2Param(2, 0, 3, 3), + CFName(None, "precipitable_water", "kg m-2"): G2Param(2, 0, 1, 3), + CFName(None, "storm_relative_helicity", "J kg-1"): G2Param(2, 0, 7, 8), + CFName("air_potential_temperature", None, "K"): G2Param(2, 0, 0, 2), + CFName("air_pressure", None, "Pa"): G2Param(2, 0, 3, 0), + CFName("air_pressure_at_sea_level", None, "Pa"): G2Param(2, 0, 3, 0), + CFName("air_pressure_at_sea_level", None, "Pa"): G2Param(2, 0, 3, 1), + CFName("air_temperature", None, "K"): G2Param(2, 0, 0, 0), + CFName("altitude", None, "m"): G2Param(2, 0, 3, 6), + CFName("atmosphere_absolute_vorticity", None, "s-1"): G2Param(2, 0, 2, 10), + CFName("atmosphere_mass_content_of_cloud_liquid_water", 
None, "kg m-2"): G2Param( + 2, 0, 6, 6 + ), + CFName("atmosphere_mass_content_of_water", None, "kg m-2"): G2Param(2, 0, 1, 51), + CFName("atmosphere_mass_content_of_water_vapor", None, "kg m-2"): G2Param( + 2, 0, 1, 64 + ), + CFName("atmosphere_mole_content_of_ozone", None, "Dobson"): G2Param(2, 0, 14, 0), + CFName("atmosphere_relative_vorticity", None, "s-1"): G2Param(2, 0, 2, 12), + CFName( + "atmosphere_specific_convective_available_potential_energy", None, "J kg-1" + ): G2Param(2, 0, 7, 6), + CFName("convective_rainfall_amount", None, "kg m-2"): G2Param(2, 0, 1, 10), + CFName("convective_rainfall_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 37), + CFName("convective_snowfall_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 58), + CFName("cloud_area_fraction_in_atmosphere_layer", None, "%"): G2Param(2, 0, 6, 7), + CFName("dew_point_temperature", None, "K"): G2Param(2, 0, 0, 6), + CFName("geopotential", None, "m2 s-2"): G2Param(2, 0, 3, 4), + CFName("geopotential_height", None, "m"): G2Param(2, 0, 3, 5), + CFName("geopotential_height_anomaly", None, "m"): G2Param(2, 0, 3, 9), + CFName("high_type_cloud_area_fraction", None, "%"): G2Param(2, 0, 6, 5), + CFName("humidity_mixing_ratio", None, "kg kg-1"): G2Param(2, 0, 1, 2), + CFName("lagrangian_tendency_of_air_pressure", None, "Pa s-1"): G2Param(2, 0, 2, 8), + CFName("land_area_fraction", None, "1"): G2Param(2, 2, 0, 0), + CFName("land_binary_mask", None, "1"): G2Param(2, 2, 0, 0), + CFName("liquid_water_content_of_surface_snow", None, "kg m-2"): G2Param( + 2, 0, 1, 13 + ), + CFName("low_type_cloud_area_fraction", None, "%"): G2Param(2, 0, 6, 3), + CFName("mass_fraction_of_cloud_ice_in_air", None, "kg kg-1"): G2Param(2, 0, 1, 84), + CFName("mass_fraction_of_cloud_liquid_water_in_air", None, "kg kg-1"): G2Param( + 2, 0, 1, 83 + ), + CFName("medium_type_cloud_area_fraction", None, "%"): G2Param(2, 0, 6, 4), + CFName("moisture_content_of_soil_layer", None, "kg m-2"): G2Param(2, 2, 0, 22), + CFName("precipitation_amount", 
None, "kg m-2"): G2Param(2, 0, 1, 49), + CFName("precipitation_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 7), + CFName("relative_humidity", None, "%"): G2Param(2, 0, 1, 1), + CFName("sea_ice_area_fraction", None, "1"): G2Param(2, 10, 2, 0), + CFName("sea_surface_temperature", None, "K"): G2Param(2, 10, 3, 0), + CFName("sea_water_x_velocity", None, "m s-1"): G2Param(2, 10, 1, 2), + CFName("sea_water_y_velocity", None, "m s-1"): G2Param(2, 10, 1, 3), + CFName("snowfall_amount", None, "kg m-2"): G2Param(2, 0, 1, 60), + CFName("snowfall_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 53), + CFName("soil_moisture_content", None, "kg m-2"): G2Param(2, 2, 0, 3), + CFName("soil_temperature", None, "K"): G2Param(2, 2, 0, 2), + CFName("specific_humidity", None, "kg kg-1"): G2Param(2, 0, 1, 0), + CFName("stratiform_rainfall_amount", None, "kg m-2"): G2Param(2, 0, 1, 9), + CFName("stratiform_rainfall_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 77), + CFName("stratiform_snowfall_amount", None, "kg m-2"): G2Param(2, 0, 1, 15), + CFName("stratiform_snowfall_flux", None, "kg m-2 s-1"): G2Param(2, 0, 1, 59), + CFName("surface_air_pressure", None, "Pa"): G2Param(2, 0, 3, 0), + CFName("surface_altitude", None, "m"): G2Param(2, 2, 0, 7), + CFName("surface_downwelling_longwave_flux_in_air", None, "W m-2"): G2Param( + 2, 0, 5, 3 + ), + CFName("surface_downwelling_shortwave_flux_in_air", None, "W m-2"): G2Param( + 2, 0, 4, 7 + ), + CFName("surface_net_downward_longwave_flux", None, "W m-2"): G2Param(2, 0, 5, 5), + CFName("surface_net_downward_shortwave_flux", None, "W m-2"): G2Param(2, 0, 4, 9), + CFName("surface_roughness_length", None, "m"): G2Param(2, 2, 0, 1), + CFName("surface_runoff_flux", None, "kg m-2 s-1"): G2Param(2, 2, 0, 34), + CFName("surface_temperature", None, "K"): G2Param(2, 0, 0, 17), + CFName("surface_upward_latent_heat_flux", None, "W m-2"): G2Param(2, 0, 0, 10), + CFName("surface_upward_sensible_heat_flux", None, "W m-2"): G2Param(2, 0, 0, 11), + 
CFName("toa_outgoing_longwave_flux", None, "W m-2"): G2Param(2, 0, 5, 4), + CFName("thickness_of_snowfall_amount", None, "m"): G2Param(2, 0, 1, 11), + CFName("upward_air_velocity", None, "m s-1"): G2Param(2, 0, 2, 9), + CFName("wet_bulb_potential_temperature", None, "K"): G2Param(2, 0, 0, 32), + CFName("wind_from_direction", None, "degrees"): G2Param(2, 0, 2, 0), + CFName("wind_speed", None, "m s-1"): G2Param(2, 0, 2, 1), + CFName("wind_speed_of_gust", None, "m s-1"): G2Param(2, 0, 2, 22), + CFName("x_wind", None, "m s-1"): G2Param(2, 0, 2, 2), + CFName("y_wind", None, "m s-1"): G2Param(2, 0, 2, 3), +} diff --git a/iris_grib/_iris_mercator_support.py b/iris_grib/_iris_mercator_support.py deleted file mode 100644 index 6d23831fc..000000000 --- a/iris_grib/_iris_mercator_support.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright iris-grib contributors -# -# This file is part of iris-grib and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -""" -Temporary module to check for the extended Mercator class in Iris, -which iris-grib requires for its Mercator support. - -""" - -from packaging.version import Version - -import iris - - -def confirm_extended_mercator_supported(): - # Check that Iris version is at least 2.1, required for 'standard_parallel' - # support in the Mercator coord-system. - # This is a temporary fix allowing us to state Iris>=2.0 as a dependency, - # required for this release because Iris 2.1 is not yet available. - iris_version = Version(iris.__version__) - min_mercator_version = Version('2.1.0') - if iris_version < min_mercator_version: - msg = 'Support for Mercator projections requires Iris version >= {}' - raise ValueError(msg.format(min_mercator_version)) diff --git a/iris_grib/_load_convert.py b/iris_grib/_load_convert.py index df4ba7e41..c4ea31ec7 100644 --- a/iris_grib/_load_convert.py +++ b/iris_grib/_load_convert.py @@ -26,11 +26,14 @@ from iris.exceptions import TranslationError from . 
import grib_phenom_translation as itranslation from .grib_phenom_translation import GRIBCode -from iris.fileformats.rules import ConversionMetadata, Factory, Reference, \ - ReferenceTarget +from iris.fileformats.rules import ( + ConversionMetadata, + Factory, + Reference, + ReferenceTarget, +) from iris.util import _is_circular -from ._iris_mercator_support import confirm_extended_mercator_supported from ._grib1_load_rules import grib1_convert @@ -38,65 +41,58 @@ __all__ = ["convert"] -options = Namespace(warn_on_unsupported=False, - support_hindcast_values=True) +options = Namespace(warn_on_unsupported=False, support_hindcast_values=True) -ScanningMode = namedtuple('ScanningMode', ['i_negative', - 'j_positive', - 'j_consecutive', - 'i_alternative']) +ScanningMode = namedtuple( + "ScanningMode", ["i_negative", "j_positive", "j_consecutive", "i_alternative"] +) -ProjectionCentre = namedtuple('ProjectionCentre', - ['south_pole_on_projection_plane', - 'bipolar_and_symmetric']) +ProjectionCentre = namedtuple( + "ProjectionCentre", ["south_pole_on_projection_plane", "bipolar_and_symmetric"] +) -ResolutionFlags = namedtuple('ResolutionFlags', - ['i_increments_given', - 'j_increments_given', - 'uv_resolved']) +ResolutionFlags = namedtuple( + "ResolutionFlags", ["i_increments_given", "j_increments_given", "uv_resolved"] +) -FixedSurface = namedtuple('FixedSurface', ['standard_name', - 'long_name', - 'units']) +FixedSurface = namedtuple("FixedSurface", ["standard_name", "long_name", "units"]) -InterpolationParameters = namedtuple('InterpolationParameters', - ['interpolation_type', - 'statistical_process', - 'number_of_points_used']) +InterpolationParameters = namedtuple( + "InterpolationParameters", + ["interpolation_type", "statistical_process", "number_of_points_used"], +) # Regulations 92.1.6. _GRID_ACCURACY_IN_DEGREES = 1e-6 # 1/1,000,000 of a degree # Reference Common Code Table C-1. 
-_CENTRES = { - 'ecmf': 'European Centre for Medium Range Weather Forecasts' -} +_CENTRES = {"ecmf": "European Centre for Medium Range Weather Forecasts"} # Reference Code Table 1.0 _CODE_TABLES_MISSING = 255 # UDUNITS-2 units time string. Reference GRIB2 Code Table 4.4. _TIME_RANGE_UNITS = { - 0: 'minutes', - 1: 'hours', - 2: 'days', + 0: "minutes", + 1: "hours", + 2: "days", # 3: 'months', Unsupported # 4: 'years', Unsupported # 5: '10 years', Unsupported # 6: '30 years', Unsupported # 7: '100 years', Unsupported # 8-9 Reserved - 10: '3 hours', - 11: '6 hours', - 12: '12 hours', - 13: 'seconds' + 10: "3 hours", + 11: "6 hours", + 12: "12 hours", + 13: "seconds", } # Regulation 92.1.4 -_TIME_RANGE_MISSING = 2 ** 32 - 1 +_TIME_RANGE_MISSING = 2**32 - 1 # Reference Code Table 4.5. _FIXED_SURFACE = { - 100: FixedSurface(None, 'pressure', 'Pa'), # Isobaric surface - 103: FixedSurface(None, 'height', 'm') # Height level above ground + 100: FixedSurface(None, "pressure", "Pa"), # Isobaric surface + 103: FixedSurface(None, "height", "m"), # Height level above ground } _TYPE_OF_FIXED_SURFACE_MISSING = 255 @@ -106,16 +102,16 @@ # Reference Code Table 4.10. _STATISTIC_TYPE_NAMES = { - 0: 'mean', - 1: 'sum', - 2: 'maximum', - 3: 'minimum', - 6: 'standard_deviation' + 0: "mean", + 1: "sum", + 2: "maximum", + 3: "minimum", + 6: "standard_deviation", } # Reference Code Table 4.11. _STATISTIC_TYPE_OF_TIME_INTERVAL = { - 2: 'same start time of forecast, forecast time is incremented' + 2: "same start time of forecast, forecast time is incremented" } # NOTE: Our test data contains the value 2, which is all we currently support. # The exact interpretation of this is still unclear. 
@@ -126,18 +122,17 @@ # InterpolationParameters(spatial process descriptor, statistical process # (octet 35), number of points used in interpolation (octet 37)) _SPATIAL_PROCESSING_TYPES = { - 0: InterpolationParameters('No interpolation', 'cell_method', 0), - 1: InterpolationParameters('Bilinear interpolation', None, 4), - 2: InterpolationParameters('Bicubic interpolation', None, 4), - 3: InterpolationParameters('Nearest neighbour interpolation', None, 1), - 4: InterpolationParameters('Budget interpolation', None, 4), - 5: InterpolationParameters('Spectral interpolation', None, 4), - 6: InterpolationParameters('Neighbour-budget interpolation', None, 4) + 0: InterpolationParameters("No interpolation", "cell_method", 0), + 1: InterpolationParameters("Bilinear interpolation", None, 4), + 2: InterpolationParameters("Bicubic interpolation", None, 4), + 3: InterpolationParameters("Nearest neighbour interpolation", None, 1), + 4: InterpolationParameters("Budget interpolation", None, 4), + 5: InterpolationParameters("Spectral interpolation", None, 4), + 6: InterpolationParameters("Neighbour-budget interpolation", None, 4), } # Class containing details of a probability analysis. -Probability = namedtuple('Probability', - ('probability_type_name', 'threshold')) +Probability = namedtuple("Probability", ("probability_type_name", "threshold")) # List of grid definition template numbers which use either (i,j) or (x,y) # for (lat,lon) @@ -169,10 +164,12 @@ def unscale(value, factor): is returned. """ + def _unscale(v, f): - return v / 10.0 ** f + return v / 10.0**f if isinstance(value, Iterable) or isinstance(factor, Iterable): + def _masker(item): # This is a small work around for an edge case, which is not # evident in any of our sample GRIB2 messages, where an array @@ -182,7 +179,7 @@ def _masker(item): # value is used, selected from a legacy implementation of iris, # to construct the masked array. The valure is transient, only in # scope for this function. 
- numerical_mdi = 2 ** 32 - 1 + numerical_mdi = 2**32 - 1 item = [numerical_mdi if i is None else i for i in item] result = ma.masked_equal(item, numerical_mdi) if ma.count_masked(result): @@ -191,6 +188,7 @@ def _masker(item): # for data containing _MDI. Remove transient _MDI value. result.data[result.mask] = 0 return result + value = _masker(value) factor = _masker(factor) result = _unscale(value, factor) @@ -219,9 +217,9 @@ def _hindcast_fix(forecast_time): original_forecast_time = forecast_time forecast_time = -(uft - 2 * HIGHBIT) if options.warn_on_unsupported: - msg = ('Re-interpreting large grib forecastTime ' - 'from {} to {}.'.format(original_forecast_time, - forecast_time)) + msg = "Re-interpreting large grib forecastTime " "from {} to {}.".format( + original_forecast_time, forecast_time + ) warnings.warn(msg) return forecast_time @@ -241,8 +239,8 @@ def fixup_float32_from_int32(value): # to treat an integer 0 as a positive zero. if value < 0: value = 0x80000000 - value - value_as_uint32 = np.array(value, dtype='u4') - value_as_float32 = value_as_uint32.view(dtype='f4') + value_as_uint32 = np.array(value, dtype="u4") + value_as_float32 = value_as_uint32.view(dtype="f4") return float(value_as_float32) @@ -267,6 +265,7 @@ def fixup_int32_from_uint32(value): # ############################################################################### + def reference_time_coord(section): """ Translate section 1 reference time according to its significance. @@ -279,27 +278,37 @@ def reference_time_coord(section): """ # Look-up standard name by significanceOfReferenceTime. - _lookup = {0: 'forecast_reference_time', - 1: 'forecast_reference_time', - 2: 'time', - 3: 'time'} + _lookup = { + 0: "forecast_reference_time", + 1: "forecast_reference_time", + 2: "time", + 3: "time", + } # Calculate the reference time and units. 
- dt = datetime(section['year'], section['month'], section['day'], - section['hour'], section['minute'], section['second']) + dt = datetime( + section["year"], + section["month"], + section["day"], + section["hour"], + section["minute"], + section["second"], + ) # XXX Defaulting to a Gregorian calendar. # Current GRIBAPI does not cover GRIB Section 1 - Octets 22-nn (optional) # which are part of GRIB spec v12. - unit = Unit('hours since epoch', calendar=CALENDAR_GREGORIAN) + unit = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) point = float(unit.date2num(dt)) # Reference Code Table 1.2. - significanceOfReferenceTime = section['significanceOfReferenceTime'] + significanceOfReferenceTime = section["significanceOfReferenceTime"] standard_name = _lookup.get(significanceOfReferenceTime) if standard_name is None: - msg = 'Identificaton section 1 contains an unsupported significance ' \ - 'of reference time [{}]'.format(significanceOfReferenceTime) + msg = ( + "Identificaton section 1 contains an unsupported significance " + "of reference time [{}]".format(significanceOfReferenceTime) + ) raise TranslationError(msg) # Create the associated reference time of data coordinate. @@ -314,6 +323,7 @@ def reference_time_coord(section): # ############################################################################### + def projection_centre(projectionCentreFlag): """ Translate the projection centre flag bitmask. 
@@ -331,8 +341,7 @@ def projection_centre(projectionCentreFlag): """ south_pole_on_projection_plane = bool(projectionCentreFlag & 0x80) bipolar_and_symmetric = bool(projectionCentreFlag & 0x40) - return ProjectionCentre(south_pole_on_projection_plane, - bipolar_and_symmetric) + return ProjectionCentre(south_pole_on_projection_plane, bipolar_and_symmetric) def scanning_mode(scanningMode): @@ -356,12 +365,13 @@ def scanning_mode(scanningMode): i_alternative = bool(scanningMode & 0x10) if i_alternative: - msg = 'Grid definition section 3 contains unsupported ' \ - 'alternative row scanning mode' + msg = ( + "Grid definition section 3 contains unsupported " + "alternative row scanning mode" + ) raise TranslationError(msg) - return ScanningMode(i_negative, j_positive, - j_consecutive, i_alternative) + return ScanningMode(i_negative, j_positive, j_consecutive, i_alternative) def resolution_flags(resolutionAndComponentFlags): @@ -417,8 +427,10 @@ def ellipsoid(shapeOfTheEarth, major, minor, radius): """ # Supported shapeOfTheEarth values. if shapeOfTheEarth not in (0, 1, 2, 3, 4, 5, 6, 7): - msg = 'Grid definition section 3 contains an unsupported ' \ - 'shape of the earth [{}]'.format(shapeOfTheEarth) + msg = ( + "Grid definition section 3 contains an unsupported " + "shape of the earth [{}]".format(shapeOfTheEarth) + ) raise TranslationError(msg) if shapeOfTheEarth == 0: @@ -428,8 +440,10 @@ def ellipsoid(shapeOfTheEarth, major, minor, radius): # Earth assumed spherical with radius specified (in m) by # data producer. 
if ma.is_masked(radius): - msg = 'Ellipsoid for shape of the earth {} requires a' \ - 'radius to be specified.'.format(shapeOfTheEarth) + msg = ( + "Ellipsoid for shape of the earth {} requires a" + "radius to be specified.".format(shapeOfTheEarth) + ) raise ValueError(msg) result = icoord_systems.GeogCS(radius) elif shapeOfTheEarth == 2: @@ -438,12 +452,14 @@ def ellipsoid(shapeOfTheEarth, major, minor, radius): elif shapeOfTheEarth in [3, 7]: # Earth assumed oblate spheroid with major and minor axes # specified (in km)/(in m) by data producer. - emsg_oblate = 'Ellipsoid for shape of the earth [{}] requires a' \ - 'semi-{} axis to be specified.' + emsg_oblate = ( + "Ellipsoid for shape of the earth [{}] requires a" + "semi-{} axis to be specified." + ) if ma.is_masked(major): - raise ValueError(emsg_oblate.format(shapeOfTheEarth, 'major')) + raise ValueError(emsg_oblate.format(shapeOfTheEarth, "major")) if ma.is_masked(minor): - raise ValueError(emsg_oblate.format(shapeOfTheEarth, 'minor')) + raise ValueError(emsg_oblate.format(shapeOfTheEarth, "minor")) # Check whether to convert from km to m. if shapeOfTheEarth == 3: major *= 1000 @@ -451,12 +467,10 @@ def ellipsoid(shapeOfTheEarth, major, minor, radius): result = icoord_systems.GeogCS(major, minor) elif shapeOfTheEarth == 4: # Earth assumed oblate spheroid as defined in IAG-GRS80 model. - result = icoord_systems.GeogCS(6378137, - inverse_flattening=298.257222101) + result = icoord_systems.GeogCS(6378137, inverse_flattening=298.257222101) elif shapeOfTheEarth == 5: # Earth assumed represented by WGS84 (as used by ICAO since 1998). - result = icoord_systems.GeogCS(6378137, - inverse_flattening=298.257223563) + result = icoord_systems.GeogCS(6378137, inverse_flattening=298.257223563) elif shapeOfTheEarth == 6: # Earth assumed spherical with radius of 6 371 229.0m result = icoord_systems.GeogCS(6371229) @@ -477,12 +491,16 @@ def ellipsoid_geometry(section): Tuple containing the major-axis, minor-axis and radius. 
""" - major = unscale(section['scaledValueOfEarthMajorAxis'], - section['scaleFactorOfEarthMajorAxis']) - minor = unscale(section['scaledValueOfEarthMinorAxis'], - section['scaleFactorOfEarthMinorAxis']) - radius = unscale(section['scaledValueOfRadiusOfSphericalEarth'], - section['scaleFactorOfRadiusOfSphericalEarth']) + major = unscale( + section["scaledValueOfEarthMajorAxis"], section["scaleFactorOfEarthMajorAxis"] + ) + minor = unscale( + section["scaledValueOfEarthMinorAxis"], section["scaleFactorOfEarthMinorAxis"] + ) + radius = unscale( + section["scaledValueOfRadiusOfSphericalEarth"], + section["scaleFactorOfRadiusOfSphericalEarth"], + ) return major, minor, radius @@ -522,50 +540,63 @@ def grid_definition_template_0_and_1(section, metadata, y_name, x_name, cs): """ # Abort if this is a reduced grid, that case isn't handled yet. - if section['numberOfOctectsForNumberOfPoints'] != 0 or \ - section['interpretationOfNumberOfPoints'] != 0: - msg = 'Grid definition section 3 contains unsupported ' \ - 'quasi-regular grid' + if ( + section["numberOfOctectsForNumberOfPoints"] != 0 + or section["interpretationOfNumberOfPoints"] != 0 + ): + msg = "Grid definition section 3 contains unsupported " "quasi-regular grid" raise TranslationError(msg) - scan = scanning_mode(section['scanningMode']) + scan = scanning_mode(section["scanningMode"]) # Set resolution flags - res_flags = resolution_flags(section['resolutionAndComponentFlags']) + res_flags = resolution_flags(section["resolutionAndComponentFlags"]) # Calculate longitude points. 
- x_inc = (section['iDirectionIncrement'] - if res_flags.i_increments_given - else _calculate_increment(section['longitudeOfFirstGridPoint'], - section['longitudeOfLastGridPoint'], - section['Ni'] - 1, - 360.0 / _GRID_ACCURACY_IN_DEGREES)) + x_inc = ( + section["iDirectionIncrement"] + if res_flags.i_increments_given + else _calculate_increment( + section["longitudeOfFirstGridPoint"], + section["longitudeOfLastGridPoint"], + section["Ni"] - 1, + 360.0 / _GRID_ACCURACY_IN_DEGREES, + ) + ) x_inc *= _GRID_ACCURACY_IN_DEGREES - x_offset = section['longitudeOfFirstGridPoint'] * _GRID_ACCURACY_IN_DEGREES + x_offset = section["longitudeOfFirstGridPoint"] * _GRID_ACCURACY_IN_DEGREES x_direction = -1 if scan.i_negative else 1 - Ni = section['Ni'] + Ni = section["Ni"] x_points = np.arange(Ni, dtype=np.float64) * x_inc * x_direction + x_offset # Determine whether the x-points (in degrees) are circular. circular = _is_circular(x_points, 360.0) # Calculate latitude points. - y_inc = (section['jDirectionIncrement'] - if res_flags.j_increments_given - else _calculate_increment(section['latitudeOfFirstGridPoint'], - section['latitudeOfLastGridPoint'], - section['Nj'] - 1)) + y_inc = ( + section["jDirectionIncrement"] + if res_flags.j_increments_given + else _calculate_increment( + section["latitudeOfFirstGridPoint"], + section["latitudeOfLastGridPoint"], + section["Nj"] - 1, + ) + ) y_inc *= _GRID_ACCURACY_IN_DEGREES - y_offset = section['latitudeOfFirstGridPoint'] * _GRID_ACCURACY_IN_DEGREES + y_offset = section["latitudeOfFirstGridPoint"] * _GRID_ACCURACY_IN_DEGREES y_direction = 1 if scan.j_positive else -1 - Nj = section['Nj'] + Nj = section["Nj"] y_points = np.arange(Nj, dtype=np.float64) * y_inc * y_direction + y_offset # Create the lat/lon coordinates. 
- y_coord = DimCoord(y_points, standard_name=y_name, units='degrees', - coord_system=cs) - x_coord = DimCoord(x_points, standard_name=x_name, units='degrees', - coord_system=cs, circular=circular) + y_coord = DimCoord(y_points, standard_name=y_name, units="degrees", coord_system=cs) + x_coord = DimCoord( + x_points, + standard_name=x_name, + units="degrees", + coord_system=cs, + circular=circular, + ) # Determine the lat/lon dimensions. y_dim, x_dim = 0, 1 @@ -573,8 +604,8 @@ def grid_definition_template_0_and_1(section, metadata, y_name, x_name, cs): y_dim, x_dim = 1, 0 # Add the lat/lon coordinates to the metadata dim coords. - metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_0(section, metadata): @@ -595,9 +626,8 @@ def grid_definition_template_0(section, metadata): """ # Determine the coordinate system. major, minor, radius = ellipsoid_geometry(section) - cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) - grid_definition_template_0_and_1(section, metadata, - 'latitude', 'longitude', cs) + cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) def grid_definition_template_1(section, metadata): @@ -617,17 +647,17 @@ def grid_definition_template_1(section, metadata): """ # Determine the coordinate system. 
major, minor, radius = ellipsoid_geometry(section) - south_pole_lat = (section['latitudeOfSouthernPole'] * - _GRID_ACCURACY_IN_DEGREES) - south_pole_lon = (section['longitudeOfSouthernPole'] * - _GRID_ACCURACY_IN_DEGREES) - cs = icoord_systems.RotatedGeogCS(-south_pole_lat, - math.fmod(south_pole_lon + 180, 360), - section['angleOfRotation'], - ellipsoid(section['shapeOfTheEarth'], - major, minor, radius)) - grid_definition_template_0_and_1(section, metadata, - 'grid_latitude', 'grid_longitude', cs) + south_pole_lat = section["latitudeOfSouthernPole"] * _GRID_ACCURACY_IN_DEGREES + south_pole_lon = section["longitudeOfSouthernPole"] * _GRID_ACCURACY_IN_DEGREES + cs = icoord_systems.RotatedGeogCS( + -south_pole_lat, + math.fmod(south_pole_lon + 180, 360), + section["angleOfRotation"], + ellipsoid(section["shapeOfTheEarth"], major, minor, radius), + ) + grid_definition_template_0_and_1( + section, metadata, "grid_latitude", "grid_longitude", cs + ) def grid_definition_template_4_and_5(section, metadata, y_name, x_name, cs): @@ -657,40 +687,44 @@ def grid_definition_template_4_and_5(section, metadata, y_name, x_name, cs): """ # Determine the (variable) units of resolution. - key = 'basicAngleOfTheInitialProductionDomain' + key = "basicAngleOfTheInitialProductionDomain" basicAngleOfTheInitialProductDomain = section[key] - subdivisionsOfBasicAngle = section['subdivisionsOfBasicAngle'] + subdivisionsOfBasicAngle = section["subdivisionsOfBasicAngle"] if basicAngleOfTheInitialProductDomain in [0, _MDI]: - basicAngleOfTheInitialProductDomain = 1. + basicAngleOfTheInitialProductDomain = 1.0 if subdivisionsOfBasicAngle in [0, _MDI]: - subdivisionsOfBasicAngle = 1. 
/ _GRID_ACCURACY_IN_DEGREES + subdivisionsOfBasicAngle = 1.0 / _GRID_ACCURACY_IN_DEGREES resolution = np.float64(basicAngleOfTheInitialProductDomain) resolution /= subdivisionsOfBasicAngle - flags = resolution_flags(section['resolutionAndComponentFlags']) + flags = resolution_flags(section["resolutionAndComponentFlags"]) # Grid Definition Template 3.4. Notes (2). # Flag bits 3-4 are not applicable for this template. if flags.uv_resolved and options.warn_on_unsupported: - msg = 'Unable to translate resolution and component flags.' + msg = "Unable to translate resolution and component flags." warnings.warn(msg) # Calculate the latitude and longitude points. - x_points = np.array(section['longitudes'], dtype=np.float64) * resolution - y_points = np.array(section['latitudes'], dtype=np.float64) * resolution + x_points = np.array(section["longitudes"], dtype=np.float64) * resolution + y_points = np.array(section["latitudes"], dtype=np.float64) * resolution # Determine whether the x-points (in degrees) are circular. circular = _is_circular(x_points, 360.0) # Create the lat/lon coordinates. - y_coord = DimCoord(y_points, standard_name=y_name, units='degrees', - coord_system=cs) - x_coord = DimCoord(x_points, standard_name=x_name, units='degrees', - coord_system=cs, circular=circular) + y_coord = DimCoord(y_points, standard_name=y_name, units="degrees", coord_system=cs) + x_coord = DimCoord( + x_points, + standard_name=x_name, + units="degrees", + coord_system=cs, + circular=circular, + ) - scan = scanning_mode(section['scanningMode']) + scan = scanning_mode(section["scanningMode"]) # Determine the lat/lon dimensions. y_dim, x_dim = 0, 1 @@ -698,8 +732,8 @@ def grid_definition_template_4_and_5(section, metadata, y_name, x_name, cs): y_dim, x_dim = 1, 0 # Add the lat/lon coordinates to the metadata dim coords. 
- metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_4(section, metadata): @@ -719,9 +753,8 @@ def grid_definition_template_4(section, metadata): """ # Determine the coordinate system. major, minor, radius = ellipsoid_geometry(section) - cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) - grid_definition_template_4_and_5(section, metadata, - 'latitude', 'longitude', cs) + cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) + grid_definition_template_4_and_5(section, metadata, "latitude", "longitude", cs) def grid_definition_template_5(section, metadata): @@ -742,17 +775,17 @@ def grid_definition_template_5(section, metadata): """ # Determine the coordinate system. major, minor, radius = ellipsoid_geometry(section) - south_pole_lat = (section['latitudeOfSouthernPole'] * - _GRID_ACCURACY_IN_DEGREES) - south_pole_lon = (section['longitudeOfSouthernPole'] * - _GRID_ACCURACY_IN_DEGREES) - cs = icoord_systems.RotatedGeogCS(-south_pole_lat, - math.fmod(south_pole_lon + 180, 360), - section['angleOfRotation'], - ellipsoid(section['shapeOfTheEarth'], - major, minor, radius)) - grid_definition_template_4_and_5(section, metadata, - 'grid_latitude', 'grid_longitude', cs) + south_pole_lat = section["latitudeOfSouthernPole"] * _GRID_ACCURACY_IN_DEGREES + south_pole_lon = section["longitudeOfSouthernPole"] * _GRID_ACCURACY_IN_DEGREES + cs = icoord_systems.RotatedGeogCS( + -south_pole_lat, + math.fmod(south_pole_lon + 180, 360), + section["angleOfRotation"], + ellipsoid(section["shapeOfTheEarth"], major, minor, radius), + ) + grid_definition_template_4_and_5( + section, metadata, "grid_latitude", "grid_longitude", cs + ) def grid_definition_template_10(section, metadata): @@ -771,21 +804,16 @@ def grid_definition_template_10(section, metadata): 
""" major, minor, radius = ellipsoid_geometry(section) - geog_cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) + geog_cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) # standard_parallel is the latitude at which the Mercator projection # intersects the Earth - standard_parallel = section['LaD'] * _GRID_ACCURACY_IN_DEGREES + standard_parallel = section["LaD"] * _GRID_ACCURACY_IN_DEGREES - # Check and raise a more intelligible error, if the Iris version is too old - # to support the Mercator 'standard_parallel' keyword. - confirm_extended_mercator_supported() - cs = icoord_systems.Mercator(standard_parallel=standard_parallel, - ellipsoid=geog_cs) + cs = icoord_systems.Mercator(standard_parallel=standard_parallel, ellipsoid=geog_cs) # Create the X and Y coordinates. - x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, - cs) + x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, cs) # Determine the lat/lon dimensions. y_dim, x_dim = 0, 1 @@ -793,8 +821,8 @@ def grid_definition_template_10(section, metadata): y_dim, x_dim = 1, 0 # Add the X and Y coordinates to the metadata dim coords. 
- metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_12(section, metadata): @@ -813,27 +841,26 @@ def grid_definition_template_12(section, metadata): """ major, minor, radius = ellipsoid_geometry(section) - geog_cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) + geog_cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) - lat = section['latitudeOfReferencePoint'] * _GRID_ACCURACY_IN_DEGREES - lon = section['longitudeOfReferencePoint'] * _GRID_ACCURACY_IN_DEGREES - scale = section['scaleFactorAtReferencePoint'] + lat = section["latitudeOfReferencePoint"] * _GRID_ACCURACY_IN_DEGREES + lon = section["longitudeOfReferencePoint"] * _GRID_ACCURACY_IN_DEGREES + scale = section["scaleFactorAtReferencePoint"] # Catch bug in ECMWF GRIB API (present at 1.12.1) where the scale # is treated as a signed, 4-byte integer. if isinstance(scale, int): scale = fixup_float32_from_int32(scale) CM_TO_M = 0.01 - easting = section['XR'] * CM_TO_M - northing = section['YR'] * CM_TO_M - cs = icoord_systems.TransverseMercator(lat, lon, easting, northing, - scale, geog_cs) + easting = section["XR"] * CM_TO_M + northing = section["YR"] * CM_TO_M + cs = icoord_systems.TransverseMercator(lat, lon, easting, northing, scale, geog_cs) # Deal with bug in ECMWF GRIB API (present at 1.12.1) where these # values are treated as unsigned, 4-byte integers. 
- x1 = fixup_int32_from_uint32(section['X1']) - y1 = fixup_int32_from_uint32(section['Y1']) - x2 = fixup_int32_from_uint32(section['X2']) - y2 = fixup_int32_from_uint32(section['Y2']) + x1 = fixup_int32_from_uint32(section["X1"]) + y1 = fixup_int32_from_uint32(section["Y1"]) + x2 = fixup_int32_from_uint32(section["X2"]) + y2 = fixup_int32_from_uint32(section["Y2"]) # Rather unhelpfully this grid definition template seems to be # overspecified, and thus open to inconsistency. But for determining @@ -848,13 +875,14 @@ def check_range(v1, v2, n, d, axis_name): max_last = small + (n - 1) * (d + 1) if not (min_last < large < max_last): message = ( - f'File grid {axis_name} definition inconsistent: ' - f'{v1} to {v2} in {n} steps is incompatible with step-size ' - f'{d} .' + f"File grid {axis_name} definition inconsistent: " + f"{v1} to {v2} in {n} steps is incompatible with step-size " + f"{d} ." ) raise TranslationError(message) - check_range(x1, x2, section['Ni'], section['Di'], 'X') - check_range(y1, y2, section['Nj'], section['Dj'], 'Y') + + check_range(x1, x2, section["Ni"], section["Di"], "X") + check_range(y1, y2, section["Nj"], section["Dj"], "Y") # Further over-specification - the sequence of X1 & X2 is enough to # generate the sequence in the correct direction (also Y1 & Y2). All @@ -862,6 +890,7 @@ def check_range(v1, v2, n, d, axis_name): def validate_scanning(axis: str, stated: bool, encoded: bool): def scan_str(scanning_bool): return "positive" if scanning_bool else "negative" + if stated != encoded: message = ( f"File grid {axis} definition inconsistent: " @@ -869,18 +898,17 @@ def scan_str(scanning_bool): f"direction is {scan_str(encoded)}." 
) warnings.warn(message) - scan = scanning_mode(section['scanningMode']) + + scan = scanning_mode(section["scanningMode"]) validate_scanning("X", not scan.i_negative, x1 < x2) validate_scanning("Y", scan.j_positive, y1 < y2) - x_points = np.linspace(x1 * CM_TO_M, x2 * CM_TO_M, section['Ni']) - y_points = np.linspace(y1 * CM_TO_M, y2 * CM_TO_M, section['Nj']) + x_points = np.linspace(x1 * CM_TO_M, x2 * CM_TO_M, section["Ni"]) + y_points = np.linspace(y1 * CM_TO_M, y2 * CM_TO_M, section["Nj"]) # Create the X and Y coordinates. - y_coord = DimCoord(y_points, 'projection_y_coordinate', units='m', - coord_system=cs) - x_coord = DimCoord(x_points, 'projection_x_coordinate', units='m', - coord_system=cs) + y_coord = DimCoord(y_points, "projection_y_coordinate", units="m", coord_system=cs) + x_coord = DimCoord(x_points, "projection_x_coordinate", units="m", coord_system=cs) # Determine the lat/lon dimensions. y_dim, x_dim = 0, 1 @@ -888,8 +916,8 @@ def scan_str(scanning_bool): y_dim, x_dim = 1, 0 # Add the X and Y coordinates to the metadata dim coords. 
- metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_20(section, metadata): @@ -908,25 +936,28 @@ def grid_definition_template_20(section, metadata): """ major, minor, radius = ellipsoid_geometry(section) - geog_cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) + geog_cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) - proj_centre = projection_centre(section['projectionCentreFlag']) + proj_centre = projection_centre(section["projectionCentreFlag"]) if proj_centre.bipolar_and_symmetric: - raise TranslationError('Bipolar and symmetric polar stereo projections' - ' are not supported by the ' - 'grid_definition_template_20 translation.') + raise TranslationError( + "Bipolar and symmetric polar stereo projections" + " are not supported by the " + "grid_definition_template_20 translation." + ) if proj_centre.south_pole_on_projection_plane: - central_lat = -90. + central_lat = -90.0 else: - central_lat = 90. - central_lon = section['orientationOfTheGrid'] * _GRID_ACCURACY_IN_DEGREES - true_scale_lat = section['LaD'] * _GRID_ACCURACY_IN_DEGREES - cs = icoord_systems.Stereographic(central_lat=central_lat, - central_lon=central_lon, - true_scale_lat=true_scale_lat, - ellipsoid=geog_cs) - x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, - cs) + central_lat = 90.0 + central_lon = section["orientationOfTheGrid"] * _GRID_ACCURACY_IN_DEGREES + true_scale_lat = section["LaD"] * _GRID_ACCURACY_IN_DEGREES + cs = icoord_systems.Stereographic( + central_lat=central_lat, + central_lon=central_lon, + true_scale_lat=true_scale_lat, + ellipsoid=geog_cs, + ) + x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, cs) # Determine the order of the dimensions. 
y_dim, x_dim = 0, 1 @@ -934,8 +965,8 @@ def grid_definition_template_20(section, metadata): y_dim, x_dim = 1, 0 # Add the projection coordinates to the metadata dim coords. - metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def _calculate_proj_coords_from_grid_lengths(section, cs): @@ -946,30 +977,29 @@ def _calculate_proj_coords_from_grid_lengths(section, cs): # Conversion factor millimetres to metres mm_to_m = 1e-3 - if section['gridDefinitionTemplateNumber'] in _XYGRIDLENGTH_GDT_NUMBERS: - if section['gridDefinitionTemplateNumber'] == 140: - dx = section['xDirectionGridLengthInMillimetres'] - dy = section['yDirectionGridLengthInMillimetres'] - nx = section['numberOfPointsAlongXAxis'] - ny = section['numberOfPointsAlongYAxis'] + if section["gridDefinitionTemplateNumber"] in _XYGRIDLENGTH_GDT_NUMBERS: + if section["gridDefinitionTemplateNumber"] == 140: + dx = section["xDirectionGridLengthInMillimetres"] + dy = section["yDirectionGridLengthInMillimetres"] + nx = section["numberOfPointsAlongXAxis"] + ny = section["numberOfPointsAlongYAxis"] else: - dx = section['Dx'] - dy = section['Dy'] - nx = section['Nx'] - ny = section['Ny'] - elif section['gridDefinitionTemplateNumber'] in _IJGRIDLENGTH_GDT_NUMBERS: - dx = section['Di'] - dy = section['Dj'] - nx = section['Ni'] - ny = section['Nj'] + dx = section["Dx"] + dy = section["Dy"] + nx = section["Nx"] + ny = section["Ny"] + elif section["gridDefinitionTemplateNumber"] in _IJGRIDLENGTH_GDT_NUMBERS: + dx = section["Di"] + dy = section["Dj"] + nx = section["Ni"] + ny = section["Nj"] else: - raise TranslationError('Unsupported lat-lon point parameters') + raise TranslationError("Unsupported lat-lon point parameters") - scan = scanning_mode(section['scanningMode']) - lon_0 = section['longitudeOfFirstGridPoint'] * _GRID_ACCURACY_IN_DEGREES - 
lat_0 = section['latitudeOfFirstGridPoint'] * _GRID_ACCURACY_IN_DEGREES - x0_m, y0_m = cs.as_cartopy_crs().transform_point( - lon_0, lat_0, ccrs.Geodetic()) + scan = scanning_mode(section["scanningMode"]) + lon_0 = section["longitudeOfFirstGridPoint"] * _GRID_ACCURACY_IN_DEGREES + lat_0 = section["latitudeOfFirstGridPoint"] * _GRID_ACCURACY_IN_DEGREES + x0_m, y0_m = cs.as_cartopy_crs().transform_point(lon_0, lat_0, ccrs.Geodetic()) dx_m = dx * mm_to_m dy_m = dy * mm_to_m x_dir = -1 if scan.i_negative else 1 @@ -978,10 +1008,12 @@ def _calculate_proj_coords_from_grid_lengths(section, cs): y_points = y0_m + dy_m * y_dir * np.arange(ny, dtype=np.float64) # Create the dimension coordinates. - x_coord = DimCoord(x_points, standard_name='projection_x_coordinate', - units='m', coord_system=cs) - y_coord = DimCoord(y_points, standard_name='projection_y_coordinate', - units='m', coord_system=cs) + x_coord = DimCoord( + x_points, standard_name="projection_x_coordinate", units="m", coord_system=cs + ) + y_coord = DimCoord( + y_points, standard_name="projection_y_coordinate", units="m", coord_system=cs + ) return x_coord, y_coord, scan @@ -1001,42 +1033,45 @@ def grid_definition_template_30(section, metadata): """ major, minor, radius = ellipsoid_geometry(section) - geog_cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) + geog_cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) - central_latitude = section['LaD'] * _GRID_ACCURACY_IN_DEGREES - central_longitude = section['LoV'] * _GRID_ACCURACY_IN_DEGREES + central_latitude = section["LaD"] * _GRID_ACCURACY_IN_DEGREES + central_longitude = section["LoV"] * _GRID_ACCURACY_IN_DEGREES false_easting = 0 false_northing = 0 - secant_latitudes = (section['Latin1'] * _GRID_ACCURACY_IN_DEGREES, - section['Latin2'] * _GRID_ACCURACY_IN_DEGREES) - - cs = icoord_systems.LambertConformal(central_latitude, - central_longitude, - false_easting, - false_northing, - secant_latitudes=secant_latitudes, - 
ellipsoid=geog_cs) + secant_latitudes = ( + section["Latin1"] * _GRID_ACCURACY_IN_DEGREES, + section["Latin2"] * _GRID_ACCURACY_IN_DEGREES, + ) + + cs = icoord_systems.LambertConformal( + central_latitude, + central_longitude, + false_easting, + false_northing, + secant_latitudes=secant_latitudes, + ellipsoid=geog_cs, + ) # A projection centre flag is defined for GDT30. However, we don't need to # know which pole is in the projection plane as Cartopy handles that. The # Other component of the projection centre flag determines if there are # multiple projection centres. There is no support for this in Proj4 or # Cartopy so a translation error is raised if this flag is set. - proj_centre = projection_centre(section['projectionCentreFlag']) + proj_centre = projection_centre(section["projectionCentreFlag"]) if proj_centre.bipolar_and_symmetric: - msg = 'Unsupported projection centre: Bipolar and symmetric.' + msg = "Unsupported projection centre: Bipolar and symmetric." raise TranslationError(msg) - res_flags = resolution_flags(section['resolutionAndComponentFlags']) + res_flags = resolution_flags(section["resolutionAndComponentFlags"]) if not res_flags.uv_resolved and options.warn_on_unsupported: # Vector components are given as relative to east an north, rather than # relative to the projection coordinates, issue a warning in this case. # (ideally we need a way to add this information to a cube) - msg = 'Unable to translate resolution and component flags.' + msg = "Unable to translate resolution and component flags." warnings.warn(msg) - x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, - cs) + x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, cs) # Determine the order of the dimensions. y_dim, x_dim = 0, 1 @@ -1044,8 +1079,8 @@ def grid_definition_template_30(section, metadata): y_dim, x_dim = 1, 0 # Add the projection coordinates to the metadata dim coords. 
- metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_40(section, metadata): @@ -1064,10 +1099,12 @@ def grid_definition_template_40(section, metadata): """ major, minor, radius = ellipsoid_geometry(section) - cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) + cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) - if section['numberOfOctectsForNumberOfPoints'] != 0 or \ - section['interpretationOfNumberOfPoints'] != 0: + if ( + section["numberOfOctectsForNumberOfPoints"] != 0 + or section["interpretationOfNumberOfPoints"] != 0 + ): grid_definition_template_40_reduced(section, metadata, cs) else: grid_definition_template_40_regular(section, metadata, cs) @@ -1078,22 +1115,26 @@ def grid_definition_template_40_regular(section, metadata, cs): Translate template representing a regular Gaussian grid. """ - scan = scanning_mode(section['scanningMode']) + scan = scanning_mode(section["scanningMode"]) # Set resolution flags - res_flags = resolution_flags(section['resolutionAndComponentFlags']) + res_flags = resolution_flags(section["resolutionAndComponentFlags"]) # Calculate longitude points. 
- x_inc = (section['iDirectionIncrement'] - if res_flags.i_increments_given - else _calculate_increment(section['longitudeOfFirstGridPoint'], - section['longitudeOfLastGridPoint'], - section['Ni'] - 1, - 360.0 / _GRID_ACCURACY_IN_DEGREES)) + x_inc = ( + section["iDirectionIncrement"] + if res_flags.i_increments_given + else _calculate_increment( + section["longitudeOfFirstGridPoint"], + section["longitudeOfLastGridPoint"], + section["Ni"] - 1, + 360.0 / _GRID_ACCURACY_IN_DEGREES, + ) + ) x_inc *= _GRID_ACCURACY_IN_DEGREES - x_offset = section['longitudeOfFirstGridPoint'] * _GRID_ACCURACY_IN_DEGREES + x_offset = section["longitudeOfFirstGridPoint"] * _GRID_ACCURACY_IN_DEGREES x_direction = -1 if scan.i_negative else 1 - Ni = section['Ni'] + Ni = section["Ni"] x_points = np.arange(Ni, dtype=np.float64) * x_inc * x_direction + x_offset # Determine whether the x-points (in degrees) are circular. @@ -1129,17 +1170,22 @@ def grid_definition_template_40_regular(section, metadata, cs): # GRIB2 message. This computed key provides a rapid calculation of the # monotonic latitude points that form the Gaussian grid, accounting for # the coverage of the grid. - y_points = section.get_computed_key('distinctLatitudes') + y_points = section.get_computed_key("distinctLatitudes") y_points.sort() if not scan.j_positive: y_points = y_points[::-1] # Create lat/lon coordinates. - x_coord = DimCoord(x_points, standard_name='longitude', - units='degrees', coord_system=cs, - circular=circular) - y_coord = DimCoord(y_points, standard_name='latitude', - units='degrees', coord_system=cs) + x_coord = DimCoord( + x_points, + standard_name="longitude", + units="degrees", + coord_system=cs, + circular=circular, + ) + y_coord = DimCoord( + y_points, standard_name="latitude", units="degrees", coord_system=cs + ) # Determine the lat/lon dimensions. 
y_dim, x_dim = 0, 1 @@ -1147,8 +1193,8 @@ def grid_definition_template_40_regular(section, metadata, cs): y_dim, x_dim = 1, 0 # Add the lat/lon coordinates to the metadata dim coords. - metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_40_reduced(section, metadata, cs): @@ -1166,18 +1212,20 @@ def grid_definition_template_40_reduced(section, metadata, cs): # from coded keys, it would be complex and time-consuming compared to # loading the latitude and longitude arrays directly using the computed # keys 'latitudes' and 'longitudes'. - x_points = section.get_computed_key('longitudes') - y_points = section.get_computed_key('latitudes') + x_points = section.get_computed_key("longitudes") + y_points = section.get_computed_key("latitudes") # Create lat/lon coordinates. - x_coord = AuxCoord(x_points, standard_name='longitude', - units='degrees', coord_system=cs) - y_coord = AuxCoord(y_points, standard_name='latitude', - units='degrees', coord_system=cs) + x_coord = AuxCoord( + x_points, standard_name="longitude", units="degrees", coord_system=cs + ) + y_coord = AuxCoord( + y_points, standard_name="latitude", units="degrees", coord_system=cs + ) # Add the lat/lon coordinates to the metadata dim coords. - metadata['aux_coords_and_dims'].append((y_coord, 0)) - metadata['aux_coords_and_dims'].append((x_coord, 0)) + metadata["aux_coords_and_dims"].append((y_coord, 0)) + metadata["aux_coords_and_dims"].append((x_coord, 0)) def grid_definition_template_90(section, metadata): @@ -1195,26 +1243,29 @@ def grid_definition_template_90(section, metadata): :class:`collections.OrderedDict` of metadata. 
""" - if section['Nr'] == _MDI: - raise TranslationError('Unsupported orthographic grid.') - elif section['Nr'] == 0: - raise TranslationError('Unsupported zero height for space-view.') - if section['orientationOfTheGrid'] != 0: - raise TranslationError('Unsupported space-view orientation.') + if section["Nr"] == _MDI: + raise TranslationError("Unsupported orthographic grid.") + elif section["Nr"] == 0: + raise TranslationError("Unsupported zero height for space-view.") + if section["orientationOfTheGrid"] != 0: + raise TranslationError("Unsupported space-view orientation.") # Determine the coordinate system. - sub_satellite_lat = (section['latitudeOfSubSatellitePoint'] * - _GRID_ACCURACY_IN_DEGREES) + sub_satellite_lat = ( + section["latitudeOfSubSatellitePoint"] * _GRID_ACCURACY_IN_DEGREES + ) # The subsequent calculations to determine the apparent Earth # diameters rely on the satellite being over the equator. if sub_satellite_lat != 0: - raise TranslationError('Unsupported non-zero latitude for ' - 'space-view perspective.') - sub_satellite_lon = (section['longitudeOfSubSatellitePoint'] * - _GRID_ACCURACY_IN_DEGREES) + raise TranslationError( + "Unsupported non-zero latitude for " "space-view perspective." + ) + sub_satellite_lon = ( + section["longitudeOfSubSatellitePoint"] * _GRID_ACCURACY_IN_DEGREES + ) major, minor, radius = ellipsoid_geometry(section) - geog_cs = ellipsoid(section['shapeOfTheEarth'], major, minor, radius) - height_above_centre = geog_cs.semi_major_axis * section['Nr'] / 1e6 + geog_cs = ellipsoid(section["shapeOfTheEarth"], major, minor, radius) + height_above_centre = geog_cs.semi_major_axis * section["Nr"] / 1e6 height_above_ellipsoid = height_above_centre - geog_cs.semi_major_axis # Figure out how large the Earth would appear in projection coordinates. 
@@ -1237,30 +1288,31 @@ def grid_definition_template_90(section, metadata): # ------------ # a / cos(psi) # This can be simplified using: cos(psi) = a / height_above_centre - half_apparent_equatorial_angle = math.asin(geog_cs.semi_major_axis / - height_above_centre) + half_apparent_equatorial_angle = math.asin( + geog_cs.semi_major_axis / height_above_centre + ) parametric_angle = math.acos(geog_cs.semi_major_axis / height_above_centre) - half_apparent_polar_angle = math.atan(geog_cs.semi_minor_axis / - (height_above_centre * - math.sin(parametric_angle))) + half_apparent_polar_angle = math.atan( + geog_cs.semi_minor_axis / (height_above_centre * math.sin(parametric_angle)) + ) y_apparent_angular_diameter = 2 * half_apparent_polar_angle x_apparent_angular_diameter = 2 * half_apparent_equatorial_angle - y_step = y_apparent_angular_diameter / section['dy'] - x_step = x_apparent_angular_diameter / section['dx'] - y_start = y_step * (section['Yo'] - section['Yp'] / 1000) - x_start = x_step * (section['Xo'] - section['Xp'] / 1000) - y_points = y_start + np.arange(section['Ny']) * y_step - x_points = x_start + np.arange(section['Nx']) * x_step + y_step = y_apparent_angular_diameter / section["dy"] + x_step = x_apparent_angular_diameter / section["dx"] + y_start = y_step * (section["Yo"] - section["Yp"] / 1000) + x_start = x_step * (section["Xo"] - section["Xp"] / 1000) + y_points = y_start + np.arange(section["Ny"]) * y_step + x_points = x_start + np.arange(section["Nx"]) * x_step # This has only been tested with -x/+y scanning, so raise an error # for other permutations. 
- scan = scanning_mode(section['scanningMode']) + scan = scanning_mode(section["scanningMode"]) if scan.i_negative: x_points = -x_points else: - raise TranslationError('Unsupported +x scanning') + raise TranslationError("Unsupported +x scanning") if not scan.j_positive: - raise TranslationError('Unsupported -y scanning') + raise TranslationError("Unsupported -y scanning") # Make a coordinate system for the X and Y coordinates. # Note: false_easting/northing are always just zero, as the calculation of @@ -1269,14 +1321,17 @@ def grid_definition_template_90(section, metadata): latitude_of_projection_origin=sub_satellite_lat, longitude_of_projection_origin=sub_satellite_lon, perspective_point_height=height_above_ellipsoid, - sweep_angle_axis='y', - ellipsoid=geog_cs) + sweep_angle_axis="y", + ellipsoid=geog_cs, + ) # Create the X and Y coordinates. - y_coord = DimCoord(y_points, 'projection_y_coordinate', units='radians', - coord_system=cs) - x_coord = DimCoord(x_points, 'projection_x_coordinate', units='radians', - coord_system=cs) + y_coord = DimCoord( + y_points, "projection_y_coordinate", units="radians", coord_system=cs + ) + x_coord = DimCoord( + x_points, "projection_x_coordinate", units="radians", coord_system=cs + ) # Determine the lat/lon dimensions. y_dim, x_dim = 0, 1 @@ -1284,8 +1339,8 @@ def grid_definition_template_90(section, metadata): y_dim, x_dim = 1, 0 # Add the X and Y coordinates to the metadata dim coords. 
- metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_template_140(section, metadata): @@ -1305,14 +1360,14 @@ def grid_definition_template_140(section, metadata): # Define the coordinate system major, minor, radius = ellipsoid_geometry(section) cs = icoord_systems.LambertAzimuthalEqualArea( - section['standardParallelInMicrodegrees'] * _GRID_ACCURACY_IN_DEGREES, - section['centralLongitudeInMicrodegrees'] * _GRID_ACCURACY_IN_DEGREES, + section["standardParallelInMicrodegrees"] * _GRID_ACCURACY_IN_DEGREES, + section["centralLongitudeInMicrodegrees"] * _GRID_ACCURACY_IN_DEGREES, 0, 0, - ellipsoid(section['shapeOfTheEarth'], major, minor, radius)) + ellipsoid(section["shapeOfTheEarth"], major, minor, radius), + ) - x_coord, y_coord, scan = \ - _calculate_proj_coords_from_grid_lengths(section, cs) + x_coord, y_coord, scan = _calculate_proj_coords_from_grid_lengths(section, cs) # Determine the order of the dimensions. y_dim, x_dim = 0, 1 @@ -1320,8 +1375,8 @@ def grid_definition_template_140(section, metadata): y_dim, x_dim = 1, 0 # Add the projection coordinates to the metadata dim coords. - metadata['dim_coords_and_dims'].append((y_coord, y_dim)) - metadata['dim_coords_and_dims'].append((x_coord, x_dim)) + metadata["dim_coords_and_dims"].append((y_coord, y_dim)) + metadata["dim_coords_and_dims"].append((x_coord, x_dim)) def grid_definition_section(section, metadata): @@ -1340,14 +1395,16 @@ def grid_definition_section(section, metadata): """ # Reference GRIB2 Code Table 3.0. 
- value = section['sourceOfGridDefinition'] + value = section["sourceOfGridDefinition"] if value != 0: - msg = 'Grid definition section 3 contains unsupported ' \ - 'source of grid definition [{}]'.format(value) + msg = ( + "Grid definition section 3 contains unsupported " + "source of grid definition [{}]".format(value) + ) raise TranslationError(msg) # Reference GRIB2 Code Table 3.1. - template = section['gridDefinitionTemplateNumber'] + template = section["gridDefinitionTemplateNumber"] if template == 0: # Process regular latitude/longitude grid (regular_ll) @@ -1382,7 +1439,7 @@ def grid_definition_section(section, metadata): # Process Lambert Azimuthal Equal Area. grid_definition_template_140(section, metadata) else: - msg = 'Grid definition template [{}] is not supported'.format(template) + msg = "Grid definition template [{}] is not supported".format(template) raise TranslationError(msg) @@ -1392,10 +1449,17 @@ def grid_definition_section(section, metadata): # ############################################################################### -def translate_phenomenon(metadata, discipline, parameterCategory, - parameterNumber, typeOfFirstFixedSurface, - scaledValueOfFirstFixedSurface, - typeOfSecondFixedSurface, probability=None): + +def translate_phenomenon( + metadata, + discipline, + parameterCategory, + parameterNumber, + typeOfFirstFixedSurface, + scaledValueOfFirstFixedSurface, + typeOfSecondFixedSurface, + probability=None, +): """ Translate GRIB2 phenomenon to CF phenomenon. @@ -1422,54 +1486,60 @@ def translate_phenomenon(metadata, discipline, parameterCategory, given properties. 
""" - cf = itranslation.grib2_phenom_to_cf_info(param_discipline=discipline, - param_category=parameterCategory, - param_number=parameterNumber) + cf = itranslation.grib2_phenom_to_cf_info( + param_discipline=discipline, + param_category=parameterCategory, + param_number=parameterNumber, + ) if cf is not None: if probability is None: - metadata['standard_name'] = cf.standard_name - metadata['long_name'] = cf.long_name - metadata['units'] = cf.units + metadata["standard_name"] = cf.standard_name + metadata["long_name"] = cf.long_name + metadata["units"] = cf.units else: # The basic name+unit info goes into a 'threshold coordinate' which # encodes probability threshold values. threshold_coord = DimCoord( probability.threshold, - standard_name=cf.standard_name, long_name=cf.long_name, - units=cf.units) - metadata['aux_coords_and_dims'].append((threshold_coord, None)) + standard_name=cf.standard_name, + long_name=cf.long_name, + units=cf.units, + ) + metadata["aux_coords_and_dims"].append((threshold_coord, None)) # The main cube has an adjusted name, and units of '1'. base_name = cf.standard_name or cf.long_name - long_name = 'probability_of_{}_{}'.format( - base_name, probability.probability_type_name) - metadata['standard_name'] = None - metadata['long_name'] = long_name - metadata['units'] = Unit(1) + long_name = "probability_of_{}_{}".format( + base_name, probability.probability_type_name + ) + metadata["standard_name"] = None + metadata["long_name"] = long_name + metadata["units"] = Unit(1) # Add a standard attribute recording the grib phenomenon identity. - metadata['attributes']['GRIB_PARAM'] = GRIBCode( - edition_or_string=2, + metadata["attributes"]["GRIB_PARAM"] = GRIBCode( + edition=2, discipline=discipline, category=parameterCategory, - number=parameterNumber) + number=parameterNumber, + ) # Identify hybrid height and pressure reference fields. # Look for fields at surface level first. 
- if (typeOfFirstFixedSurface == 1 and - scaledValueOfFirstFixedSurface == 0 and - typeOfSecondFixedSurface == _TYPE_OF_FIXED_SURFACE_MISSING): + if ( + typeOfFirstFixedSurface == 1 + and scaledValueOfFirstFixedSurface == 0 + and typeOfSecondFixedSurface == _TYPE_OF_FIXED_SURFACE_MISSING + ): # Land surface products for model terrain height: - if (discipline == 2 and - parameterCategory == 0 and - parameterNumber == 7): - metadata['references'].append(ReferenceTarget( - 'ref_orography', None)) + if discipline == 2 and parameterCategory == 0 and parameterNumber == 7: + metadata["references"].append(ReferenceTarget("ref_orography", None)) # Meteorological mass products for pressure: - elif (discipline == 0 and - parameterCategory == 3 and - parameterNumber == 0): - metadata['references'].append(ReferenceTarget( - 'ref_surface_pressure', ensure_surface_air_pressure_name)) + elif discipline == 0 and parameterCategory == 3 and parameterNumber == 0: + metadata["references"].append( + ReferenceTarget( + "ref_surface_pressure", ensure_surface_air_pressure_name + ) + ) def ensure_surface_air_pressure_name(cube): @@ -1485,13 +1555,15 @@ def ensure_surface_air_pressure_name(cube): # This will cause an infinite loop when building the derived coord (!) name = cube.name() # Just check the passed cube is of the sort expected. - expected_names = ('air_pressure', 'surface_air_pressure') + expected_names = ("air_pressure", "surface_air_pressure") if name not in expected_names: - msg = ('Unexpected cube name for hybrid-pressure reference data : ' - 'Expected one of {}, got {!r}.') + msg = ( + "Unexpected cube name for hybrid-pressure reference data : " + "Expected one of {}, got {!r}." + ) raise ValueError(msg.format(expected_names, name)) # Get the caller (in rules.py) to rename it. 
- return {'standard_name': 'surface_air_pressure'} + return {"standard_name": "surface_air_pressure"} def time_range_unit(indicatorOfUnitOfTimeRange): @@ -1511,8 +1583,10 @@ def time_range_unit(indicatorOfUnitOfTimeRange): try: unit = Unit(_TIME_RANGE_UNITS[indicatorOfUnitOfTimeRange]) except (KeyError, ValueError): - msg = 'Product definition section 4 contains unsupported ' \ - 'time range unit [{}]'.format(indicatorOfUnitOfTimeRange) + msg = ( + "Product definition section 4 contains unsupported " + "time range unit [{}]".format(indicatorOfUnitOfTimeRange) + ) raise TranslationError(msg) return unit @@ -1537,72 +1611,89 @@ def hybrid_factories(section, metadata): :class:`collections.OrderedDict` of metadata. """ - NV = section['NV'] + NV = section["NV"] if NV > 0: - typeOfFirstFixedSurface = section['typeOfFirstFixedSurface'] + typeOfFirstFixedSurface = section["typeOfFirstFixedSurface"] if typeOfFirstFixedSurface == _TYPE_OF_FIXED_SURFACE_MISSING: - msg = 'Product definition section 4 contains missing ' \ - 'type of first fixed surface' + msg = ( + "Product definition section 4 contains missing " + "type of first fixed surface" + ) raise TranslationError(msg) - typeOfSecondFixedSurface = section['typeOfSecondFixedSurface'] + typeOfSecondFixedSurface = section["typeOfSecondFixedSurface"] if typeOfSecondFixedSurface != _TYPE_OF_FIXED_SURFACE_MISSING: - msg = 'Product definition section 4 contains unsupported type ' \ - 'of second fixed surface [{}]'.format(typeOfSecondFixedSurface) + msg = ( + "Product definition section 4 contains unsupported type " + "of second fixed surface [{}]".format(typeOfSecondFixedSurface) + ) raise TranslationError(msg) if typeOfFirstFixedSurface in [105, 118, 119]: # Hybrid level (105), Hybrid height level (118) and Hybrid # pressure level (119). 
- scaleFactor = section['scaleFactorOfFirstFixedSurface'] + scaleFactor = section["scaleFactorOfFirstFixedSurface"] if scaleFactor != 0: - msg = 'Product definition section 4 contains invalid scale ' \ - 'factor of first fixed surface [{}]'.format(scaleFactor) + msg = ( + "Product definition section 4 contains invalid scale " + "factor of first fixed surface [{}]".format(scaleFactor) + ) raise TranslationError(msg) # Create the model level number scalar coordinate. - scaledValue = section['scaledValueOfFirstFixedSurface'] - coord = DimCoord(scaledValue, standard_name='model_level_number', - units=1, attributes=dict(positive='up')) - metadata['aux_coords_and_dims'].append((coord, None)) + scaledValue = section["scaledValueOfFirstFixedSurface"] + coord = DimCoord( + scaledValue, + standard_name="model_level_number", + units=1, + attributes=dict(positive="up"), + ) + metadata["aux_coords_and_dims"].append((coord, None)) if typeOfFirstFixedSurface == 118: # height - level_value_name = 'level_height' - level_value_units = 'm' + level_value_name = "level_height" + level_value_units = "m" factory_class = HybridHeightFactory - factory_args = [{'long_name': level_value_name}, - {'long_name': 'sigma'}, - Reference('ref_orography')] + factory_args = [ + {"long_name": level_value_name}, + {"long_name": "sigma"}, + Reference("ref_orography"), + ] else: # pressure - level_value_name = 'level_pressure' - level_value_units = 'Pa' + level_value_name = "level_pressure" + level_value_units = "Pa" factory_class = HybridPressureFactory - factory_args = [{'long_name': level_value_name}, - {'long_name': 'sigma'}, - Reference('ref_surface_pressure')] + factory_args = [ + {"long_name": level_value_name}, + {"long_name": "sigma"}, + Reference("ref_surface_pressure"), + ] # Create the level height/pressure scalar coordinate. 
# scaledValue represents the level number, which is used to select # the sigma and delta values as follows: # sigma, delta = PV[i], PV[NV/2+i] : where i=1..level_number - pv = section['pv'] + pv = section["pv"] offset = scaledValue - coord = DimCoord(pv[offset], long_name=level_value_name, - units=level_value_units) - metadata['aux_coords_and_dims'].append((coord, None)) + coord = DimCoord( + pv[offset], long_name=level_value_name, units=level_value_units + ) + metadata["aux_coords_and_dims"].append((coord, None)) # Create the sigma scalar coordinate. offset = NV // 2 + scaledValue - coord = AuxCoord(pv[offset], long_name='sigma', units=1) - metadata['aux_coords_and_dims'].append((coord, None)) + coord = AuxCoord(pv[offset], long_name="sigma", units=1) + metadata["aux_coords_and_dims"].append((coord, None)) # Create the associated factory reference. factory = Factory(factory_class, factory_args) - metadata['factories'].append(factory) + metadata["factories"].append(factory) else: - msg = 'Product definition section 4 contains unsupported ' \ - 'first fixed surface [{}]'.format(typeOfFirstFixedSurface) + msg = ( + "Product definition section 4 contains unsupported " + "first fixed surface [{}]".format(typeOfFirstFixedSurface) + ) raise TranslationError(msg) @@ -1623,66 +1714,81 @@ def vertical_coords(section, metadata): :class:`collections.OrderedDict` of metadata. """ - if section['NV'] > 0: + if section["NV"] > 0: # Generate hybrid vertical coordinates. hybrid_factories(section, metadata) else: # Generate vertical coordinate. - typeOfFirstFixedSurface = section['typeOfFirstFixedSurface'] + typeOfFirstFixedSurface = section["typeOfFirstFixedSurface"] # We treat fixed surface level type=1 as having no vertical coordinate. 
# See https://github.com/SciTools/iris/issues/519 if typeOfFirstFixedSurface not in [_TYPE_OF_FIXED_SURFACE_MISSING, 1]: - key = 'scaledValueOfFirstFixedSurface' + key = "scaledValueOfFirstFixedSurface" scaledValueOfFirstFixedSurface = section[key] if scaledValueOfFirstFixedSurface == _MDI: if options.warn_on_unsupported: - msg = 'Unable to translate type of first fixed ' \ - 'surface with missing scaled value.' + msg = ( + "Unable to translate type of first fixed " + "surface with missing scaled value." + ) warnings.warn(msg) else: fixed_surface_missing = FixedSurface(None, None, None) fixed_surface = _FIXED_SURFACE.get( - typeOfFirstFixedSurface, fixed_surface_missing) - key = 'scaleFactorOfFirstFixedSurface' + typeOfFirstFixedSurface, fixed_surface_missing + ) + key = "scaleFactorOfFirstFixedSurface" scaleFactorOfFirstFixedSurface = section[key] - typeOfSecondFixedSurface = section['typeOfSecondFixedSurface'] + typeOfSecondFixedSurface = section["typeOfSecondFixedSurface"] if typeOfSecondFixedSurface != _TYPE_OF_FIXED_SURFACE_MISSING: if typeOfFirstFixedSurface != typeOfSecondFixedSurface: - msg = 'Product definition section 4 has different ' \ - 'types of first and second fixed surface' + msg = ( + "Product definition section 4 has different " + "types of first and second fixed surface" + ) raise TranslationError(msg) - key = 'scaledValueOfSecondFixedSurface' + key = "scaledValueOfSecondFixedSurface" scaledValueOfSecondFixedSurface = section[key] if scaledValueOfSecondFixedSurface == _MDI: - msg = 'Product definition section 4 has missing ' \ - 'scaled value of second fixed surface' + msg = ( + "Product definition section 4 has missing " + "scaled value of second fixed surface" + ) raise TranslationError(msg) else: - key = 'scaleFactorOfSecondFixedSurface' + key = "scaleFactorOfSecondFixedSurface" scaleFactorOfSecondFixedSurface = section[key] - first = unscale(scaledValueOfFirstFixedSurface, - scaleFactorOfFirstFixedSurface) - second = 
unscale(scaledValueOfSecondFixedSurface, - scaleFactorOfSecondFixedSurface) + first = unscale( + scaledValueOfFirstFixedSurface, + scaleFactorOfFirstFixedSurface, + ) + second = unscale( + scaledValueOfSecondFixedSurface, + scaleFactorOfSecondFixedSurface, + ) point = 0.5 * (first + second) bounds = [first, second] else: - point = unscale(scaledValueOfFirstFixedSurface, - scaleFactorOfFirstFixedSurface) + point = unscale( + scaledValueOfFirstFixedSurface, scaleFactorOfFirstFixedSurface + ) bounds = None - coord = DimCoord(point, - standard_name=fixed_surface.standard_name, - long_name=fixed_surface.long_name, - units=fixed_surface.units, - bounds=bounds) + coord = DimCoord( + point, + standard_name=fixed_surface.standard_name, + long_name=fixed_surface.long_name, + units=fixed_surface.units, + bounds=bounds, + ) if fixed_surface == fixed_surface_missing: - coord.attributes['GRIB_fixed_surface_type'] = \ + coord.attributes["GRIB_fixed_surface_type"] = ( typeOfFirstFixedSurface + ) # Add the vertical coordinate to metadata aux coords. - metadata['aux_coords_and_dims'].append((coord, None)) + metadata["aux_coords_and_dims"].append((coord, None)) def forecast_period_coord(indicatorOfUnitOfTimeRange, forecastTime): @@ -1703,9 +1809,9 @@ def forecast_period_coord(indicatorOfUnitOfTimeRange, forecastTime): """ # Determine the forecast period and associated units. unit = time_range_unit(indicatorOfUnitOfTimeRange) - point = unit.convert(forecastTime, 'hours') + point = unit.convert(forecastTime, "hours") # Create the forecast period scalar coordinate. - coord = DimCoord(point, standard_name='forecast_period', units='hours') + coord = DimCoord(point, standard_name="forecast_period", units="hours") return coord @@ -1729,23 +1835,25 @@ def statistical_forecast_period_coord(section, frt_coord): """ # Get the period end time as a datetime. 
- end_time = datetime(section['yearOfEndOfOverallTimeInterval'], - section['monthOfEndOfOverallTimeInterval'], - section['dayOfEndOfOverallTimeInterval'], - section['hourOfEndOfOverallTimeInterval'], - section['minuteOfEndOfOverallTimeInterval'], - section['secondOfEndOfOverallTimeInterval']) + end_time = datetime( + section["yearOfEndOfOverallTimeInterval"], + section["monthOfEndOfOverallTimeInterval"], + section["dayOfEndOfOverallTimeInterval"], + section["hourOfEndOfOverallTimeInterval"], + section["minuteOfEndOfOverallTimeInterval"], + section["secondOfEndOfOverallTimeInterval"], + ) # Get forecast reference time (frt) as a datetime. frt_point = frt_coord.units.num2date(frt_coord.points[0]) # Get the period start time (as a timedelta relative to the frt). - forecast_time = section['forecastTime'] + forecast_time = section["forecastTime"] if options.support_hindcast_values: # Apply the hindcast fix. forecast_time = _hindcast_fix(forecast_time) - forecast_units = time_range_unit(section['indicatorOfUnitOfTimeRange']) - forecast_seconds = forecast_units.convert(forecast_time, 'seconds') + forecast_units = time_range_unit(section["indicatorOfUnitOfTimeRange"]) + forecast_seconds = forecast_units.convert(forecast_time, "seconds") start_time_delta = timedelta(seconds=forecast_seconds) # Get the period end time (as a timedelta relative to the frt). 
@@ -1761,10 +1869,13 @@ def timedelta_hours(timedelta): return timedelta.total_seconds() / 3600.0 mid_point_hours = timedelta_hours(mid_time_delta) - bounds_hours = [timedelta_hours(start_time_delta), - timedelta_hours(end_time_delta)] - fp_coord = DimCoord(mid_point_hours, bounds=bounds_hours, - standard_name='forecast_period', units='hours') + bounds_hours = [timedelta_hours(start_time_delta), timedelta_hours(end_time_delta)] + fp_coord = DimCoord( + mid_point_hours, + bounds=bounds_hours, + standard_name="forecast_period", + units="hours", + ) return fp_coord @@ -1792,25 +1903,25 @@ def other_time_coord(rt_coord, fp_coord): """ if not rt_coord.units.is_time_reference(): - fmt = 'Invalid unit for reference time coord: {}' + fmt = "Invalid unit for reference time coord: {}" raise ValueError(fmt.format(rt_coord.units)) if not fp_coord.units.is_time(): - fmt = 'Invalid unit for forecast_period coord: {}' + fmt = "Invalid unit for forecast_period coord: {}" raise ValueError(fmt.format(fp_coord.units)) if rt_coord.has_bounds() or fp_coord.has_bounds(): - raise ValueError('Coordinate bounds are not supported') + raise ValueError("Coordinate bounds are not supported") if rt_coord.shape != (1,) or fp_coord.shape != (1,): - raise ValueError('Vector coordinates are not supported') + raise ValueError("Vector coordinates are not supported") - if rt_coord.standard_name == 'time': - rt_base_unit = str(rt_coord.units).split(' since ')[0] + if rt_coord.standard_name == "time": + rt_base_unit = str(rt_coord.units).split(" since ")[0] fp = fp_coord.units.convert(fp_coord.points[0], rt_base_unit) frt = rt_coord.points[0] - fp - return DimCoord(frt, 'forecast_reference_time', units=rt_coord.units) - elif rt_coord.standard_name == 'forecast_reference_time': + return DimCoord(frt, "forecast_reference_time", units=rt_coord.units) + elif rt_coord.standard_name == "forecast_reference_time": return validity_time_coord(rt_coord, fp_coord) else: - fmt = 'Unexpected reference time 
coordinate: {}' + fmt = "Unexpected reference time coordinate: {}" raise ValueError(fmt.format(rt_coord.name())) @@ -1832,18 +1943,22 @@ def validity_time_coord(frt_coord, fp_coord): """ if frt_coord.shape != (1,): - msg = 'Expected scalar forecast reference time coordinate when ' \ - 'calculating validity time, got shape {!r}'.format(frt_coord.shape) + msg = ( + "Expected scalar forecast reference time coordinate when " + "calculating validity time, got shape {!r}".format(frt_coord.shape) + ) raise ValueError(msg) if fp_coord.shape != (1,): - msg = 'Expected scalar forecast period coordinate when ' \ - 'calculating validity time, got shape {!r}'.format(fp_coord.shape) + msg = ( + "Expected scalar forecast period coordinate when " + "calculating validity time, got shape {!r}".format(fp_coord.shape) + ) raise ValueError(msg) def coord_timedelta(coord, value): # Helper to convert a time coordinate value into a timedelta. - seconds = coord.units.convert(value, 'seconds') + seconds = coord.units.convert(value, "seconds") return timedelta(seconds=seconds) # Calculate validity (phenomenon) time in forecast-reference-time units. @@ -1855,49 +1970,54 @@ def coord_timedelta(coord, value): if fp_coord.bounds is None: bounds = None else: - bounds_deltas = [coord_timedelta(fp_coord, bound_point) - for bound_point in fp_coord.bounds[0]] - bounds = [float(frt_coord.units.date2num(frt_point + delta)) - for delta in bounds_deltas] + bounds_deltas = [ + coord_timedelta(fp_coord, bound_point) for bound_point in fp_coord.bounds[0] + ] + bounds = [ + float(frt_coord.units.date2num(frt_point + delta)) + for delta in bounds_deltas + ] # Create the time scalar coordinate. 
- coord = DimCoord(point, bounds=bounds, - standard_name='time', units=frt_coord.units) + coord = DimCoord(point, bounds=bounds, standard_name="time", units=frt_coord.units) return coord def time_coords(section, metadata, rt_coord): - if 'forecastTime' in section.keys(): - forecast_time = section['forecastTime'] + if "forecastTime" in section.keys(): + forecast_time = section["forecastTime"] # ecCodes encodes the forecast time as 'startStep' for pdt 4.4x; # product_definition_template_40 makes use of this function. The # following will be removed once the suspected bug is fixed. - elif 'startStep' in section.keys(): - forecast_time = section['startStep'] + elif "startStep" in section.keys(): + forecast_time = section["startStep"] # Calculate the forecast period coordinate. - fp_coord = forecast_period_coord(section['indicatorOfUnitOfTimeRange'], - forecast_time) + fp_coord = forecast_period_coord( + section["indicatorOfUnitOfTimeRange"], forecast_time + ) # Add the forecast period coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((fp_coord, None)) + metadata["aux_coords_and_dims"].append((fp_coord, None)) # Calculate the "other" time coordinate - i.e. whichever of 'time' # or 'forecast_reference_time' we don't already have. other_coord = other_time_coord(rt_coord, fp_coord) # Add the time coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((other_coord, None)) + metadata["aux_coords_and_dims"].append((other_coord, None)) # Add the reference time coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((rt_coord, None)) + metadata["aux_coords_and_dims"].append((rt_coord, None)) def generating_process(section, include_forecast_process=True): if options.warn_on_unsupported: # Reference Code Table 4.3. 
- warnings.warn('Unable to translate type of generating process.') - warnings.warn('Unable to translate background generating ' - 'process identifier.') + warnings.warn("Unable to translate type of generating process.") + warnings.warn( + "Unable to translate background generating " "process identifier." + ) if include_forecast_process: - warnings.warn('Unable to translate forecast generating ' - 'process identifier.') + warnings.warn( + "Unable to translate forecast generating " "process identifier." + ) def data_cutoff(hoursAfterDataCutoff, minutesAfterDataCutoff): @@ -1913,11 +2033,11 @@ def data_cutoff(hoursAfterDataCutoff, minutesAfterDataCutoff): Message section 4, octet 17. """ - if (hoursAfterDataCutoff != _MDI or - minutesAfterDataCutoff != _MDI): + if hoursAfterDataCutoff != _MDI or minutesAfterDataCutoff != _MDI: if options.warn_on_unsupported: - warnings.warn('Unable to translate "hours and/or minutes ' - 'after data cutoff".') + warnings.warn( + 'Unable to translate "hours and/or minutes ' 'after data cutoff".' + ) def statistical_method_name(section): @@ -1925,21 +2045,25 @@ def statistical_method_name(section): # Templates 8, 9, 10, 11 and 15 all use this type code, which is defined # in table 4.10. # However, the actual keyname is different for template 15. - section_number = section['productDefinitionTemplateNumber'] + section_number = section["productDefinitionTemplateNumber"] if section_number in (8, 9, 10, 11): - stat_keyname = 'typeOfStatisticalProcessing' + stat_keyname = "typeOfStatisticalProcessing" elif section_number == 15: - stat_keyname = 'statisticalProcess' + stat_keyname = "statisticalProcess" else: # This should *never* happen, as only called by pdt 8 and 15. - msg = ("Internal error: can't get statistical method for unsupported " - "pdt : 4.{:d}.") + msg = ( + "Internal error: can't get statistical method for unsupported " + "pdt : 4.{:d}." 
+ ) raise ValueError(msg.format(section_number)) statistic_code = section[stat_keyname] statistic_name = _STATISTIC_TYPE_NAMES.get(statistic_code) if statistic_name is None: - msg = ('Product definition section 4 contains an unsupported ' - 'statistical process type [{}] ') + msg = ( + "Product definition section 4 contains an unsupported " + "statistical process type [{}] " + ) raise TranslationError(msg.format(statistic_code)) return statistic_name @@ -1960,57 +2084,61 @@ def statistical_cell_method(section): """ # Handle the number of time ranges -- we currently only support one. - n_time_ranges = section['numberOfTimeRange'] + n_time_ranges = section["numberOfTimeRange"] if n_time_ranges != 1: if n_time_ranges == 0: - msg = ('Product definition section 4 specifies aggregation over ' - '"0 time ranges".') + msg = ( + "Product definition section 4 specifies aggregation over " + '"0 time ranges".' + ) raise TranslationError(msg) else: - msg = ('Product definition section 4 specifies aggregation over ' - 'multiple time ranges [{}], which is not yet ' - 'supported.'.format(n_time_ranges)) + msg = ( + "Product definition section 4 specifies aggregation over " + "multiple time ranges [{}], which is not yet " + "supported.".format(n_time_ranges) + ) raise TranslationError(msg) # Decode the type of statistic (aggregation method). statistic_name = statistical_method_name(section) # Decode the type of time increment. - increment_typecode = section['typeOfTimeIncrement'] + increment_typecode = section["typeOfTimeIncrement"] if increment_typecode not in (2, 255): # NOTE: All our current test data seems to contain the value 2, which # is all we currently support. # The exact interpretation of this is still unclear so we also accept # a missing value. 
- msg = ('grib statistic time-increment type [{}] ' - 'is not supported.'.format(increment_typecode)) + msg = "grib statistic time-increment type [{}] " "is not supported.".format( + increment_typecode + ) raise TranslationError(msg) - interval_number = section['timeIncrement'] + interval_number = section["timeIncrement"] if interval_number in (0, _TIME_RANGE_MISSING): intervals_string = None else: - units_string = _TIME_RANGE_UNITS[ - section['indicatorOfUnitForTimeIncrement']] - intervals_string = '{} {}'.format(interval_number, units_string) + units_string = _TIME_RANGE_UNITS[section["indicatorOfUnitForTimeIncrement"]] + intervals_string = "{} {}".format(interval_number, units_string) # Create a cell method to represent the time aggregation. - cell_method = CellMethod(method=statistic_name, - coords='time', - intervals=intervals_string) + cell_method = CellMethod( + method=statistic_name, coords="time", intervals=intervals_string + ) return cell_method def ensemble_identifier(section): if options.warn_on_unsupported: # Reference Code Table 4.6. - warnings.warn('Unable to translate type of ensemble forecast.') - warnings.warn('Unable to translate number of forecasts in ensemble.') + warnings.warn("Unable to translate type of ensemble forecast.") + warnings.warn("Unable to translate number of forecasts in ensemble.") # Create the realization coordinates. - realization = DimCoord(section['perturbationNumber'], - standard_name='realization', - units='no_unit') + realization = DimCoord( + section["perturbationNumber"], standard_name="realization", units="no_unit" + ) return realization @@ -2038,8 +2166,7 @@ def product_definition_template_0(section, metadata, rt_coord): generating_process(section) # Handle the data cutoff. 
- data_cutoff(section['hoursAfterDataCutoff'], - section['minutesAfterDataCutoff']) + data_cutoff(section["hoursAfterDataCutoff"], section["minutesAfterDataCutoff"]) time_coords(section, metadata, rt_coord) @@ -2073,7 +2200,7 @@ def product_definition_template_1(section, metadata, frt_coord): realization = ensemble_identifier(section) # Add the realization coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((realization, None)) + metadata["aux_coords_and_dims"].append((realization, None)) def product_definition_template_6(section, metadata, frt_coord): @@ -2099,12 +2226,10 @@ def product_definition_template_6(section, metadata, frt_coord): # Perform identical message processing. product_definition_template_0(section, metadata, frt_coord) - percentile = DimCoord(section['percentileValue'], - long_name='percentile', - units='%') + percentile = DimCoord(section["percentileValue"], long_name="percentile", units="%") # Add the percentile coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((percentile, None)) + metadata["aux_coords_and_dims"].append((percentile, None)) def product_definition_template_8(section, metadata, frt_coord): @@ -2131,28 +2256,27 @@ def product_definition_template_8(section, metadata, frt_coord): generating_process(section) # Handle the data cutoff. - data_cutoff(section['hoursAfterDataCutoff'], - section['minutesAfterDataCutoff']) + data_cutoff(section["hoursAfterDataCutoff"], section["minutesAfterDataCutoff"]) # Create a cell method to represent the time statistic. time_statistic_cell_method = statistical_cell_method(section) # Add the forecast cell method to the metadata. - metadata['cell_methods'].append(time_statistic_cell_method) + metadata["cell_methods"].append(time_statistic_cell_method) # Add the forecast reference time coordinate to the metadata aux coords, # if it is a forecast reference time, not a time coord, as defined by # significanceOfReferenceTime. 
- if frt_coord.name() != 'time': - metadata['aux_coords_and_dims'].append((frt_coord, None)) + if frt_coord.name() != "time": + metadata["aux_coords_and_dims"].append((frt_coord, None)) # Add a bounded forecast period coordinate. fp_coord = statistical_forecast_period_coord(section, frt_coord) - metadata['aux_coords_and_dims'].append((fp_coord, None)) + metadata["aux_coords_and_dims"].append((fp_coord, None)) # Calculate a bounded validity time coord matching the forecast period. t_coord = validity_time_coord(frt_coord, fp_coord) # Add the time coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((t_coord, None)) + metadata["aux_coords_and_dims"].append((t_coord, None)) # Check for vertical coordinates. vertical_coords(section, metadata) @@ -2183,36 +2307,42 @@ def product_definition_template_9(section, metadata, frt_coord): # Remove the cell_method encoding the underlying statistic, as CF does not # currently support this type of representation. - cell_method, = metadata['cell_methods'] - metadata['cell_methods'] = [] + (cell_method,) = metadata["cell_methods"] + metadata["cell_methods"] = [] # NOTE: we currently don't record the nature of the underlying statistic, # as we don't have an agreed way of representing that in CF. # Return a probability object to control the production of a probability # result. This is done once the underlying phenomenon type is determined, # in 'translate_phenomenon'. - probability_typecode = section['probabilityType'] + probability_typecode = section["probabilityType"] if probability_typecode == 1: # Type is "above upper level". 
- threshold_value = section['scaledValueOfUpperLimit'] + threshold_value = section["scaledValueOfUpperLimit"] if threshold_value == _MDI: - msg = 'Product definition section 4 has missing ' \ - 'scaled value of upper limit' + msg = ( + "Product definition section 4 has missing " + "scaled value of upper limit" + ) raise TranslationError(msg) - threshold_scaling = section['scaleFactorOfUpperLimit'] + threshold_scaling = section["scaleFactorOfUpperLimit"] if threshold_scaling == _MDI: - msg = 'Product definition section 4 has missing ' \ - 'scale factor of upper limit' + msg = ( + "Product definition section 4 has missing " + "scale factor of upper limit" + ) raise TranslationError(msg) # Encode threshold information. threshold = unscale(threshold_value, threshold_scaling) - probability_type = Probability('above_threshold', threshold) + probability_type = Probability("above_threshold", threshold) # Note that GRIB provides separate "above lower threshold" and "above # upper threshold" probability types. This naming style doesn't # recognise that distinction. For now, assume this is not important. 
else: - msg = ('Product definition section 4 contains an unsupported ' - 'probability type [{}]'.format(probability_typecode)) + msg = ( + "Product definition section 4 contains an unsupported " + "probability type [{}]".format(probability_typecode) + ) raise TranslationError(msg) return probability_type @@ -2239,12 +2369,12 @@ def product_definition_template_10(section, metadata, frt_coord): """ product_definition_template_8(section, metadata, frt_coord) - percentile = DimCoord(section['percentileValue'], - long_name='percentile_over_time', - units='no_unit') + percentile = DimCoord( + section["percentileValue"], long_name="percentile_over_time", units="no_unit" + ) # Add the percentile data info - metadata['aux_coords_and_dims'].append((percentile, None)) + metadata["aux_coords_and_dims"].append((percentile, None)) def product_definition_template_11(section, metadata, frt_coord): @@ -2273,7 +2403,7 @@ def product_definition_template_11(section, metadata, frt_coord): realization = ensemble_identifier(section) # Add the realization coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((realization, None)) + metadata["aux_coords_and_dims"].append((realization, None)) def product_definition_template_15(section, metadata, frt_coord): @@ -2297,20 +2427,23 @@ def product_definition_template_15(section, metadata, frt_coord): """ # Check unique keys for this template. - spatial_processing_code = section['spatialProcessing'] + spatial_processing_code = section["spatialProcessing"] # Only a limited number of spatial processing codes are supported if spatial_processing_code not in _SPATIAL_PROCESSING_TYPES.keys(): - msg = ('Product definition section 4 contains an unsupported ' - 'spatial processing type [{}]'.format(spatial_processing_code)) + msg = ( + "Product definition section 4 contains an unsupported " + "spatial processing type [{}]".format(spatial_processing_code) + ) raise TranslationError(msg) # Process parts in common with PDT 4.0. 
product_definition_template_0(section, metadata, frt_coord) # Add spatial processing type as an attribute. - metadata['attributes']['spatial_processing_type'] = \ - _SPATIAL_PROCESSING_TYPES[spatial_processing_code][0] + metadata["attributes"]["spatial_processing_type"] = _SPATIAL_PROCESSING_TYPES[ + spatial_processing_code + ][0] # Add a cell method if the spatial processing type supports a # statistical process. @@ -2319,46 +2452,42 @@ def product_definition_template_15(section, metadata, frt_coord): cell_method_name = statistical_method_name(section) # Record an 'area' cell-method using this statistic. - metadata['cell_methods'] = [CellMethod(coords=('area',), - method=cell_method_name)] + metadata["cell_methods"] = [ + CellMethod(coords=("area",), method=cell_method_name) + ] def satellite_common(section, metadata): # Number of contributing spectral bands. - NB = section['NB'] + NB = section["NB"] if NB > 0: # Create the satellite series coordinate. - satelliteSeries = section['satelliteSeries'] - coord = AuxCoord(satelliteSeries, long_name='satellite_series', - units=1) + satelliteSeries = section["satelliteSeries"] + coord = AuxCoord(satelliteSeries, long_name="satellite_series", units=1) # Add the satellite series coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((coord, None)) + metadata["aux_coords_and_dims"].append((coord, None)) # Create the satellite number coordinate. - satelliteNumber = section['satelliteNumber'] - coord = AuxCoord(satelliteNumber, long_name='satellite_number', - units=1) + satelliteNumber = section["satelliteNumber"] + coord = AuxCoord(satelliteNumber, long_name="satellite_number", units=1) # Add the satellite number coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((coord, None)) + metadata["aux_coords_and_dims"].append((coord, None)) # Create the satellite instrument type coordinate. 
- instrumentType = section['instrumentType'] - coord = AuxCoord(instrumentType, long_name='instrument_type', - units=1) + instrumentType = section["instrumentType"] + coord = AuxCoord(instrumentType, long_name="instrument_type", units=1) # Add the instrument type coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((coord, None)) + metadata["aux_coords_and_dims"].append((coord, None)) # Create the central wave number coordinate. - scaleFactor = section['scaleFactorOfCentralWaveNumber'] - scaledValue = section['scaledValueOfCentralWaveNumber'] + scaleFactor = section["scaleFactorOfCentralWaveNumber"] + scaledValue = section["scaledValueOfCentralWaveNumber"] wave_number = unscale(scaledValue, scaleFactor) - standard_name = 'sensor_band_central_radiation_wavenumber' - coord = AuxCoord(wave_number, - standard_name=standard_name, - units=Unit('m-1')) + standard_name = "sensor_band_central_radiation_wavenumber" + coord = AuxCoord(wave_number, standard_name=standard_name, units=Unit("m-1")) # Add the central wave number coordinate to the metadata aux coords. - metadata['aux_coords_and_dims'].append((coord, None)) + metadata["aux_coords_and_dims"].append((coord, None)) def product_definition_template_31(section, metadata, rt_coord): @@ -2384,7 +2513,7 @@ def product_definition_template_31(section, metadata, rt_coord): satellite_common(section, metadata) # Add the observation time coordinate. - metadata['aux_coords_and_dims'].append((rt_coord, None)) + metadata["aux_coords_and_dims"].append((rt_coord, None)) def product_definition_template_32(section, metadata, rt_coord): @@ -2410,8 +2539,7 @@ def product_definition_template_32(section, metadata, rt_coord): generating_process(section, include_forecast_process=False) # Handle the data cutoff. 
- data_cutoff(section['hoursAfterDataCutoff'], - section['minutesAfterDataCutoff']) + data_cutoff(section["hoursAfterDataCutoff"], section["minutesAfterDataCutoff"]) time_coords(section, metadata, rt_coord) @@ -2442,14 +2570,13 @@ def product_definition_template_40(section, metadata, frt_coord): product_definition_template_0(section, metadata, frt_coord) # Reference GRIB2 Code Table 4.230. - constituent_type = section['constituentType'] + constituent_type = section["constituentType"] # Add the constituent type as an attribute. - metadata['attributes']['WMO_constituent_type'] = constituent_type + metadata["attributes"]["WMO_constituent_type"] = constituent_type -def product_definition_section(section, metadata, discipline, tablesVersion, - rt_coord): +def product_definition_section(section, metadata, discipline, tablesVersion, rt_coord): """ Translate section 4 from the GRIB2 message. @@ -2474,7 +2601,7 @@ def product_definition_section(section, metadata, discipline, tablesVersion, """ # Reference GRIB2 Code Table 4.0. - template = section['productDefinitionTemplateNumber'] + template = section["productDefinitionTemplateNumber"] probability = None includes_fixed_surface_keys = True @@ -2495,8 +2622,7 @@ def product_definition_section(section, metadata, discipline, tablesVersion, # horizontal layer in a continuous or non-continuous time interval. 
product_definition_template_8(section, metadata, rt_coord) elif template == 9: - probability = \ - product_definition_template_9(section, metadata, rt_coord) + probability = product_definition_template_9(section, metadata, rt_coord) elif template == 10: product_definition_template_10(section, metadata, rt_coord) elif template == 11: @@ -2513,31 +2639,31 @@ def product_definition_section(section, metadata, discipline, tablesVersion, elif template == 40: product_definition_template_40(section, metadata, rt_coord) else: - msg = 'Product definition template [{}] is not ' \ - 'supported'.format(template) + msg = "Product definition template [{}] is not " "supported".format(template) raise TranslationError(msg) # Translate GRIB2 phenomenon to CF phenomenon. if tablesVersion != _CODE_TABLES_MISSING: translation_kwargs = { - 'metadata': metadata, - 'discipline': discipline, - 'parameterCategory': section['parameterCategory'], - 'parameterNumber': section['parameterNumber'], - 'probability': probability + "metadata": metadata, + "discipline": discipline, + "parameterCategory": section["parameterCategory"], + "parameterNumber": section["parameterNumber"], + "probability": probability, } # Won't always be able to populate the below arguments - # missing from some template definitions. 
fixed_surface_keys = [ - 'typeOfFirstFixedSurface', - 'scaledValueOfFirstFixedSurface', - 'typeOfSecondFixedSurface' + "typeOfFirstFixedSurface", + "scaledValueOfFirstFixedSurface", + "typeOfSecondFixedSurface", ] for section_key in fixed_surface_keys: - translation_kwargs[section_key] = \ + translation_kwargs[section_key] = ( section[section_key] if includes_fixed_surface_keys else None + ) translate_phenomenon(**translation_kwargs) @@ -2548,6 +2674,7 @@ def product_definition_section(section, metadata, discipline, tablesVersion, # ############################################################################### + def data_representation_section(section): """ Translate section 5 from the GRIB2 message. @@ -2557,7 +2684,7 @@ def data_representation_section(section): """ # Reference GRIB2 Code Table 5.0. - template = section['dataRepresentationTemplateNumber'] + template = section["dataRepresentationTemplateNumber"] # Supported templates for both grid point and spectral data: grid_point_templates = (0, 1, 2, 3, 4, 40, 41, 42, 61) @@ -2565,8 +2692,9 @@ def data_representation_section(section): supported_templates = grid_point_templates + spectral_templates if template not in supported_templates: - msg = 'Data Representation Section Template [{}] is not ' \ - 'supported'.format(template) + msg = "Data Representation Section Template [{}] is not " "supported".format( + template + ) raise TranslationError(msg) @@ -2576,6 +2704,7 @@ def data_representation_section(section): # ############################################################################### + def bitmap_section(section): """ Translate section 6 from the GRIB2 message. @@ -2595,16 +2724,18 @@ def bitmap_section(section): """ # Reference GRIB2 Code Table 6.0. 
- bitMapIndicator = section['bitMapIndicator'] + bitMapIndicator = section["bitMapIndicator"] if bitMapIndicator not in [_BITMAP_CODE_NONE, _BITMAP_CODE_PRESENT]: - msg = 'Bitmap Section 6 contains unsupported ' \ - 'bitmap indicator [{}]'.format(bitMapIndicator) + msg = "Bitmap Section 6 contains unsupported " "bitmap indicator [{}]".format( + bitMapIndicator + ) raise TranslationError(msg) ############################################################################### + def grib2_convert(field, metadata): """ Translate the GRIB2 message into the appropriate cube metadata. @@ -2621,19 +2752,22 @@ def grib2_convert(field, metadata): """ # Section 1 - Identification Section. - centre = _CENTRES.get(field.sections[1]['centre']) + centre = _CENTRES.get(field.sections[1]["centre"]) if centre is not None: - metadata['attributes']['centre'] = centre + metadata["attributes"]["centre"] = centre rt_coord = reference_time_coord(field.sections[1]) # Section 3 - Grid Definition Section (Grid Definition Template) grid_definition_section(field.sections[3], metadata) # Section 4 - Product Definition Section (Product Definition Template) - product_definition_section(field.sections[4], metadata, - field.sections[0]['discipline'], - field.sections[1]['tablesVersion'], - rt_coord) + product_definition_section( + field.sections[4], + metadata, + field.sections[0]["discipline"], + field.sections[1]["tablesVersion"], + rt_coord, + ) # Section 5 - Data Representation Section (Data Representation Template) data_representation_section(field.sections[5]) @@ -2644,6 +2778,7 @@ def grib2_convert(field, metadata): ############################################################################### + def convert(field): """ Translate the GRIB message into the appropriate cube metadata. @@ -2657,25 +2792,24 @@ def convert(field): A :class:`iris.fileformats.rules.ConversionMetadata` object. 
""" - if hasattr(field, 'sections'): - editionNumber = field.sections[0]['editionNumber'] + if hasattr(field, "sections"): + editionNumber = field.sections[0]["editionNumber"] if editionNumber != 2: - emsg = 'GRIB edition {} is not supported by {!r}.' - raise TranslationError(emsg.format(editionNumber, - type(field).__name__)) + emsg = "GRIB edition {} is not supported by {!r}." + raise TranslationError(emsg.format(editionNumber, type(field).__name__)) # Initialise the cube metadata. metadata = OrderedDict() - metadata['factories'] = [] - metadata['references'] = [] - metadata['standard_name'] = None - metadata['long_name'] = None - metadata['units'] = None - metadata['attributes'] = {} - metadata['cell_methods'] = [] - metadata['dim_coords_and_dims'] = [] - metadata['aux_coords_and_dims'] = [] + metadata["factories"] = [] + metadata["references"] = [] + metadata["standard_name"] = None + metadata["long_name"] = None + metadata["units"] = None + metadata["attributes"] = {} + metadata["cell_methods"] = [] + metadata["dim_coords_and_dims"] = [] + metadata["aux_coords_and_dims"] = [] # Convert GRIB2 message to cube metadata. grib2_convert(field, metadata) @@ -2685,9 +2819,8 @@ def convert(field): editionNumber = field.edition if editionNumber != 1: - emsg = 'GRIB edition {} is not supported by {!r}.' - raise TranslationError(emsg.format(editionNumber, - type(field).__name__)) + emsg = "GRIB edition {} is not supported by {!r}." 
+ raise TranslationError(emsg.format(editionNumber, type(field).__name__)) result = grib1_convert(field) diff --git a/iris_grib/_save_rules.py b/iris_grib/_save_rules.py index df1be8d2c..a47bd453e 100644 --- a/iris_grib/_save_rules.py +++ b/iris_grib/_save_rules.py @@ -20,26 +20,32 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.coord_systems import (GeogCS, RotatedGeogCS, Mercator, - TransverseMercator, LambertConformal, - LambertAzimuthalEqualArea) +from iris.coord_systems import ( + GeogCS, + RotatedGeogCS, + Mercator, + TransverseMercator, + LambertConformal, + LambertAzimuthalEqualArea, +) from iris.exceptions import TranslationError -from ._iris_mercator_support import confirm_extended_mercator_supported from . import grib_phenom_translation as gptx -from ._load_convert import (_STATISTIC_TYPE_NAMES, _TIME_RANGE_UNITS, - _SPATIAL_PROCESSING_TYPES) +from ._load_convert import ( + _STATISTIC_TYPE_NAMES, + _TIME_RANGE_UNITS, + _SPATIAL_PROCESSING_TYPES, +) from .grib_phenom_translation import GRIBCode from iris.util import is_regular, regular_step # Invert code tables from :mod:`iris_grib._load_convert`. -_STATISTIC_TYPE_NAMES_INVERTED = {val: key for key, val in - _STATISTIC_TYPE_NAMES.items()} -_TIME_RANGE_UNITS_INVERTED = { - val: key for key, val in _TIME_RANGE_UNITS.items() +_STATISTIC_TYPE_NAMES_INVERTED = { + val: key for key, val in _STATISTIC_TYPE_NAMES.items() } +_TIME_RANGE_UNITS_INVERTED = {val: key for key, val in _TIME_RANGE_UNITS.items()} def fixup_float32_as_int32(value): @@ -52,8 +58,8 @@ def fixup_float32_as_int32(value): value. """ - value_as_float32 = np.array(value, dtype='f4') - value_as_uint32 = value_as_float32.view(dtype='u4') + value_as_float32 = np.array(value, dtype="f4") + value_as_uint32 = value_as_float32.view(dtype="u4") if value_as_uint32 >= 0x80000000: # Convert from two's-complement to sign-and-magnitude. # NB. 
Because of the silly representation of negative @@ -78,12 +84,12 @@ def fixup_int32_as_uint32(value): """ value = int(value) - if -0x7fffffff <= value <= 0x7fffffff: + if -0x7FFFFFFF <= value <= 0x7FFFFFFF: if value < 0: # Convert from two's-complement to sign-and-magnitude. value = 0x80000000 - value else: - msg = '{} out of range -2147483647 to 2147483647.'.format(value) + msg = "{} out of range -2147483647 to 2147483647.".format(value) raise ValueError(msg) return value @@ -142,9 +148,9 @@ def reference_time(cube, grib): eccodes.codes_set_long(grib, "significanceOfReferenceTime", rt_meaning) eccodes.codes_set_long( - grib, "dataDate", "%04d%02d%02d" % (rt.year, rt.month, rt.day)) - eccodes.codes_set_long( - grib, "dataTime", "%02d%02d" % (rt.hour, rt.minute)) + grib, "dataDate", "%04d%02d%02d" % (rt.year, rt.month, rt.day) + ) + eccodes.codes_set_long(grib, "dataTime", "%02d%02d" % (rt.hour, rt.minute)) # TODO: Set the calendar, when we find out what happened to the proposal! # http://tinyurl.com/oefqgv6 @@ -161,10 +167,10 @@ def identification(cube, grib): # Code table 1.4 # analysis, forecast, processed satellite, processed radar, - if cube.coords('realization'): + if cube.coords("realization"): # assume realization will always have 1 and only 1 point # as cubes saving to GRIB2 a 2D horizontal slices - if cube.coord('realization').points[0] != 0: + if cube.coord("realization").points[0] != 0: eccodes.codes_set_long(grib, "typeOfProcessedData", 4) else: eccodes.codes_set_long(grib, "typeOfProcessedData", 3) @@ -185,30 +191,32 @@ def shape_of_the_earth(cube, grib): # Initially set shape_of_earth keys to missing (255 for byte). 
eccodes.codes_set_long(grib, "scaleFactorOfRadiusOfSphericalEarth", 255) - eccodes.codes_set_long(grib, "scaledValueOfRadiusOfSphericalEarth", - GRIB_MISSING_LONG) + eccodes.codes_set_long( + grib, "scaledValueOfRadiusOfSphericalEarth", GRIB_MISSING_LONG + ) eccodes.codes_set_long(grib, "scaleFactorOfEarthMajorAxis", 255) - eccodes.codes_set_long(grib, "scaledValueOfEarthMajorAxis", - GRIB_MISSING_LONG) + eccodes.codes_set_long(grib, "scaledValueOfEarthMajorAxis", GRIB_MISSING_LONG) eccodes.codes_set_long(grib, "scaleFactorOfEarthMinorAxis", 255) - eccodes.codes_set_long(grib, "scaledValueOfEarthMinorAxis", - GRIB_MISSING_LONG) + eccodes.codes_set_long(grib, "scaledValueOfEarthMinorAxis", GRIB_MISSING_LONG) if isinstance(cs, GeogCS): ellipsoid = cs else: ellipsoid = cs.ellipsoid if ellipsoid is None: - msg = "Could not determine shape of the earth from coord system "\ - "of horizontal grid." + msg = ( + "Could not determine shape of the earth from coord system " + "of horizontal grid." + ) raise TranslationError(msg) # Spherical earth. 
if ellipsoid.inverse_flattening == 0.0: eccodes.codes_set_long(grib, "shapeOfTheEarth", 1) eccodes.codes_set_long(grib, "scaleFactorOfRadiusOfSphericalEarth", 0) - eccodes.codes_set_long(grib, "scaledValueOfRadiusOfSphericalEarth", - ellipsoid.semi_major_axis) + eccodes.codes_set_long( + grib, "scaledValueOfRadiusOfSphericalEarth", ellipsoid.semi_major_axis + ) eccodes.codes_set_long(grib, "scaleFactorOfEarthMajorAxis", 0) eccodes.codes_set_long(grib, "scaledValueOfEarthMajorAxis", 0) eccodes.codes_set_long(grib, "scaleFactorOfEarthMinorAxis", 0) @@ -217,11 +225,13 @@ def shape_of_the_earth(cube, grib): else: eccodes.codes_set_long(grib, "shapeOfTheEarth", 7) eccodes.codes_set_long(grib, "scaleFactorOfEarthMajorAxis", 0) - eccodes.codes_set_long(grib, "scaledValueOfEarthMajorAxis", - ellipsoid.semi_major_axis) + eccodes.codes_set_long( + grib, "scaledValueOfEarthMajorAxis", ellipsoid.semi_major_axis + ) eccodes.codes_set_long(grib, "scaleFactorOfEarthMinorAxis", 0) - eccodes.codes_set_long(grib, "scaledValueOfEarthMinorAxis", - ellipsoid.semi_minor_axis) + eccodes.codes_set_long( + grib, "scaledValueOfEarthMinorAxis", ellipsoid.semi_minor_axis + ) def grid_dims(x_coord, y_coord, grib, x_str, y_str): @@ -233,24 +243,28 @@ def latlon_first_last(x_coord, y_coord, grib): if x_coord.has_bounds() or y_coord.has_bounds(): warnings.warn("Ignoring xy bounds") -# XXX Pending #1125 -# eccodes.codes_set_double(grib, "latitudeOfFirstGridPointInDegrees", -# float(y_coord.points[0])) -# eccodes.codes_set_double(grib, "latitudeOfLastGridPointInDegrees", -# float(y_coord.points[-1])) -# eccodes.codes_set_double(grib, "longitudeOfFirstGridPointInDegrees", -# float(x_coord.points[0])) -# eccodes.codes_set_double(grib, "longitudeOfLastGridPointInDegrees", -# float(x_coord.points[-1])) -# WORKAROUND - eccodes.codes_set_long(grib, "latitudeOfFirstGridPoint", - int(y_coord.points[0]*1000000)) - eccodes.codes_set_long(grib, "latitudeOfLastGridPoint", - int(y_coord.points[-1]*1000000)) - 
eccodes.codes_set_long(grib, "longitudeOfFirstGridPoint", - int((x_coord.points[0] % 360)*1000000)) - eccodes.codes_set_long(grib, "longitudeOfLastGridPoint", - int((x_coord.points[-1] % 360)*1000000)) + # XXX Pending #1125 + # eccodes.codes_set_double(grib, "latitudeOfFirstGridPointInDegrees", + # float(y_coord.points[0])) + # eccodes.codes_set_double(grib, "latitudeOfLastGridPointInDegrees", + # float(y_coord.points[-1])) + # eccodes.codes_set_double(grib, "longitudeOfFirstGridPointInDegrees", + # float(x_coord.points[0])) + # eccodes.codes_set_double(grib, "longitudeOfLastGridPointInDegrees", + # float(x_coord.points[-1])) + # WORKAROUND + eccodes.codes_set_long( + grib, "latitudeOfFirstGridPoint", int(y_coord.points[0] * 1000000) + ) + eccodes.codes_set_long( + grib, "latitudeOfLastGridPoint", int(y_coord.points[-1] * 1000000) + ) + eccodes.codes_set_long( + grib, "longitudeOfFirstGridPoint", int((x_coord.points[0] % 360) * 1000000) + ) + eccodes.codes_set_long( + grib, "longitudeOfLastGridPoint", int((x_coord.points[-1] % 360) * 1000000) + ) def dx_dy(x_coord, y_coord, grib): @@ -259,25 +273,27 @@ def dx_dy(x_coord, y_coord, grib): # Set x and y step. For degrees, this is encoded as an integer: # 1 * 10^6 * floating point value. 
# WMO Manual on Codes regulation 92.1.6 - if x_coord.units == 'degrees': - eccodes.codes_set(grib, "iDirectionIncrement", - round(1e6 * float(abs(x_step)))) + if x_coord.units == "degrees": + eccodes.codes_set(grib, "iDirectionIncrement", round(1e6 * float(abs(x_step)))) else: - raise ValueError('X coordinate must be in degrees, not {}' - '.'.format(x_coord.units)) - if y_coord.units == 'degrees': - eccodes.codes_set(grib, "jDirectionIncrement", - round(1e6 * float(abs(y_step)))) + raise ValueError( + "X coordinate must be in degrees, not {}" ".".format(x_coord.units) + ) + if y_coord.units == "degrees": + eccodes.codes_set(grib, "jDirectionIncrement", round(1e6 * float(abs(y_step)))) else: - raise ValueError('Y coordinate must be in degrees, not {}' - '.'.format(y_coord.units)) + raise ValueError( + "Y coordinate must be in degrees, not {}" ".".format(y_coord.units) + ) def scanning_mode_flags(x_coord, y_coord, grib): - eccodes.codes_set_long(grib, "iScansPositively", - int(x_coord.points[1] - x_coord.points[0] > 0)) - eccodes.codes_set_long(grib, "jScansPositively", - int(y_coord.points[1] - y_coord.points[0] > 0)) + eccodes.codes_set_long( + grib, "iScansPositively", int(x_coord.points[1] - x_coord.points[0] > 0) + ) + eccodes.codes_set_long( + grib, "jScansPositively", int(y_coord.points[1] - y_coord.points[0] > 0) + ) def horizontal_grid_common(cube, grib, xy=False): @@ -302,13 +318,17 @@ def latlon_points_irregular(cube, grib): x_coord = cube.coord(dimensions=[1]) # Distinguish between true-north and grid-oriented vectors. - is_grid_wind = cube.name() in ('x_wind', 'y_wind', 'grid_eastward_wind', - 'grid_northward_wind') + is_grid_wind = cube.name() in ( + "x_wind", + "y_wind", + "grid_eastward_wind", + "grid_northward_wind", + ) # Encode in bit "5" of 'resolutionAndComponentFlags' (other bits unused). 
component_flags = 0 if is_grid_wind: - component_flags |= 2 ** _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT - eccodes.codes_set(grib, 'resolutionAndComponentFlags', component_flags) + component_flags |= 2**_RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT + eccodes.codes_set(grib, "resolutionAndComponentFlags", component_flags) # Record the X and Y coordinate values. # NOTE: there is currently a bug in the gribapi which means that the size @@ -317,10 +337,12 @@ def latlon_points_irregular(cube, grib): # So, this only works at present if the x and y dimensions are **equal**. lon_values = x_coord.points / _DEFAULT_DEGREES_UNITS lat_values = y_coord.points / _DEFAULT_DEGREES_UNITS - eccodes.codes_set_array(grib, 'longitude', - np.array(np.round(lon_values), dtype=np.int64)) - eccodes.codes_set_array(grib, 'latitude', - np.array(np.round(lat_values), dtype=np.int64)) + eccodes.codes_set_array( + grib, "longitude", np.array(np.round(lon_values), dtype=np.int64) + ) + eccodes.codes_set_array( + grib, "latitude", np.array(np.round(lat_values), dtype=np.int64) + ) def rotated_pole(cube, grib): @@ -329,19 +351,19 @@ def rotated_pole(cube, grib): if cs.north_pole_grid_longitude != 0.0: raise TranslationError( - 'Grib save does not yet support Rotated-pole coordinates with ' - 'a rotated prime meridian.') -# XXX Pending #1125 -# eccodes.codes_set_double(grib, "latitudeOfSouthernPoleInDegrees", -# float(cs.n_pole.latitude)) -# eccodes.codes_set_double(grib, "longitudeOfSouthernPoleInDegrees", -# float(cs.n_pole.longitude)) -# eccodes.codes_set_double(grib, "angleOfRotationInDegrees", 0) -# WORKAROUND + "Grib save does not yet support Rotated-pole coordinates with " + "a rotated prime meridian." 
+ ) + # XXX Pending #1125 + # eccodes.codes_set_double(grib, "latitudeOfSouthernPoleInDegrees", + # float(cs.n_pole.latitude)) + # eccodes.codes_set_double(grib, "longitudeOfSouthernPoleInDegrees", + # float(cs.n_pole.longitude)) + # eccodes.codes_set_double(grib, "angleOfRotationInDegrees", 0) + # WORKAROUND latitude = cs.grid_north_pole_latitude / _DEFAULT_DEGREES_UNITS - longitude = (((cs.grid_north_pole_longitude + 180) % 360) / - _DEFAULT_DEGREES_UNITS) - eccodes.codes_set(grib, "latitudeOfSouthernPole", - int(round(latitude))) + longitude = ((cs.grid_north_pole_longitude + 180) % 360) / _DEFAULT_DEGREES_UNITS + eccodes.codes_set(grib, "latitudeOfSouthernPole", -int(round(latitude))) eccodes.codes_set(grib, "longitudeOfSouthernPole", int(round(longitude))) eccodes.codes_set(grib, "angleOfRotation", 0) @@ -457,8 +479,8 @@ def grid_definition_template_10(cube, grib): # Normalise the coordinate values to millimetres - the resolution # used in the GRIB message. - y_mm = points_in_unit(y_coord, 'mm') - x_mm = points_in_unit(x_coord, 'mm') + y_mm = points_in_unit(y_coord, "mm") + x_mm = points_in_unit(x_coord, "mm") # Encode the horizontal points. @@ -468,46 +490,54 @@ def grid_definition_template_10(cube, grib): x_step = step(x_mm, atol=1) y_step = step(y_mm, atol=1) except ValueError: - msg = 'Irregular coordinates not supported for Mercator.' + msg = "Irregular coordinates not supported for Mercator." raise TranslationError(msg) - eccodes.codes_set(grib, 'Di', abs(x_step)) - eccodes.codes_set(grib, 'Dj', abs(y_step)) + eccodes.codes_set(grib, "Di", abs(x_step)) + eccodes.codes_set(grib, "Dj", abs(y_step)) horizontal_grid_common(cube, grib) # Transform first and last points into geographic CS. 
geog = cs.ellipsoid if cs.ellipsoid is not None else GeogCS(1) - first_x, first_y, = geog.as_cartopy_crs().transform_point( - x_coord.points[0], - y_coord.points[0], - cs.as_cartopy_crs()) + ( + first_x, + first_y, + ) = geog.as_cartopy_crs().transform_point( + x_coord.points[0], y_coord.points[0], cs.as_cartopy_crs() + ) last_x, last_y = geog.as_cartopy_crs().transform_point( - x_coord.points[-1], - y_coord.points[-1], - cs.as_cartopy_crs()) + x_coord.points[-1], y_coord.points[-1], cs.as_cartopy_crs() + ) first_x = first_x % 360 last_x = last_x % 360 - eccodes.codes_set(grib, "latitudeOfFirstGridPoint", - int(np.round(first_y / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, "longitudeOfFirstGridPoint", - int(np.round(first_x / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, "latitudeOfLastGridPoint", - int(np.round(last_y / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, "longitudeOfLastGridPoint", - int(np.round(last_x / _DEFAULT_DEGREES_UNITS))) - - # Check and raise a more intelligible error, if the Iris version is too old - # to support the Mercator 'standard_parallel' property. - confirm_extended_mercator_supported() + eccodes.codes_set( + grib, + "latitudeOfFirstGridPoint", + int(np.round(first_y / _DEFAULT_DEGREES_UNITS)), + ) + eccodes.codes_set( + grib, + "longitudeOfFirstGridPoint", + int(np.round(first_x / _DEFAULT_DEGREES_UNITS)), + ) + eccodes.codes_set( + grib, "latitudeOfLastGridPoint", int(np.round(last_y / _DEFAULT_DEGREES_UNITS)) + ) + eccodes.codes_set( + grib, "longitudeOfLastGridPoint", int(np.round(last_x / _DEFAULT_DEGREES_UNITS)) + ) + # Encode the latitude at which the projection intersects the Earth. 
- eccodes.codes_set(grib, 'LaD', - cs.standard_parallel / _DEFAULT_DEGREES_UNITS) + eccodes.codes_set(grib, "LaD", cs.standard_parallel / _DEFAULT_DEGREES_UNITS) # Encode resolution and component flags - eccodes.codes_set(grib, 'resolutionAndComponentFlags', - 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT) + eccodes.codes_set( + grib, + "resolutionAndComponentFlags", + 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT, + ) def grid_definition_template_12(cube, grib): @@ -527,8 +557,8 @@ def grid_definition_template_12(cube, grib): # Normalise the coordinate values to centimetres - the resolution # used in the GRIB message. - y_cm = points_in_unit(y_coord, 'cm') - x_cm = points_in_unit(x_coord, 'cm') + y_cm = points_in_unit(y_coord, "cm") + x_cm = points_in_unit(x_coord, "cm") # Set some keys specific to GDT12. # Encode the horizontal points. @@ -539,29 +569,30 @@ def grid_definition_template_12(cube, grib): x_step = step(x_cm, atol=1) y_step = step(y_cm, atol=1) except ValueError: - msg = ('Irregular coordinates not supported for Transverse ' - 'Mercator.') + msg = "Irregular coordinates not supported for Transverse " "Mercator." raise TranslationError(msg) - eccodes.codes_set(grib, 'Di', abs(x_step)) - eccodes.codes_set(grib, 'Dj', abs(y_step)) + eccodes.codes_set(grib, "Di", abs(x_step)) + eccodes.codes_set(grib, "Dj", abs(y_step)) horizontal_grid_common(cube, grib) # GRIBAPI expects unsigned ints in X1, X2, Y1, Y2 but it should accept # signed ints, so work around this. 
# See https://software.ecmwf.int/issues/browse/SUP-1101 - ensure_set_int32_value(grib, 'Y1', int(y_cm[0])) - ensure_set_int32_value(grib, 'Y2', int(y_cm[-1])) - ensure_set_int32_value(grib, 'X1', int(x_cm[0])) - ensure_set_int32_value(grib, 'X2', int(x_cm[-1])) + ensure_set_int32_value(grib, "Y1", int(y_cm[0])) + ensure_set_int32_value(grib, "Y2", int(y_cm[-1])) + ensure_set_int32_value(grib, "X1", int(x_cm[0])) + ensure_set_int32_value(grib, "X2", int(x_cm[-1])) # Lat and lon of reference point are measured in millionths of a degree. eccodes.codes_set( - grib, "latitudeOfReferencePoint", - cs.latitude_of_projection_origin / _DEFAULT_DEGREES_UNITS + grib, + "latitudeOfReferencePoint", + cs.latitude_of_projection_origin / _DEFAULT_DEGREES_UNITS, ) eccodes.codes_set( - grib, "longitudeOfReferencePoint", - cs.longitude_of_central_meridian / _DEFAULT_DEGREES_UNITS + grib, + "longitudeOfReferencePoint", + cs.longitude_of_central_meridian / _DEFAULT_DEGREES_UNITS, ) # Convert a value in metres into the closest integer number of @@ -570,15 +601,14 @@ def m_to_cm(value): return int(round(value * 100)) # False easting and false northing are measured in units of (10^-2)m. - eccodes.codes_set(grib, 'XR', m_to_cm(cs.false_easting)) - eccodes.codes_set(grib, 'YR', m_to_cm(cs.false_northing)) + eccodes.codes_set(grib, "XR", m_to_cm(cs.false_easting)) + eccodes.codes_set(grib, "YR", m_to_cm(cs.false_northing)) # GRIBAPI expects a signed int for scaleFactorAtReferencePoint # but it should accept a float, so work around this. 
# See https://software.ecmwf.int/issues/browse/SUP-1100 value = cs.scale_factor_at_central_meridian - key_type = eccodes.codes_get_native_type(grib, - "scaleFactorAtReferencePoint") + key_type = eccodes.codes_get_native_type(grib, "scaleFactorAtReferencePoint") if key_type is not float: value = fixup_float32_as_int32(value) eccodes.codes_set(grib, "scaleFactorAtReferencePoint", value) @@ -602,8 +632,8 @@ def grid_definition_template_30(cube, grib): # Normalise the coordinate values to millimetres - the resolution # used in the GRIB message. - y_mm = points_in_unit(y_coord, 'mm') - x_mm = points_in_unit(x_coord, 'mm') + y_mm = points_in_unit(y_coord, "mm") + x_mm = points_in_unit(x_coord, "mm") # Encode the horizontal points. @@ -613,40 +643,47 @@ def grid_definition_template_30(cube, grib): x_step = step(x_mm, atol=1) y_step = step(y_mm, atol=1) except ValueError: - msg = ('Irregular coordinates not supported for Lambert ' - 'Conformal.') + msg = "Irregular coordinates not supported for Lambert " "Conformal." 
raise TranslationError(msg) - eccodes.codes_set(grib, 'Dx', abs(x_step)) - eccodes.codes_set(grib, 'Dy', abs(y_step)) + eccodes.codes_set(grib, "Dx", abs(x_step)) + eccodes.codes_set(grib, "Dy", abs(y_step)) horizontal_grid_common(cube, grib, xy=True) # Transform first point into geographic CS geog = cs.ellipsoid if cs.ellipsoid is not None else GeogCS(1) first_x, first_y = geog.as_cartopy_crs().transform_point( - x_coord.points[0], - y_coord.points[0], - cs.as_cartopy_crs()) + x_coord.points[0], y_coord.points[0], cs.as_cartopy_crs() + ) first_x = first_x % 360 central_lon = cs.central_lon % 360 - eccodes.codes_set(grib, "latitudeOfFirstGridPoint", - int(np.round(first_y / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, "longitudeOfFirstGridPoint", - int(np.round(first_x / _DEFAULT_DEGREES_UNITS))) + eccodes.codes_set( + grib, + "latitudeOfFirstGridPoint", + int(np.round(first_y / _DEFAULT_DEGREES_UNITS)), + ) + eccodes.codes_set( + grib, + "longitudeOfFirstGridPoint", + int(np.round(first_x / _DEFAULT_DEGREES_UNITS)), + ) eccodes.codes_set(grib, "LaD", cs.central_lat / _DEFAULT_DEGREES_UNITS) eccodes.codes_set(grib, "LoV", central_lon / _DEFAULT_DEGREES_UNITS) latin1, latin2 = cs.secant_latitudes eccodes.codes_set(grib, "Latin1", latin1 / _DEFAULT_DEGREES_UNITS) eccodes.codes_set(grib, "Latin2", latin2 / _DEFAULT_DEGREES_UNITS) - eccodes.codes_set(grib, 'resolutionAndComponentFlags', - 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT) + eccodes.codes_set( + grib, + "resolutionAndComponentFlags", + 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT, + ) # Which pole are the parallels closest to? That is the direction # that the cone converges. 
poliest_sec = latin1 if abs(latin1) > abs(latin2) else latin2 centre_flag = 0x0 if poliest_sec > 0 else 0x1 - eccodes.codes_set(grib, 'projectionCentreFlag', centre_flag) + eccodes.codes_set(grib, "projectionCentreFlag", centre_flag) eccodes.codes_set(grib, "latitudeOfSouthernPole", 0) eccodes.codes_set(grib, "longitudeOfSouthernPole", 0) @@ -669,8 +706,8 @@ def grid_definition_template_140(cube, grib): # Normalise the coordinate values to millimetres - the resolution # used in the GRIB message. - y_mm = points_in_unit(y_coord, 'mm') - x_mm = points_in_unit(x_coord, 'mm') + y_mm = points_in_unit(y_coord, "mm") + x_mm = points_in_unit(x_coord, "mm") # Encode the horizontal points. @@ -680,40 +717,52 @@ def grid_definition_template_140(cube, grib): x_step = step(x_mm, atol=1) y_step = step(y_mm, atol=1) except ValueError: - msg = ('Irregular coordinates not supported for Lambert ' - 'Azimuthal Equal Area.') + msg = "Irregular coordinates not supported for Lambert " "Azimuthal Equal Area." raise TranslationError(msg) - eccodes.codes_set(grib, 'Dx', abs(x_step)) - eccodes.codes_set(grib, 'Dy', abs(y_step)) + eccodes.codes_set(grib, "Dx", abs(x_step)) + eccodes.codes_set(grib, "Dy", abs(y_step)) horizontal_grid_common(cube, grib, xy=True) # Transform first point into geographic CS geog = cs.ellipsoid if cs.ellipsoid is not None else GeogCS(1) first_x, first_y = geog.as_cartopy_crs().transform_point( - x_coord.points[0], - y_coord.points[0], - cs.as_cartopy_crs()) + x_coord.points[0], y_coord.points[0], cs.as_cartopy_crs() + ) first_x = first_x % 360 central_lon = cs.longitude_of_projection_origin % 360 central_lat = cs.latitude_of_projection_origin - eccodes.codes_set(grib, "latitudeOfFirstGridPoint", - int(np.round(first_y / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, "longitudeOfFirstGridPoint", - int(np.round(first_x / _DEFAULT_DEGREES_UNITS))) - eccodes.codes_set(grib, 'standardParallelInMicrodegrees', - central_lat / _DEFAULT_DEGREES_UNITS) - 
eccodes.codes_set(grib, 'centralLongitudeInMicrodegrees', - central_lon / _DEFAULT_DEGREES_UNITS) - eccodes.codes_set(grib, 'resolutionAndComponentFlags', - 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT) - if (not (np.isclose(cs.false_easting, 0.0, atol=1e-6)) or - not (np.isclose(cs.false_northing, 0.0, atol=1e-6))): - msg = ('non zero false easting ({:.2f}) or ' - 'non zero false northing ({:.2f})' - '; unsupported by GRIB Template 3.140' - '').format(cs.false_easting, cs.false_northing) + eccodes.codes_set( + grib, + "latitudeOfFirstGridPoint", + int(np.round(first_y / _DEFAULT_DEGREES_UNITS)), + ) + eccodes.codes_set( + grib, + "longitudeOfFirstGridPoint", + int(np.round(first_x / _DEFAULT_DEGREES_UNITS)), + ) + eccodes.codes_set( + grib, "standardParallelInMicrodegrees", central_lat / _DEFAULT_DEGREES_UNITS + ) + eccodes.codes_set( + grib, "centralLongitudeInMicrodegrees", central_lon / _DEFAULT_DEGREES_UNITS + ) + eccodes.codes_set( + grib, + "resolutionAndComponentFlags", + 0x1 << _RESOLUTION_AND_COMPONENTS_GRID_WINDS_BIT, + ) + if not (np.isclose(cs.false_easting, 0.0, atol=1e-6)) or not ( + np.isclose(cs.false_northing, 0.0, atol=1e-6) + ): + msg = ( + "non zero false easting ({:.2f}) or " + "non zero false northing ({:.2f})" + "; unsupported by GRIB Template 3.140" + "" + ).format(cs.false_easting, cs.false_northing) raise TranslationError(msg) @@ -759,8 +808,8 @@ def grid_definition_section(cube, grib): grid_definition_template_140(cube, grib) else: - name = cs.grid_mapping_name.replace('_', ' ').title() - emsg = 'Grib saving is not supported for coordinate system {!r}' + name = cs.grid_mapping_name.replace("_", " ").title() + emsg = "Grib saving is not supported for coordinate system {!r}" raise ValueError(emsg.format(name)) @@ -770,13 +819,14 @@ def grid_definition_section(cube, grib): # ############################################################################### + def set_discipline_and_parameter(cube, grib): # Default values for parameter 
identity keys = effectively "MISSING". discipline, category, number = 255, 255, 255 identity_found = False # First, see if we can find and interpret a 'GRIB_PARAM' attribute. - attr = cube.attributes.get('GRIB_PARAM', None) + attr = cube.attributes.get("GRIB_PARAM", None) if attr: try: # Convert to standard tuple-derived form. @@ -793,8 +843,7 @@ def set_discipline_and_parameter(cube, grib): # Else, translate a cube phenomenon, if possible. # NOTE: for now, can match by *either* standard_name or long_name. # This allows workarounds for data with no identified standard_name. - grib2_info = gptx.cf_phenom_to_grib2_info(cube.standard_name, - cube.long_name) + grib2_info = gptx.cf_phenom_to_grib2_info(cube.standard_name, cube.long_name) if grib2_info is not None: discipline = grib2_info.discipline category = grib2_info.category @@ -802,9 +851,11 @@ def set_discipline_and_parameter(cube, grib): identity_found = True if not identity_found: - warnings.warn('Unable to determine Grib2 parameter code for cube.\n' - 'discipline, parameterCategory and parameterNumber ' - 'have been set to "missing".') + warnings.warn( + "Unable to determine Grib2 parameter code for cube.\n" + "discipline, parameterCategory and parameterNumber " + 'have been set to "missing".' + ) eccodes.codes_set(grib, "discipline", discipline) eccodes.codes_set(grib, "parameterCategory", category) @@ -816,10 +867,9 @@ def _non_missing_forecast_period(cube): fp_coord = cube.coord("forecast_period") # Convert fp and t to hours so we can subtract to calculate R. 
- cf_fp_hrs = fp_coord.units.convert(fp_coord.points[0], 'hours') + cf_fp_hrs = fp_coord.units.convert(fp_coord.points[0], "hours") t_coord = cube.coord("time").copy() - hours_since = cf_units.Unit("hours since epoch", - calendar=t_coord.units.calendar) + hours_since = cf_units.Unit("hours since epoch", calendar=t_coord.units.calendar) t_coord.convert_units(hours_since) rt_num = t_coord.points[0] - cf_fp_hrs @@ -835,20 +885,20 @@ def _non_missing_forecast_period(cube): grib_time_code = 13 else: raise TranslationError( - "Unexpected units for 'forecast_period' : %s" % fp_coord.units) + "Unexpected units for 'forecast_period' : %s" % fp_coord.units + ) if not t_coord.has_bounds(): fp = fp_coord.points[0] else: if not fp_coord.has_bounds(): raise TranslationError( - "bounds on 'time' coordinate requires bounds on" - " 'forecast_period'.") + "bounds on 'time' coordinate requires bounds on" " 'forecast_period'." + ) fp = fp_coord.bounds[0][0] if fp - int(fp): - warnings.warn("forecast_period encoding problem: " - "scaling required.") + warnings.warn("forecast_period encoding problem: " "scaling required.") fp = int(fp) return rt, rt_meaning, fp, grib_time_code @@ -862,11 +912,12 @@ def _missing_forecast_period(cube): """ t_coord = cube.coord("time") - if cube.coords('forecast_reference_time'): + if cube.coords("forecast_reference_time"): # Make copies and convert them to common "hours since" units. 
- hours_since = cf_units.Unit('hours since epoch', - calendar=t_coord.units.calendar) - frt_coord = cube.coord('forecast_reference_time').copy() + hours_since = cf_units.Unit( + "hours since epoch", calendar=t_coord.units.calendar + ) + frt_coord = cube.coord("forecast_reference_time").copy() frt_coord.convert_units(hours_since) t_coord = t_coord.copy() t_coord.convert_units(hours_since) @@ -879,8 +930,7 @@ def _missing_forecast_period(cube): fp = t - frt integer_fp = int(fp) if integer_fp != fp: - msg = 'Truncating floating point forecast period {} to ' \ - 'integer value {}' + msg = "Truncating floating point forecast period {} to " "integer value {}" warnings.warn(msg.format(fp, integer_fp)) fp = integer_fp fp_meaning = 1 # Hours @@ -919,18 +969,19 @@ def set_forecast_time(cube, grib): def set_fixed_surfaces(cube, grib, full3d_cube=None): - # Look for something we can export v_coord = grib_v_code = output_unit = None # Detect factories for hybrid vertical coordinates. hybrid_factories = [ - factory for factory in cube.aux_factories - if isinstance(factory, (HybridHeightFactory, HybridPressureFactory))] + factory + for factory in cube.aux_factories + if isinstance(factory, (HybridHeightFactory, HybridPressureFactory)) + ] if not hybrid_factories: hybrid_factory = None elif len(hybrid_factories) > 1: - msg = 'Data contains >1 vertical coordinate factory : {}' + msg = "Data contains >1 vertical coordinate factory : {}" raise ValueError(msg.format(hybrid_factories)) else: factory = hybrid_factories[0] @@ -943,14 +994,14 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): if hybrid_factory is not None: # N.B. in this case, there are additional operations, besides just # encoding v_coord : see below at end .. 
- v_coord = cube.coord('model_level_number') + v_coord = cube.coord("model_level_number") output_unit = cf_units.Unit("1") if isinstance(hybrid_factory, HybridHeightFactory): grib_v_code = 118 elif isinstance(hybrid_factory, HybridPressureFactory): grib_v_code = 119 else: - msg = 'Unrecognised factory type : {}' + msg = "Unrecognised factory type : {}" raise ValueError(msg.format(hybrid_factory)) # pressure @@ -974,37 +1025,41 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): # depth elif cube.coords("depth"): grib_v_code = 106 - output_unit = cf_units.Unit('m') + output_unit = cf_units.Unit("m") v_coord = cube.coord("depth") elif cube.coords("air_potential_temperature"): grib_v_code = 107 - output_unit = cf_units.Unit('K') + output_unit = cf_units.Unit("K") v_coord = cube.coord("air_potential_temperature") # unknown / absent else: - fs_v_coords = [coord for coord in cube.coords() if - 'GRIB_fixed_surface_type' in coord.attributes] + fs_v_coords = [ + coord + for coord in cube.coords() + if "GRIB_fixed_surface_type" in coord.attributes + ] if len(fs_v_coords) > 1: - fs_types = [c.attributes['GRIB_fixed_surface_type'] - for c in fs_v_coords] - raise ValueError("Multiple vertical-axis coordinates were found " - f"of fixed surface type: {fs_types}") + fs_types = [c.attributes["GRIB_fixed_surface_type"] for c in fs_v_coords] + raise ValueError( + "Multiple vertical-axis coordinates were found " + f"of fixed surface type: {fs_types}" + ) elif len(fs_v_coords) == 1: v_coord = fs_v_coords[0] - grib_v_code = v_coord.attributes['GRIB_fixed_surface_type'] + grib_v_code = v_coord.attributes["GRIB_fixed_surface_type"] else: # check for *ANY* height coords at all... - v_coords = cube.coords(axis='z') + v_coords = cube.coords(axis="z") if v_coords: # There are vertical coordinate(s), but we don't understand # them... 
- v_coords_str = ' ,'.join(["'{}'".format(c.name()) - for c in v_coords]) + v_coords_str = " ,".join(["'{}'".format(c.name()) for c in v_coords]) raise TranslationError( - 'The vertical-axis coordinate(s) ({}) ' - 'are not recognised or handled.'.format(v_coords_str)) + "The vertical-axis coordinate(s) ({}) " + "are not recognised or handled.".format(v_coords_str) + ) # What did we find? if v_coord is None: @@ -1017,10 +1072,8 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): eccodes.codes_set(grib, "scaledValueOfFirstFixedSurface", 0) # Set secondary surface = 'missing'. eccodes.codes_set(grib, "typeOfSecondFixedSurface", 255) - eccodes.codes_set(grib, "scaleFactorOfSecondFixedSurface", - GRIB_MISSING_LONG) - eccodes.codes_set(grib, "scaledValueOfSecondFixedSurface", - GRIB_MISSING_LONG) + eccodes.codes_set(grib, "scaleFactorOfSecondFixedSurface", GRIB_MISSING_LONG) + eccodes.codes_set(grib, "scaledValueOfSecondFixedSurface", GRIB_MISSING_LONG) elif not v_coord.has_bounds(): # No second surface output_v = v_coord.points[0] @@ -1034,10 +1087,8 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): eccodes.codes_set(grib, "scaleFactorOfFirstFixedSurface", 0) eccodes.codes_set(grib, "scaledValueOfFirstFixedSurface", output_v) eccodes.codes_set(grib, "typeOfSecondFixedSurface", 255) - eccodes.codes_set(grib, "scaleFactorOfSecondFixedSurface", - GRIB_MISSING_LONG) - eccodes.codes_set(grib, "scaledValueOfSecondFixedSurface", - GRIB_MISSING_LONG) + eccodes.codes_set(grib, "scaleFactorOfSecondFixedSurface", GRIB_MISSING_LONG) + eccodes.codes_set(grib, "scaledValueOfSecondFixedSurface", GRIB_MISSING_LONG) else: # bounded : set lower+upper surfaces output_v = v_coord.bounds[0] @@ -1049,29 +1100,30 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): eccodes.codes_set(grib, "typeOfSecondFixedSurface", grib_v_code) eccodes.codes_set(grib, "scaleFactorOfFirstFixedSurface", 0) eccodes.codes_set(grib, "scaleFactorOfSecondFixedSurface", 0) - eccodes.codes_set(grib, 
"scaledValueOfFirstFixedSurface", - int(round(output_v[0]))) - eccodes.codes_set(grib, "scaledValueOfSecondFixedSurface", - int(round(output_v[1]))) + eccodes.codes_set( + grib, "scaledValueOfFirstFixedSurface", int(round(output_v[0])) + ) + eccodes.codes_set( + grib, "scaledValueOfSecondFixedSurface", int(round(output_v[1])) + ) if hybrid_factory is not None: # Need to record ALL the level coefficients in a 'PV' vector. level_delta_coord = hybrid_factory.delta sigma_coord = hybrid_factory.sigma - model_levels = full3d_cube.coord('model_level_number').points + model_levels = full3d_cube.coord("model_level_number").points # Just check these make some kind of sense (!) - if model_levels.dtype.kind not in 'iu': - msg = 'model_level_number is not an integer: dtype={}.' + if model_levels.dtype.kind not in "iu": + msg = "model_level_number is not an integer: dtype={}." raise ValueError(msg.format(model_levels.dtype)) if np.min(model_levels) < 1: - msg = 'model_level_number must be > 0: minimum value = {}.' + msg = "model_level_number must be > 0: minimum value = {}." raise ValueError(msg.format(np.min(model_levels))) # Need to save enough levels for indexes up to [max(model_levels)] n_levels = np.max(model_levels) max_valid_nlevels = 9999 if n_levels > max_valid_nlevels: - msg = ('model_level_number values are > {} : ' - 'maximum value = {}.') + msg = "model_level_number values are > {} : " "maximum value = {}." raise ValueError(msg.format(max_valid_nlevels, n_levels)) # In sample data we have seen, there seems to be an extra missing data # value *before* each set of n-levels coefficients. @@ -1079,9 +1131,9 @@ def set_fixed_surfaces(cube, grib, full3d_cube=None): # I.E. 
sigma, delta = PV[i], PV[NV/2+i] : where i=1..n_levels n_coeffs = n_levels + 1 coeffs_array = np.zeros(n_coeffs * 2, dtype=np.float32) - for n_lev, height, sigma in zip(model_levels, - level_delta_coord.points, - sigma_coord.points): + for n_lev, height, sigma in zip( + model_levels, level_delta_coord.points, sigma_coord.points + ): # Record all the level coefficients coming from the 'full' cube. # Note: if some model levels are missing, we must still have the # coeffs at the correct index according to the model_level_number @@ -1103,20 +1155,23 @@ def set_time_range(time_coord, grib): """ if len(time_coord.points) != 1: - msg = 'Expected length one time coordinate, got {} points' + msg = "Expected length one time coordinate, got {} points" raise ValueError(msg.format(len(time_coord.points))) if time_coord.nbounds != 2: - msg = 'Expected time coordinate with two bounds, got {} bounds' + msg = "Expected time coordinate with two bounds, got {} bounds" raise ValueError(msg.format(time_coord.nbounds)) # Set type to hours and convert period to this unit. - eccodes.codes_set(grib, "indicatorOfUnitForTimeRange", - _TIME_RANGE_UNITS_INVERTED['hours']) - hours_since_units = cf_units.Unit('hours since epoch', - calendar=time_coord.units.calendar) - start_hours, end_hours = time_coord.units.convert(time_coord.bounds[0], - hours_since_units) + eccodes.codes_set( + grib, "indicatorOfUnitForTimeRange", _TIME_RANGE_UNITS_INVERTED["hours"] + ) + hours_since_units = cf_units.Unit( + "hours since epoch", calendar=time_coord.units.calendar + ) + start_hours, end_hours = time_coord.units.convert( + time_coord.bounds[0], hours_since_units + ) # Cast from np.float to Python int. The lengthOfTimeRange key is a # 4 byte integer so we cast to highlight truncation of any floating # point value. 
The grib_api will do the cast from float to int, but it @@ -1124,8 +1179,7 @@ def set_time_range(time_coord, grib): time_range_in_hours = end_hours - start_hours integer_hours = int(time_range_in_hours) if integer_hours != time_range_in_hours: - msg = 'Truncating floating point lengthOfTimeRange {} to ' \ - 'integer value {}' + msg = "Truncating floating point lengthOfTimeRange {} to " "integer value {}" warnings.warn(msg.format(time_range_in_hours, integer_hours)) eccodes.codes_set(grib, "lengthOfTimeRange", integer_hours) @@ -1147,14 +1201,14 @@ def set_time_increment(cell_method, grib): # Attempt to determine time increment from cell method intervals string. intervals = cell_method.intervals if intervals is not None and len(intervals) == 1: - interval, = intervals + (interval,) = intervals try: inc, units = interval.split() inc = float(inc) - if units in ('hr', 'hour', 'hours'): - units_type = _TIME_RANGE_UNITS_INVERTED['hours'] + if units in ("hr", "hour", "hours"): + units_type = _TIME_RANGE_UNITS_INVERTED["hours"] else: - raise ValueError('Unable to parse units of interval') + raise ValueError("Unable to parse units of interval") except ValueError: # Problem interpreting the interval string. inc = 0 @@ -1163,8 +1217,10 @@ def set_time_increment(cell_method, grib): # Cast to int as timeIncrement key is a 4 byte integer. integer_inc = int(inc) if integer_inc != inc: - warnings.warn('Truncating floating point timeIncrement {} to ' - 'integer value {}'.format(inc, integer_inc)) + warnings.warn( + "Truncating floating point timeIncrement {} to " + "integer value {}".format(inc, integer_inc) + ) inc = integer_inc eccodes.codes_set(grib, "indicatorOfUnitForTimeIncrement", units_type) @@ -1183,7 +1239,7 @@ def _cube_is_time_statistic(cube): """ result = False - stat_coord_name = 'percentile_over_time' + stat_coord_name = "percentile_over_time" cube_coord_names = [coord.name() for coord in cube.coords()] # Check our cube for time statistic indicators. 
@@ -1195,8 +1251,7 @@ def _cube_is_time_statistic(cube): result = True elif has_cell_methods: # Define accepted time names, including from coord_categorisations. - recognised_time_names = ['time', 'year', 'month', 'day', 'weekday', - 'season'] + recognised_time_names = ["time", "year", "month", "day", "weekday", "season"] latest_coordnames = cube.cell_methods[-1].coord_names if len(latest_coordnames) != 1: result = False @@ -1215,16 +1270,19 @@ def _spatial_statistic(cube): """ spatial_cell_methods = [ - cell_method for cell_method in cube.cell_methods if 'area' in - cell_method.coord_names] + cell_method + for cell_method in cube.cell_methods + if "area" in cell_method.coord_names + ] if len(spatial_cell_methods) > 1: raise ValueError("Cannot handle multiple 'area' cell methods") elif len(spatial_cell_methods[0].coord_names) > 1: - raise ValueError("Cannot handle multiple coordinate names in " - "the spatial processing related cell method. " - "Expected ('area',), got {!r}".format - (spatial_cell_methods[0].coord_names)) + raise ValueError( + "Cannot handle multiple coordinate names in " + "the spatial processing related cell method. " + "Expected ('area',), got {!r}".format(spatial_cell_methods[0].coord_names) + ) return spatial_cell_methods @@ -1233,10 +1291,12 @@ def statistical_method_code(cell_method_name): """ Decode cell_method string as statistic code integer. 
""" - statistic_code = _STATISTIC_TYPE_NAMES_INVERTED.get(cell_method_name, None) + statistic_code = _STATISTIC_TYPE_NAMES_INVERTED.get(cell_method_name) if statistic_code is None: - msg = ('Product definition section 4 contains an unsupported ' - 'statistical process type [{}] ') + msg = ( + "Product definition section 4 contains an unsupported " + "statistical process type [{}] " + ) raise TranslationError(msg.format(statistic_code)) return statistic_code @@ -1254,8 +1314,10 @@ def get_spatial_process_code(spatial_processing_type): break if spatial_processing_code is None: - msg = ('Product definition section 4 contains an unsupported ' - 'spatial processing or interpolation type: {} ') + msg = ( + "Product definition section 4 contains an unsupported " + "spatial processing or interpolation type: {} " + ) raise TranslationError(msg.format(spatial_processing_type)) return spatial_processing_code @@ -1267,12 +1329,14 @@ def set_ensemble(cube, grib): information. """ - if not (cube.coords('realization') and - len(cube.coord('realization').points) == 1): - raise ValueError("A cube 'realization' coordinate with one " - "point is required, but not present") - eccodes.codes_set(grib, "perturbationNumber", - int(cube.coord('realization').points[0])) + if not (cube.coords("realization") and len(cube.coord("realization").points) == 1): + raise ValueError( + "A cube 'realization' coordinate with one " + "point is required, but not present" + ) + eccodes.codes_set( + grib, "perturbationNumber", int(cube.coord("realization").points[0]) + ) # no encoding at present in iris-grib, set to missing eccodes.codes_set(grib, "numberOfForecastsInEnsemble", 255) eccodes.codes_set(grib, "typeOfEnsembleForecast", 255) @@ -1337,12 +1401,12 @@ def product_definition_template_6(cube, grib, full3d_cube=None): """ eccodes.codes_set(grib, "productDefinitionTemplateNumber", 6) product_definition_template_common(cube, grib, full3d_cube) - if not (cube.coords('percentile') and - 
len(cube.coord('percentile').points) == 1): - raise ValueError("A cube 'percentile' coordinate with one " - "point is required, but not present.") - eccodes.codes_set(grib, "percentileValue", - int(cube.coord('percentile').points[0])) + if not (cube.coords("percentile") and len(cube.coord("percentile").points) == 1): + raise ValueError( + "A cube 'percentile' coordinate with one " + "point is required, but not present." + ) + eccodes.codes_set(grib, "percentileValue", int(cube.coord("percentile").points[0])) def product_definition_template_8(cube, grib, full3d_cube=None): @@ -1368,12 +1432,17 @@ def product_definition_template_10(cube, grib, full3d_cube=None): """ eccodes.codes_set(grib, "productDefinitionTemplateNumber", 10) - if not (cube.coords('percentile_over_time') and - len(cube.coord('percentile_over_time').points) == 1): - raise ValueError("A cube 'percentile_over_time' coordinate with one " - "point is required, but not present.") - eccodes.codes_set(grib, "percentileValue", - int(cube.coord('percentile_over_time').points[0])) + if not ( + cube.coords("percentile_over_time") + and len(cube.coord("percentile_over_time").points) == 1 + ): + raise ValueError( + "A cube 'percentile_over_time' coordinate with one " + "point is required, but not present." + ) + eccodes.codes_set( + grib, "percentileValue", int(cube.coord("percentile_over_time").points[0]) + ) _product_definition_template_8_10_and_11(cube, grib) @@ -1403,14 +1472,14 @@ def _product_definition_template_8_10_and_11(cube, grib, full3d_cube=None): product_definition_template_common(cube, grib, full3d_cube) # Check for time coordinate. 
- time_coord = cube.coord('time') + time_coord = cube.coord("time") if len(time_coord.points) != 1: - msg = 'Expected length one time coordinate, got {} points' + msg = "Expected length one time coordinate, got {} points" raise ValueError(msg.format(time_coord.points)) if time_coord.nbounds != 2: - msg = 'Expected time coordinate with two bounds, got {} bounds' + msg = "Expected time coordinate with two bounds, got {} bounds" raise ValueError(msg.format(time_coord.nbounds)) # Extract the datetime-like object corresponding to the end of @@ -1439,25 +1508,27 @@ def _product_definition_template_8_10_and_11(cube, grib, full3d_cube=None): # time coord. if cube.cell_methods: time_cell_methods = [ - cell_method for cell_method in cube.cell_methods if 'time' in - cell_method.coord_names] + cell_method + for cell_method in cube.cell_methods + if "time" in cell_method.coord_names + ] if not time_cell_methods: - raise ValueError("Expected a cell method with a coordinate name " - "of 'time'") + raise ValueError( + "Expected a cell method with a coordinate name " "of 'time'" + ) if len(time_cell_methods) > 1: raise ValueError("Cannot handle multiple 'time' cell methods") - cell_method, = time_cell_methods + (cell_method,) = time_cell_methods if len(cell_method.coord_names) > 1: - raise ValueError("Cannot handle multiple coordinate names in " - "the time related cell method. Expected " - "('time',), got {!r}".format( - cell_method.coord_names)) + raise ValueError( + "Cannot handle multiple coordinate names in " + "the time related cell method. Expected " + "('time',), got {!r}".format(cell_method.coord_names) + ) # Type of statistical process (see code table 4.10) - statistic_type = _STATISTIC_TYPE_NAMES_INVERTED.get( - cell_method.method, 255 - ) + statistic_type = _STATISTIC_TYPE_NAMES_INVERTED.get(cell_method.method, 255) eccodes.codes_set(grib, "typeOfStatisticalProcessing", statistic_type) # Time increment i.e. 
interval of cell method (if any) @@ -1474,7 +1545,7 @@ def product_definition_template_15(cube, grib, full3d_cube=None): """ # Encode type of spatial processing (see code table 4.15) - spatial_processing_type = cube.attributes['spatial_processing_type'] + spatial_processing_type = cube.attributes["spatial_processing_type"] spatial_processing = get_spatial_process_code(spatial_processing_type) # Encode statistical process and number of points @@ -1484,8 +1555,10 @@ def product_definition_template_15(cube, grib, full3d_cube=None): # Only a limited number of spatial processing types are supported. if spatial_processing not in _SPATIAL_PROCESSING_TYPES.keys(): - msg = ('Cannot save Product Definition Type 4.15 with spatial ' - 'processing type {}'.format(spatial_processing)) + msg = ( + "Cannot save Product Definition Type 4.15 with spatial " + "processing type {}".format(spatial_processing) + ) raise ValueError(msg) if statistical_process is not None: @@ -1496,8 +1569,10 @@ def product_definition_template_15(cube, grib, full3d_cube=None): cell_method_name = spatial_stats[0].method statistical_process = statistical_method_code(cell_method_name) else: - raise ValueError("Could not find a suitable cell_method to save " - "as a spatial statistical process.") + raise ValueError( + "Could not find a suitable cell_method to save " + "as a spatial statistical process." 
+ ) # Set GRIB messages eccodes.codes_set(grib, "productDefinitionTemplateNumber", 15) @@ -1521,7 +1596,7 @@ def product_definition_template_40(cube, grib, full3d_cube=None): """ eccodes.codes_set(grib, "productDefinitionTemplateNumber", 40) product_definition_template_common(cube, grib) - constituent_type = cube.attributes['WMO_constituent_type'] + constituent_type = cube.attributes["WMO_constituent_type"] eccodes.codes_set(grib, "constituentType", constituent_type) @@ -1532,25 +1607,25 @@ def product_definition_section(cube, grib, full3d_cube=None): """ if not cube.coord("time").has_bounds(): - if cube.coords('realization'): + if cube.coords("realization"): # ensemble forecast (template 4.1) pdt = product_definition_template_1(cube, grib, full3d_cube) - elif 'WMO_constituent_type' in cube.attributes: + elif "WMO_constituent_type" in cube.attributes: # forecast for atmospheric chemical constiuent (template 4.40) product_definition_template_40(cube, grib, full3d_cube) - elif 'spatial_processing_type' in cube.attributes: + elif "spatial_processing_type" in cube.attributes: # spatial process (template 4.15) product_definition_template_15(cube, grib, full3d_cube) - elif cube.coords('percentile'): + elif cube.coords("percentile"): product_definition_template_6(cube, grib, full3d_cube) else: # forecast (template 4.0) product_definition_template_0(cube, grib, full3d_cube) elif _cube_is_time_statistic(cube): - if cube.coords('realization'): + if cube.coords("realization"): # time processed (template 4.11) pdt = product_definition_template_11 - elif cube.coords('percentile_over_time'): + elif cube.coords("percentile_over_time"): # time processed as percentile (template 4.10) pdt = product_definition_template_10 else: @@ -1559,12 +1634,14 @@ def product_definition_section(cube, grib, full3d_cube=None): try: pdt(cube, grib, full3d_cube) except ValueError as e: - raise ValueError('Saving to GRIB2 failed: the cube is not suitable' - ' for saving as a time processed statistic 
GRIB' - ' message. {}'.format(e)) + raise ValueError( + "Saving to GRIB2 failed: the cube is not suitable" + " for saving as a time processed statistic GRIB" + " message. {}".format(e) + ) else: # Don't know how to handle this kind of data - msg = 'A suitable product template could not be deduced' + msg = "A suitable product template could not be deduced" raise TranslationError(msg) @@ -1574,6 +1651,7 @@ def product_definition_section(cube, grib, full3d_cube=None): # ############################################################################### + def data_section(cube, grib): # Masked data? if ma.isMaskedArray(cube.data): @@ -1593,12 +1671,13 @@ def data_section(cube, grib): data = cube.data # units scaling - grib2_info = gptx.cf_phenom_to_grib2_info(cube.standard_name, - cube.long_name) + grib2_info = gptx.cf_phenom_to_grib2_info(cube.standard_name, cube.long_name) if grib2_info is None: # for now, just allow this - warnings.warn('Unable to determine Grib2 parameter code for cube.\n' - 'Message data may not be correctly scaled.') + warnings.warn( + "Unable to determine Grib2 parameter code for cube.\n" + "Message data may not be correctly scaled." + ) else: if cube.units != grib2_info.units: data = cube.units.convert(data, grib2_info.units) @@ -1616,11 +1695,14 @@ def data_section(cube, grib): eccodes.codes_set_double_array(grib, "values", data.flatten()) # todo: check packing accuracy? + + # print("packingError", eccodes.get_get_double(grib, "packingError")) ############################################################################### + def gribbability_check(cube): "We always need the following things for grib saving." 
diff --git a/iris_grib/grib_phenom_translation.py b/iris_grib/grib_phenom_translation/__init__.py similarity index 51% rename from iris_grib/grib_phenom_translation.py rename to iris_grib/grib_phenom_translation/__init__.py index a1680cbd9..1fcb5e606 100644 --- a/iris_grib/grib_phenom_translation.py +++ b/iris_grib/grib_phenom_translation/__init__.py @@ -2,28 +2,37 @@ # # This file is part of iris-grib and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -''' +""" Provide grib 1 and 2 phenomenon translations to + from CF terms. This is done by wrapping '_grib_cf_map.py', which is in a format provided by the metadata translation project. + Currently supports only these ones: * grib1 --> cf * grib2 --> cf * cf --> grib2 -''' +""" + from collections import namedtuple -import re import warnings import cf_units -from . import _grib_cf_map as grcf +from iris_grib import _grib_cf_map as grcf +from iris_grib.grib_phenom_translation._gribcode import GRIBCode import iris.std_names +__all__ = [ + "GRIBCode", + "cf_phenom_to_grib2_info", + "grib1_phenom_to_cf_info", + "grib2_phenom_to_cf_info", +] + class _LookupTable(dict): """ @@ -34,6 +43,7 @@ class _LookupTable(dict): (but it is still possible to remove keys) """ + def __init__(self, *args, **kwargs): self._super = super() self._super.__init__(*args, **kwargs) @@ -45,32 +55,41 @@ def __getitem__(self, key): def __setitem__(self, key, value): if key in self and self[key] is not value: - raise KeyError('Attempted to set dict[{}] = {}, ' - 'but this is already set to {}.'.format( - key, value, self[key])) + raise KeyError( + "Attempted to set dict[{}] = {}, " + "but this is already set to {}.".format(key, value, self[key]) + ) self._super.__setitem__(key, value) # Define namedtuples for keys+values of the Grib1 lookup table. 
Grib1CfKey = namedtuple( - 'Grib1CfKey', - ('table2_version', 'centre_number', 'param_number')) + "Grib1CfKey", ("table2_version", "centre_number", "param_number") +) # NOTE: this form is currently used for both Grib1 *and* Grib2 Grib1CfData = namedtuple( - 'Grib1CfData', - ('standard_name', 'long_name', 'units', 'set_height')) + "Grib1CfData", ("standard_name", "long_name", "units", "set_height") +) # Create the grib1-to-cf lookup table. + def _make_grib1_cf_table(): - """ Build the Grib1 to CF phenomenon translation table. """ + """Build the Grib1 to CF phenomenon translation table.""" table = _LookupTable() - def _make_grib1_cf_entry(table2_version, centre_number, param_number, - standard_name, long_name, units, set_height=None): + def _make_grib1_cf_entry( + table2_version, + centre_number, + param_number, + standard_name, + long_name, + units, + set_height=None, + ): """ Check data, convert types and create a new _GRIB1_CF_TABLE key/value. @@ -79,24 +98,31 @@ def _make_grib1_cf_entry(table2_version, centre_number, param_number, e.g. "2-metre tempererature". """ - grib1_key = Grib1CfKey(table2_version=int(table2_version), - centre_number=int(centre_number), - param_number=int(param_number)) + grib1_key = Grib1CfKey( + table2_version=int(table2_version), + centre_number=int(centre_number), + param_number=int(param_number), + ) if standard_name is not None: if standard_name not in iris.std_names.STD_NAMES: - warnings.warn('{} is not a recognised CF standard name ' - '(skipping).'.format(standard_name)) + warnings.warn( + "{} is not a recognised CF standard name " "(skipping).".format( + standard_name + ) + ) return None # convert units string to iris Unit (i.e. 
mainly, check it is good) a_cf_unit = cf_units.Unit(units) - cf_data = Grib1CfData(standard_name=standard_name, - long_name=long_name, - units=a_cf_unit, - set_height=set_height) + cf_data = Grib1CfData( + standard_name=standard_name, + long_name=long_name, + units=a_cf_unit, + set_height=set_height, + ) return (grib1_key, cf_data) # Interpret the imported Grib1-to-CF table. - for (grib1data, cfdata) in grcf.GRIB1_LOCAL_TO_CF.items(): + for grib1data, cfdata in grcf.GRIB1_LOCAL_TO_CF.items(): assert grib1data.edition == 1 association_entry = _make_grib1_cf_entry( table2_version=grib1data.t2version, @@ -104,27 +130,35 @@ def _make_grib1_cf_entry(table2_version, centre_number, param_number, param_number=grib1data.iParam, standard_name=cfdata.standard_name, long_name=cfdata.long_name, - units=cfdata.units) + units=cfdata.units, + ) if association_entry is not None: key, value = association_entry table[key] = value # Do the same for special Grib1 codes that include an implied height level. - for (grib1data, (cfdata, extra_dimcoord)) \ - in grcf.GRIB1_LOCAL_TO_CF_CONSTRAINED.items(): + for grib1data, ( + cfdata, + extra_dimcoord, + ) in grcf.GRIB1_LOCAL_TO_CF_CONSTRAINED.items(): assert grib1data.edition == 1 - if extra_dimcoord.standard_name != 'height': - raise ValueError('Got implied dimension coord of "{}", ' - 'currently can only handle "height".'.format( - extra_dimcoord.standard_name)) - if extra_dimcoord.units != 'm': - raise ValueError('Got implied dimension units of "{}", ' - 'currently can only handle "m".'.format( - extra_dimcoord.units)) + if extra_dimcoord.standard_name != "height": + raise ValueError( + 'Got implied dimension coord of "{}", ' + 'currently can only handle "height".'.format( + extra_dimcoord.standard_name + ) + ) + if extra_dimcoord.units != "m": + raise ValueError( + 'Got implied dimension units of "{}", ' + 'currently can only handle "m".'.format(extra_dimcoord.units) + ) if len(extra_dimcoord.points) != 1: - raise ValueError('Implied 
dimension has {} points, ' - 'currently can only handle 1.'.format( - len(extra_dimcoord.points))) + raise ValueError( + "Implied dimension has {} points, " + "currently can only handle 1.".format(len(extra_dimcoord.points)) + ) association_entry = _make_grib1_cf_entry( table2_version=int(grib1data.t2version), centre_number=int(grib1data.centre), @@ -132,7 +166,8 @@ def _make_grib1_cf_entry(table2_version, centre_number, param_number, standard_name=cfdata.standard_name, long_name=cfdata.long_name, units=cfdata.units, - set_height=extra_dimcoord.points[0]) + set_height=extra_dimcoord.points[0], + ) if association_entry is not None: key, value = association_entry table[key] = value @@ -146,18 +181,20 @@ def _make_grib1_cf_entry(table2_version, centre_number, param_number, # Define a namedtuple for the keys of the Grib2 lookup table. Grib2CfKey = namedtuple( - 'Grib2CfKey', - ('param_discipline', 'param_category', 'param_number')) + "Grib2CfKey", ("param_discipline", "param_category", "param_number") +) # Create the grib2-to-cf lookup table. + def _make_grib2_to_cf_table(): - """ Build the Grib2 to CF phenomenon translation table. """ + """Build the Grib2 to CF phenomenon translation table.""" table = _LookupTable() - def _make_grib2_cf_entry(param_discipline, param_category, param_number, - standard_name, long_name, units): + def _make_grib2_cf_entry( + param_discipline, param_category, param_number, standard_name, long_name, units + ): """ Check data, convert types and make a _GRIB2_CF_TABLE key/value pair. @@ -166,20 +203,27 @@ def _make_grib2_cf_entry(param_discipline, param_category, param_number, e.g. "2-metre tempererature". 
""" - grib2_key = Grib2CfKey(param_discipline=int(param_discipline), - param_category=int(param_category), - param_number=int(param_number)) + grib2_key = Grib2CfKey( + param_discipline=int(param_discipline), + param_category=int(param_category), + param_number=int(param_number), + ) if standard_name is not None: if standard_name not in iris.std_names.STD_NAMES: - warnings.warn('{} is not a recognised CF standard name ' - '(skipping).'.format(standard_name)) + warnings.warn( + "{} is not a recognised CF standard name " "(skipping).".format( + standard_name + ) + ) return None # convert units string to iris Unit (i.e. mainly, check it is good) a_cf_unit = cf_units.Unit(units) - cf_data = Grib1CfData(standard_name=standard_name, - long_name=long_name, - units=a_cf_unit, - set_height=None) + cf_data = Grib1CfData( + standard_name=standard_name, + long_name=long_name, + units=a_cf_unit, + set_height=None, + ) return (grib2_key, cf_data) # Interpret the grib2 info from grib_cf_map @@ -191,7 +235,8 @@ def _make_grib2_cf_entry(param_discipline, param_category, param_number, param_number=grib2data.number, standard_name=cfdata.standard_name, long_name=cfdata.long_name, - units=cfdata.units) + units=cfdata.units, + ) if association_entry is not None: key, value = association_entry table[key] = value @@ -204,24 +249,21 @@ def _make_grib2_cf_entry(param_discipline, param_category, param_number, # Define namedtuples for key+values of the cf-to-grib2 lookup table. -CfGrib2Key = namedtuple( - 'CfGrib2Key', - ('standard_name', 'long_name')) +CfGrib2Key = namedtuple("CfGrib2Key", ("standard_name", "long_name")) -CfGrib2Data = namedtuple( - 'CfGrib2Data', - ('discipline', 'category', 'number', 'units')) +CfGrib2Data = namedtuple("CfGrib2Data", ("discipline", "category", "number", "units")) # Create the cf-to-grib2 lookup table. + def _make_cf_to_grib2_table(): - """ Build the Grib1 to CF phenomenon translation table. 
""" + """Build the Grib1 to CF phenomenon translation table.""" table = _LookupTable() - def _make_cf_grib2_entry(standard_name, long_name, - param_discipline, param_category, param_number, - units): + def _make_cf_grib2_entry( + standard_name, long_name, param_discipline, param_category, param_number, units + ): """ Check data, convert types and make a new _CF_TABLE key/value pair. @@ -230,16 +272,21 @@ def _make_cf_grib2_entry(standard_name, long_name, if standard_name is not None: long_name = None if standard_name not in iris.std_names.STD_NAMES: - warnings.warn('{} is not a recognised CF standard name ' - '(skipping).'.format(standard_name)) + warnings.warn( + "{} is not a recognised CF standard name " "(skipping).".format( + standard_name + ) + ) return None cf_key = CfGrib2Key(standard_name, long_name) # convert units string to iris Unit (i.e. mainly, check it is good) a_cf_unit = cf_units.Unit(units) - grib2_data = CfGrib2Data(discipline=int(param_discipline), - category=int(param_category), - number=int(param_number), - units=a_cf_unit) + grib2_data = CfGrib2Data( + discipline=int(param_discipline), + category=int(param_category), + number=int(param_number), + units=a_cf_unit, + ) return (cf_key, grib2_data) # Interpret the imported CF-to-Grib2 table into a lookup table @@ -252,7 +299,8 @@ def _make_cf_grib2_entry(standard_name, long_name, param_discipline=grib2data.discipline, param_category=grib2data.category, param_number=grib2data.number, - units=a_cf_unit) + units=a_cf_unit, + ) if association_entry is not None: key, value = association_entry table[key] = value @@ -265,6 +313,7 @@ def _make_cf_grib2_entry(standard_name, long_name, # Interface functions for translation lookup + def grib1_phenom_to_cf_info(table2_version, centre_number, param_number): """ Lookup grib-1 parameter --> cf_data or None. 
@@ -277,9 +326,11 @@ def grib1_phenom_to_cf_info(table2_version, centre_number, param_number): * set_height : a scalar 'height' value , or None """ - grib1_key = Grib1CfKey(table2_version=table2_version, - centre_number=centre_number, - param_number=param_number) + grib1_key = Grib1CfKey( + table2_version=table2_version, + centre_number=centre_number, + param_number=param_number, + ) return _GRIB1_CF_TABLE[grib1_key] @@ -294,9 +345,11 @@ def grib2_phenom_to_cf_info(param_discipline, param_category, param_number): * units : a :class:`cf_units.Unit` """ - grib2_key = Grib2CfKey(param_discipline=int(param_discipline), - param_category=int(param_category), - param_number=int(param_number)) + grib2_key = Grib2CfKey( + param_discipline=int(param_discipline), + param_category=int(param_category), + param_number=int(param_number), + ) return _GRIB2_CF_TABLE[grib2_key] @@ -316,70 +369,3 @@ def cf_phenom_to_grib2_info(standard_name, long_name=None): if standard_name is not None: long_name = None return _CF_GRIB2_TABLE[(standard_name, long_name)] - - -class GRIBCode(namedtuple('GRIBCode', - 'edition discipline category number')): - """ - An object representing a specific Grib phenomenon identity. - - Basically a namedtuple of (edition, discipline, category, number). - - Also provides a string representation, and supports creation from: another - similar object; a tuple of numbers; or any string with 4 separate decimal - numbers in it. - - """ - __slots__ = () - - def __new__(cls, edition_or_string, - discipline=None, category=None, number=None): - args = (edition_or_string, discipline, category, number) - nargs = sum(arg is not None for arg in args) - if nargs == 1: - # Single argument: convert to a string and extract 4 integers. - # NOTE: this also allows input from a GRIBCode, or a plain tuple. 
- edition_or_string = str(edition_or_string) - edition, discipline, category, number = \ - cls._fournums_from_gribcode_string(edition_or_string) - elif nargs == 4: - edition = edition_or_string - edition, discipline, category, number = [ - int(arg) - for arg in (edition, discipline, category, number)] - else: - msg = ('Cannot create GRIBCode from {} arguments, ' - '"GRIBCode{!r}" : ' - 'expected either 1 or 4 non-None arguments.') - raise ValueError(msg.format(nargs, args)) - - return super(GRIBCode, cls).__new__( - cls, edition, discipline, category, number) - - RE_PARSE_FOURNUMS = re.compile(4 * r'[^\d]*(\d*)') - - @classmethod - def _fournums_from_gribcode_string(cls, edcn_string): - parsed_ok = False - nums_match = cls.RE_PARSE_FOURNUMS.match(edcn_string).groups() - if nums_match is not None: - try: - nums = [int(grp) for grp in nums_match] - parsed_ok = True - except ValueError: - pass - - if not parsed_ok: - msg = ('Invalid argument for GRIBCode creation, ' - '"GRIBCode({!r})" : ' - 'requires 4 numbers, separated by non-numerals.') - raise ValueError(msg.format(edcn_string)) - - return nums - - PRINT_FORMAT = 'GRIB{:1d}:d{:03d}c{:03d}n{:03d}' - - def __str__(self): - result = self.PRINT_FORMAT.format( - self.edition, self.discipline, self.category, self.number) - return result diff --git a/iris_grib/grib_phenom_translation/_gribcode.py b/iris_grib/grib_phenom_translation/_gribcode.py new file mode 100644 index 000000000..5dfe5c5d9 --- /dev/null +++ b/iris_grib/grib_phenom_translation/_gribcode.py @@ -0,0 +1,191 @@ +# Copyright iris-grib contributors +# +# This file is part of iris-grib and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+""" +Provide object to represent grib phenomena + +For use as cube attributes, freely convertible to+from strings +""" + +from __future__ import annotations +from dataclasses import dataclass +import re + + +def _invalid_edition(edition): + msg = f"Invalid grib edition, {edition!r}, for GRIBcode : " "can only be 1 or 2." + raise ValueError(msg) + + +def _invalid_nargs(args): + nargs = len(args) + msg = ( + f"Cannot create GRIBCode from {nargs} arguments, " + f"GRIBCode({args!r}) : expects either 1 or 4 arguments." + ) + raise ValueError(msg) + + +# Regexp to extract four integers from a string: +# - for four times ... +# - match any non-digits (including none) and discard +# - then match any digits (including none), and return as a "group" +_RE_PARSE_FOURNUMS = re.compile(4 * r"[^\d]*(\d*)") + + +def _fournums_from_gribcode_string(grib_param_string): + parsed_ok = True + # get the numbers.. + match_groups = _RE_PARSE_FOURNUMS.match(grib_param_string).groups() + # N.B. always produces 4 "strings of digits", but some can be empty + try: + nums = [int(grp) for grp in match_groups] + except ValueError: + parsed_ok = False + + if not parsed_ok: + msg = ( + "Invalid argument for GRIBCode creation, " + '"GRIBCode({!r})" : ' + "requires 4 numbers, separated by non-numerals." + ) + raise ValueError(msg.format(grib_param_string)) + + return nums + + +def GRIBCode(edition, *args, **kwargs): + """ + Make an object representing a specific Grib phenomenon identity. + + The class of the result, and the list of its properties, depend on whether + 'edition' is 1 or 2. + + One of : + + * GRIBCode(edition=1, table_version, centre_number, number) + * GRIBCode(edition=2, discipline, category, number) + + Either provides a string representation, and supports creation from: + keywords, another similar object; a tuple of numbers; or any string with 4 + separate decimal numbers in it. 
+ """ + if edition is None: + _invalid_nargs(()) + + # Convert single argument to *args + if not args and not kwargs: + # Convert to a string and extract 4 integers. + # NOTE: this also allows input from a GRIBCode, or a plain tuple. + edition_string = str(edition) + edition, arg2, arg3, arg4 = _fournums_from_gribcode_string(edition_string) + args = [arg2, arg3, arg4] + + # Check edition + select the relevant keywords for the edition + if edition not in (1, 2): + _invalid_edition(edition) + + # Choose which actual type we will return. This also determines the + # argument (keyword) names. + instance_cls = {1: GRIBCode1, 2: GRIBCode2}[edition] + + # Convert all of (edition, *args) into **kwargs + if not args: + # Ignore that edition= is a required arg -- make it a kwarg + kwargs["edition"] = edition + else: + # Include edition, which just makes it simpler + args = tuple([edition] + list(args)) + nargs = len(args) + if nargs != 4: + _invalid_nargs(args) + + for i_arg, (arg, name) in enumerate(zip(args, instance_cls.argnames)): + if name in kwargs: + msg = ( + f"Keyword {name!r}={kwargs[name]!r} " + f"is not compatible with a {i_arg + 1}th argument." + ) + raise ValueError(msg) + else: + kwargs[name] = arg + + result = instance_cls(**kwargs) + return result + + +@dataclass +class GenericConcreteGRIBCode: + """ + Common behaviour for GRIBCode1 and GRIBCode2 + + GRIBCode1 and GRIBCode2 inherit this, making both dataclasses. + They contain different data properties. + """ + + def __init__(self, **kwargs): + # Note : only support creation with kargs. In GRIBCode(), any args + # get translated into kwargs + # Check against "_edition", defined by the specific subclass. 
+ assert kwargs["edition"] == self._edition + for key, value in kwargs.items(): + setattr(self, key, value) + + def _broken_repr(self): + result = ( + f"<{self.__class__.__name__} with invalid content: " f"{self.__dict__}>" + ) + return result + + def __str__(self): + edition = self.edition + try: + # NB fallback to "invalid" if edition not one of (1, 2) + format = { + 1: "GRIB{:1d}:t{:03d}c{:03d}n{:03d}", + 2: "GRIB{:1d}:d{:03d}c{:03d}n{:03d}", + }[edition] + arg_values = [getattr(self, argname) for argname in self.argnames] + # NB fallback to "invalid" if format fails + result = format.format(*arg_values) + except Exception: + # Invalid content somewhere : fall back on default repr + result = self._broken_repr() + + return result + + def __repr__(self): + edition = self.edition + try: + assert edition in (1, 2) + key_value_strings = [] + for argname in self.argnames: + value = getattr(self, argname, None) + assert isinstance(value, int) + key_value_strings.append(f"{argname}={value}") + inner_text = ", ".join(key_value_strings) + result = f"GRIBCode({inner_text})" + except Exception: + # Invalid content somewhere : fall back on a default repr + result = self._broken_repr() + + return result + + +class GRIBCode1(GenericConcreteGRIBCode): + edition: int = 1 + table_version: int | None = None + centre_number: int | None = None + number: int | None = None + argnames = ["edition", "table_version", "centre_number", "number"] + _edition = 1 # Constructor argument should always match this + + +class GRIBCode2(GenericConcreteGRIBCode): + edition: int = 2 + discipline: int | None = None + category: int | None = None + number: int | None = None + argnames = ["edition", "discipline", "category", "number"] + _edition = 2 # Constructor argument should always match this diff --git a/iris_grib/message.py b/iris_grib/message.py index 80a7f8b2c..f416dd7f5 100644 --- a/iris_grib/message.py +++ b/iris_grib/message.py @@ -21,10 +21,10 @@ # Alias names for eccodes spatial computed 
keys. KEY_ALIAS = { - 'latitude': 'latitudes', - 'longitude': 'longitudes', - 'latitudes': 'latitude', - 'longitudes': 'longitude', + "latitude": "latitudes", + "longitude": "longitudes", + "latitudes": "latitude", + "longitudes": "longitude", } @@ -33,6 +33,7 @@ class _OpenFileRef: A reference to an open file that ensures that the file is closed when the object is garbage collected. """ + def __init__(self, open_file): self.open_file = open_file @@ -61,13 +62,12 @@ def messages_from_filename(filename): Name of the file to generate fields from. """ - grib_fh = open(filename, 'rb') + grib_fh = open(filename, "rb") # create an _OpenFileRef to manage the closure of the file handle file_ref = _OpenFileRef(grib_fh) while True: - grib_id = eccodes.codes_new_from_file(grib_fh, - eccodes.CODES_PRODUCT_GRIB) + grib_id = eccodes.codes_new_from_file(grib_fh, eccodes.CODES_PRODUCT_GRIB) if grib_id is None: break offset = eccodes.codes_get_message_offset(grib_id) @@ -130,38 +130,46 @@ def data(self): """ sections = self.sections grid_section = sections[3] - if grid_section['sourceOfGridDefinition'] != 0: + if grid_section["sourceOfGridDefinition"] != 0: raise TranslationError( - 'Unsupported source of grid definition: {}'.format( - grid_section['sourceOfGridDefinition'])) + "Unsupported source of grid definition: {}".format( + grid_section["sourceOfGridDefinition"] + ) + ) - reduced = (grid_section['numberOfOctectsForNumberOfPoints'] != 0 or - grid_section['interpretationOfNumberOfPoints'] != 0) - template = grid_section['gridDefinitionTemplateNumber'] + reduced = ( + grid_section["numberOfOctectsForNumberOfPoints"] != 0 + or grid_section["interpretationOfNumberOfPoints"] != 0 + ) + template = grid_section["gridDefinitionTemplateNumber"] if reduced and template not in (40,): - raise TranslationError('Grid definition Section 3 contains ' - 'unsupported quasi-regular grid.') + raise TranslationError( + "Grid definition Section 3 contains " "unsupported quasi-regular grid." 
+ ) if template in _SUPPORTED_GRID_DEFINITIONS: # We can ignore the first two bits (i-neg, j-pos) because # that is already captured in the coordinate values. - if grid_section['scanningMode'] & 0x3f: - msg = 'Unsupported scanning mode: {}'.format( - grid_section['scanningMode']) + if grid_section["scanningMode"] & 0x3F: + msg = "Unsupported scanning mode: {}".format( + grid_section["scanningMode"] + ) raise TranslationError(msg) if template in (20, 30, 90): - shape = (grid_section['Ny'], grid_section['Nx']) + shape = (grid_section["Ny"], grid_section["Nx"]) elif template == 140: - shape = (grid_section['numberOfPointsAlongYAxis'], - grid_section['numberOfPointsAlongXAxis']) + shape = ( + grid_section["numberOfPointsAlongYAxis"], + grid_section["numberOfPointsAlongXAxis"], + ) elif template == 40 and reduced: - shape = (grid_section['numberOfDataPoints'],) + shape = (grid_section["numberOfDataPoints"],) else: - shape = (grid_section['Nj'], grid_section['Ni']) - proxy = _DataProxy(shape, np.dtype('f8'), self._recreate_raw) + shape = (grid_section["Nj"], grid_section["Ni"]) + proxy = _DataProxy(shape, np.dtype("f8"), self._recreate_raw) data = as_lazy_data(proxy) else: - fmt = 'Grid definition template {} is not supported' + fmt = "Grid definition template {} is not supported" raise TranslationError(fmt.format(template)) return data @@ -175,7 +183,7 @@ def __getstate__(self): return self -class _MessageLocation(namedtuple('_MessageLocation', 'filename offset')): +class _MessageLocation(namedtuple("_MessageLocation", "filename offset")): """A reference to a specific GRIB message within a file.""" __slots__ = () @@ -187,7 +195,7 @@ def __call__(self): class _DataProxy: """A reference to the data payload of a single GRIB message.""" - __slots__ = ('shape', 'dtype', 'recreate_raw') + __slots__ = ("shape", "dtype", "recreate_raw") def __init__(self, shape, dtype, recreate_raw): self.shape = shape @@ -224,15 +232,18 @@ def _bitmap(self, bitmap_section): """ # Reference 
GRIB2 Code Table 6.0. - bitMapIndicator = bitmap_section['bitMapIndicator'] + bitMapIndicator = bitmap_section["bitMapIndicator"] if bitMapIndicator == 0: - bitmap = bitmap_section['bitmap'] + bitmap = bitmap_section["bitmap"] elif bitMapIndicator == 255: bitmap = None else: - msg = 'Bitmap Section 6 contains unsupported ' \ - 'bitmap indicator [{}]'.format(bitMapIndicator) + msg = ( + "Bitmap Section 6 contains unsupported " "bitmap indicator [{}]".format( + bitMapIndicator + ) + ) raise TranslationError(msg) return bitmap @@ -269,10 +280,11 @@ def __getitem__(self, keys): _data[bitmap.astype(bool)] = data # `ma.masked_array` masks where input = 1, the opposite of # the behaviour specified by the GRIB spec. - data = ma.masked_array(_data, mask=np.logical_not(bitmap), - fill_value=np.nan) + data = ma.masked_array( + _data, mask=np.logical_not(bitmap), fill_value=np.nan + ) else: - msg = 'Shapes of data and bitmap do not match.' + msg = "Shapes of data and bitmap do not match." raise TranslationError(msg) data = data.reshape(self.shape) @@ -281,8 +293,10 @@ def __getitem__(self, keys): return result def __repr__(self): - msg = '<{self.__class__.__name__} shape={self.shape} ' \ - 'dtype={self.dtype!r} recreate_raw={self.recreate_raw!r} ' + msg = ( + "<{self.__class__.__name__} shape={self.shape} " + "dtype={self.dtype!r} recreate_raw={self.recreate_raw!r} " + ) return msg.format(self=self) def __getstate__(self): @@ -299,17 +313,16 @@ class _RawGribMessage: of the input GRIB message. 
""" - _NEW_SECTION_KEY_MATCHER = re.compile(r'section([0-9]{1})Length') + + _NEW_SECTION_KEY_MATCHER = re.compile(r"section([0-9]{1})Length") @staticmethod def from_file_offset(filename, offset): - with open(filename, 'rb') as f: + with open(filename, "rb") as f: f.seek(offset) - message_id = eccodes.codes_new_from_file( - f, eccodes.CODES_PRODUCT_GRIB - ) + message_id = eccodes.codes_new_from_file(f, eccodes.CODES_PRODUCT_GRIB) if message_id is None: - fmt = 'Invalid GRIB message: {} @ {}' + fmt = "Invalid GRIB message: {} @ {}" raise RuntimeError(fmt.format(filename, offset)) return _RawGribMessage(message_id) @@ -384,11 +397,10 @@ def _get_message_sections(self): key_match = re.match(self._NEW_SECTION_KEY_MATCHER, key_name) if key_match is not None: new_section = int(key_match.group(1)) - elif key_name == '7777': + elif key_name == "7777": new_section = 8 if section != new_section: - sections[section] = Section(self._message_id, section, - section_keys) + sections[section] = Section(self._message_id, section, section_keys) section_keys = [] section = new_section section_keys.append(key_name) @@ -405,6 +417,7 @@ class Section: write to the file. """ + # Keys are read from the file as required and values are cached. 
# Within GribMessage instances all keys will have been fetched @@ -417,14 +430,13 @@ def __init__(self, message_id, number, keys): def __repr__(self): items = [] for key in self._keys: - value = self._cache.get(key, '?') - items.append('{}={}'.format(key, value)) - return '<{} {}: {}>'.format(type(self).__name__, self._number, - ', '.join(items)) + value = self._cache.get(key, "?") + items.append("{}={}".format(key, value)) + return "<{} {}: {}>".format(type(self).__name__, self._number, ", ".join(items)) def __getitem__(self, key): if key not in self._cache: - if key == 'numberOfSection': + if key == "numberOfSection": value = self._number else: if key not in self._keys: @@ -445,8 +457,9 @@ def __setitem__(self, key, value): if key in self._cache: self._cache[key] = value else: - raise KeyError('{!r} cannot be redefined in ' - 'section {}'.format(key, self._number)) + raise KeyError( + "{!r} cannot be redefined in " "section {}".format(key, self._number) + ) def _get_key_value(self, key): """ @@ -461,20 +474,27 @@ def _get_key_value(self, key): message. """ - vector_keys = ('codedValues', 'pv', 'satelliteSeries', - 'satelliteNumber', 'instrumentType', - 'scaleFactorOfCentralWaveNumber', - 'scaledValueOfCentralWaveNumber', - 'longitude', 'latitude', - 'longitudes', 'latitudes') + vector_keys = ( + "codedValues", + "pv", + "satelliteSeries", + "satelliteNumber", + "instrumentType", + "scaleFactorOfCentralWaveNumber", + "scaledValueOfCentralWaveNumber", + "longitude", + "latitude", + "longitudes", + "latitudes", + ) if key in vector_keys: res = eccodes.codes_get_array(self._message_id, key) - elif key == 'bitmap': + elif key == "bitmap": # The bitmap is stored as contiguous boolean bits, one bit for each # data point. ecCodes returns these as strings, so it must be # type-cast to return an array of ints (0, 1). 
res = eccodes.codes_get_array(self._message_id, key, int) - elif key in ('typeOfFirstFixedSurface', 'typeOfSecondFixedSurface'): + elif key in ("typeOfFirstFixedSurface", "typeOfSecondFixedSurface"): # By default these values are returned as unhelpful strings but # we can use int representation to compare against instead. res = self._get_value_or_missing(key, use_int=True) @@ -496,7 +516,7 @@ def get_computed_key(self, key): message. """ - vector_keys = ('longitudes', 'latitudes', 'distinctLatitudes') + vector_keys = ("longitudes", "latitudes", "distinctLatitudes") if key in vector_keys: res = eccodes.codes_get_array(self._message_id, key) else: diff --git a/iris_grib/tests/__init__.py b/iris_grib/tests/__init__.py index b33d72c89..197ff1b84 100644 --- a/iris_grib/tests/__init__.py +++ b/iris_grib/tests/__init__.py @@ -30,10 +30,10 @@ #: Basepath for iris-grib test results. -_RESULT_PATH = os.path.join(os.path.dirname(__file__), 'results') +_RESULT_PATH = os.path.join(os.path.dirname(__file__), "results") #: Basepath for iris-grib loadable test files. -_TESTDATA_PATH = os.path.join(os.path.dirname(__file__), 'testdata') +_TESTDATA_PATH = os.path.join(os.path.dirname(__file__), "testdata") override = os.environ.get("GRIB_TEST_DATA_PATH") if override: @@ -57,9 +57,7 @@ class MyDataTests(tests.IrisGribTest): no_data = not os.path.isdir(dpath) or os.environ.get(evar) reason = "Test(s) require missing external GRIB test data." - skip = unittest.skipIf( - condition=no_data, reason=reason - ) + skip = unittest.skipIf(condition=no_data, reason=reason) return skip(fn) @@ -79,7 +77,7 @@ def get_result_path(relative_path): relative_path = os.path.join(*relative_path) return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) - def result_path(self, basename=None, ext=''): + def result_path(self, basename=None, ext=""): """ Return the full path to a test result, generated from the \ calling file, class and, optionally, method. 
@@ -91,27 +89,29 @@ def result_path(self, basename=None, ext=''): * ext - Appended file extension. """ - if ext and not ext.startswith('.'): - ext = '.' + ext + if ext and not ext.startswith("."): + ext = "." + ext # Generate the folder name from the calling file name. path = os.path.abspath(inspect.getfile(self.__class__)) path = os.path.splitext(path)[0] - sub_path = path.rsplit('iris_grib', 1)[1].split('tests', 1)[1][1:] + sub_path = path.rsplit("iris_grib", 1)[1].split("tests", 1)[1][1:] # Generate the file name from the calling function name? if basename is None: stack = inspect.stack() for frame in stack[1:]: - if 'test_' in frame[3]: - basename = frame[3].replace('test_', '') + if "test_" in frame[3]: + basename = frame[3].replace("test_", "") break filename = basename + ext - result = os.path.join(self.get_result_path(''), - sub_path.replace('test_', ''), - self.__class__.__name__.replace('Test_', ''), - filename) + result = os.path.join( + self.get_result_path(""), + sub_path.replace("test_", ""), + self.__class__.__name__.replace("Test_", ""), + filename, + ) return result @staticmethod @@ -173,9 +173,7 @@ def assertGribMessageDifference( m2_sect = set(m2.sections.keys()) for missing_section in m1_sect ^ m2_sect: - what = ( - "introduced" if missing_section in m1_sect else "removed" - ) + what = "introduced" if missing_section in m1_sect else "removed" # Assert that an introduced section is in the diffs. self.assertIn( missing_section, @@ -211,9 +209,7 @@ def assertGribMessageDifference( if isinstance(m1_value, np.ndarray): # A large tolerance appears to be required for # gribapi 1.12, but not for 1.14. 
- self.assertArrayAlmostEqual( - m1_value, m2_value, decimal=2 - ) + self.assertArrayAlmostEqual(m1_value, m2_value, decimal=2) else: self.assertEqual( m1_value, diff --git a/iris_grib/tests/integration/format_interop/test_name_grib.py b/iris_grib/tests/integration/format_interop/test_name_grib.py index eb5366c45..16f50a59c 100644 --- a/iris_grib/tests/integration/format_interop/test_name_grib.py +++ b/iris_grib/tests/integration/format_interop/test_name_grib.py @@ -28,7 +28,7 @@ def name_cb(cube, field, filename): if z_coord: z_coord[0].standard_name = "height" z_coord[0].long_name = "height above ground level" - z_coord[0].attributes = {'positive': 'up'} + z_coord[0].attributes = {"positive": "up"} class TestNameToGRIB(tests.IrisGribTest): @@ -73,8 +73,8 @@ def test_name2_field(self): # Iris>=3.2 loads in an extra 'z' coordinate which cannot currently # be save to GRIB. - if name_cube.coords('z'): - name_cube.remove_coord('z') + if name_cube.coords("z"): + name_cube.remove_coord("z") with self.temp_filename(".grib2") as temp_filename: iris.save(name_cube, temp_filename) @@ -99,8 +99,8 @@ def test_name3_field(self): for i, name_cube in enumerate(name_cubes): # Iris>=3.2 loads in an extra 'z' coordinate which cannot currently # be save to GRIB. 
- if name_cube.coord('z') is not None: - name_cube.remove_coord('z') + if name_cube.coord("z") is not None: + name_cube.remove_coord("z") with self.temp_filename(".grib2") as temp_filename: iris.save(name_cube, temp_filename) diff --git a/iris_grib/tests/integration/format_interop/test_pp_grib.py b/iris_grib/tests/integration/format_interop/test_pp_grib.py index a4e031f47..f5b9b2fec 100644 --- a/iris_grib/tests/integration/format_interop/test_pp_grib.py +++ b/iris_grib/tests/integration/format_interop/test_pp_grib.py @@ -14,14 +14,10 @@ class TestBoundedTime(tests.IrisTest): @tests.skip_data def test_time_and_forecast_period_round_trip(self): - pp_path = tests.get_data_path( - ("PP", "meanMaxMin", "200806081200__qwpb.T24.pp") - ) + pp_path = tests.get_data_path(("PP", "meanMaxMin", "200806081200__qwpb.T24.pp")) # Choose the first time-bounded Cube in the PP dataset. original = [ - cube - for cube in iris.load(pp_path) - if cube.coord("time").has_bounds() + cube for cube in iris.load(pp_path) if cube.coord("time").has_bounds() ][0] # Save it to GRIB2 and re-load. 
with self.temp_filename(".grib2") as grib_path: diff --git a/iris_grib/tests/integration/load_convert/test_data_section.py b/iris_grib/tests/integration/load_convert/test_data_section.py index 6c4aca1e3..8e5e74313 100644 --- a/iris_grib/tests/integration/load_convert/test_data_section.py +++ b/iris_grib/tests/integration/load_convert/test_data_section.py @@ -19,9 +19,7 @@ class TestImport(tests.IrisGribTest): def test_gdt1(self): - path = tests.get_data_path( - ("GRIB", "rotated_nae_t", "sensible_pole.grib2") - ) + path = tests.get_data_path(("GRIB", "rotated_nae_t", "sensible_pole.grib2")) cube = load_cube(path) self.assertCMLApproxData(cube) @@ -62,9 +60,7 @@ def test_reduced(self): class TestDRT3(tests.IrisGribTest): def test_grid_complex_spatial_differencing(self): - path = tests.get_data_path( - ("GRIB", "missing_values", "missing_values.grib2") - ) + path = tests.get_data_path(("GRIB", "missing_values", "missing_values.grib2")) cube = load_cube(path) self.assertCMLApproxData(cube) @@ -84,5 +80,5 @@ def test_data_representation__no_bitsPerValue(self): self.assertEqual(0, np.sum(cube.data)) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/integration/load_convert/test_load_hybrid_coords.py b/iris_grib/tests/integration/load_convert/test_load_hybrid_coords.py index 184b618c1..af267b16b 100644 --- a/iris_grib/tests/integration/load_convert/test_load_hybrid_coords.py +++ b/iris_grib/tests/integration/load_convert/test_load_hybrid_coords.py @@ -19,65 +19,70 @@ @tests.skip_grib_data class TestHybridHeight(tests.IrisGribTest): def setUp(self): - filepath = self.get_testdata_path('faked_sample_hh_grib_data.grib2') - self.testdata_cube = load_cube(filepath, 'air_temperature') + filepath = self.get_testdata_path("faked_sample_hh_grib_data.grib2") + self.testdata_cube = load_cube(filepath, "air_temperature") def test_load_hybrid_height(self): # Check that it loads right, and creates a factory. 
- self.assertIsInstance(self.testdata_cube.aux_factories[0], - HybridHeightFactory) + self.assertIsInstance(self.testdata_cube.aux_factories[0], HybridHeightFactory) def test_hybrid_height_coords_values(self): cube = self.testdata_cube # check actual model level values. - self.assertArrayEqual(cube.coord('model_level_number').points, - [1, 11, 21]) + self.assertArrayEqual(cube.coord("model_level_number").points, [1, 11, 21]) # check sigma values correctly loaded from indices 1, 11, 21. - self.assertArrayAllClose(cube.coord('sigma').points, - [0.998, 0.894, 0.667], - atol=0.001) + self.assertArrayAllClose( + cube.coord("sigma").points, [0.998, 0.894, 0.667], atol=0.001 + ) # check height values too. - self.assertArrayAllClose(cube.coord('level_height').points, - [20., 953.3, 3220.], - atol=0.5) + self.assertArrayAllClose( + cube.coord("level_height").points, [20.0, 953.3, 3220.0], atol=0.5 + ) @tests.skip_grib_data class TestHybridPressure(tests.IrisGribTest): def setUp(self): - filepath = self.get_testdata_path('faked_sample_hp_grib_data.grib2') - self.testdata_cube = load_cube(filepath, 'air_temperature') + filepath = self.get_testdata_path("faked_sample_hp_grib_data.grib2") + self.testdata_cube = load_cube(filepath, "air_temperature") def test_load_hybrid_pressure(self): # Check that it loads right, and creates a factory. - self.assertIsInstance(self.testdata_cube.aux_factories[0], - HybridPressureFactory) + self.assertIsInstance( + self.testdata_cube.aux_factories[0], HybridPressureFactory + ) def test_hybrid_pressure_coords_values(self): cube = self.testdata_cube # Check existence, and some values, of the loaded coefficients. 
- self.assertArrayEqual(cube.coord('model_level_number').points, - [1, 51, 91]) - self.assertArrayAllClose(cube.coord('sigma').points, - [0., 0.045, 1.], atol=0.001) - self.assertArrayAllClose(cube.coord('level_pressure').points, - [2.00004, 18716.9688, 0.], rtol=0.0001) + self.assertArrayEqual(cube.coord("model_level_number").points, [1, 51, 91]) + self.assertArrayAllClose( + cube.coord("sigma").points, [0.0, 0.045, 1.0], atol=0.001 + ) + self.assertArrayAllClose( + cube.coord("level_pressure").points, [2.00004, 18716.9688, 0.0], rtol=0.0001 + ) self.assertArrayAllClose( - cube.coord('surface_air_pressure')[:2, :3].points, - [[103493.8, 103493.8, 103493.8], - [103401.0, 103407.4, 103412.2]], atol=0.1) + cube.coord("surface_air_pressure")[:2, :3].points, + [[103493.8, 103493.8, 103493.8], [103401.0, 103407.4, 103412.2]], + atol=0.1, + ) # Also check a few values from the derived coord. self.assertArrayAllClose( - cube.coord('air_pressure')[:, :3, 0].points, - [[2., 2., 2.], - [23389.3, 23385.1, 23379.9], - [103493.8, 103401.0, 103285.8]], atol=0.1) + cube.coord("air_pressure")[:, :3, 0].points, + [ + [2.0, 2.0, 2.0], + [23389.3, 23385.1, 23379.9], + [103493.8, 103401.0, 103285.8], + ], + atol=0.1, + ) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/integration/load_convert/test_product_definition_section.py b/iris_grib/tests/integration/load_convert/test_product_definition_section.py index b7a81b034..a9431db2e 100644 --- a/iris_grib/tests/integration/load_convert/test_product_definition_section.py +++ b/iris_grib/tests/integration/load_convert/test_product_definition_section.py @@ -18,9 +18,7 @@ class TestPDT8(tests.IrisGribTest): def setUp(self): # Load from the test file. 
- file_path = tests.get_data_path( - ("GRIB", "time_processed", "time_bound.grib2") - ) + file_path = tests.get_data_path(("GRIB", "time_processed", "time_bound.grib2")) self.cube = load_cube(file_path) def test_coords(self): @@ -61,9 +59,7 @@ def test_cell_method(self): self.assertEqual( len(cell_methods), 1, - "result has {} cell methods, expected one.".format( - len(cell_methods) - ), + "result has {} cell methods, expected one.".format(len(cell_methods)), ) (cell_method,) = cell_methods self.assertEqual(cell_method.coord_names, ("time",)) diff --git a/iris_grib/tests/integration/load_convert/test_sample_file_loads.py b/iris_grib/tests/integration/load_convert/test_sample_file_loads.py index 3f07082b4..aeb5526ed 100644 --- a/iris_grib/tests/integration/load_convert/test_sample_file_loads.py +++ b/iris_grib/tests/integration/load_convert/test_sample_file_loads.py @@ -40,9 +40,7 @@ def test_load_time_processed(self): self.assertCML(cubes, _RESULTDIR_PREFIX + ("time_bound_grib2.cml",)) def test_load_3_layer(self): - cubes = iris.load( - tests.get_data_path(("GRIB", "3_layer_viz", "3_layer.grib2")) - ) + cubes = iris.load(tests.get_data_path(("GRIB", "3_layer_viz", "3_layer.grib2"))) cubes = iris.cube.CubeList([cubes[1], cubes[0], cubes[2]]) self.assertCML(cubes, _RESULTDIR_PREFIX + ("3_layer.cml",)) @@ -51,8 +49,7 @@ def test_load_masked(self): ("GRIB", "missing_values", "missing_values.grib2") ) cubes = iris.load(gribfile) - self.assertCML(cubes, - _RESULTDIR_PREFIX + ("missing_values_grib2.cml",)) + self.assertCML(cubes, _RESULTDIR_PREFIX + ("missing_values_grib2.cml",)) def test_polar_stereo_grib1(self): cube = iris.load_cube( @@ -86,15 +83,11 @@ def test_polar_stereo_grib2_grid_definition(self): self.assertEqual(pyc.coord_system.true_scale_lat, 60.0) def test_lambert_grib1(self): - cube = iris.load_cube( - tests.get_data_path(("GRIB", "lambert", "lambert.grib1")) - ) + cube = iris.load_cube(tests.get_data_path(("GRIB", "lambert", "lambert.grib1"))) 
self.assertCML(cube, _RESULTDIR_PREFIX + ("lambert_grib1.cml",)) def test_lambert_grib2(self): - cube = iris.load_cube( - tests.get_data_path(("GRIB", "lambert", "lambert.grib2")) - ) + cube = iris.load_cube(tests.get_data_path(("GRIB", "lambert", "lambert.grib2"))) self.assertCML(cube, _RESULTDIR_PREFIX + ("lambert_grib2.cml",)) def test_regular_gg_grib1(self): diff --git a/iris_grib/tests/integration/round_trip/test_WAFC_mapping_round_trip.py b/iris_grib/tests/integration/round_trip/test_WAFC_mapping_round_trip.py index ac76c89c4..b106474b9 100644 --- a/iris_grib/tests/integration/round_trip/test_WAFC_mapping_round_trip.py +++ b/iris_grib/tests/integration/round_trip/test_WAFC_mapping_round_trip.py @@ -19,18 +19,18 @@ @tests.skip_grib_data class TestWAFCCodes(tests.IrisGribTest): def setUp(self): - self.cat = self.get_testdata_path('CAT_T+24_0600.grib2') - self.cb = self.get_testdata_path('CB_T+24_0600.grib2') - self.icing = self.get_testdata_path('ICING_T+24_0600.grib2') - self.turb = self.get_testdata_path('INCLDTURB_T+24_0600.grib2') + self.cat = self.get_testdata_path("CAT_T+24_0600.grib2") + self.cb = self.get_testdata_path("CB_T+24_0600.grib2") + self.icing = self.get_testdata_path("ICING_T+24_0600.grib2") + self.turb = self.get_testdata_path("INCLDTURB_T+24_0600.grib2") def test_WAFC_CAT_round_trip(self): - cubelist = load(self.cat, 'WAFC_CAT_potential') + cubelist = load(self.cat, "WAFC_CAT_potential") cube = cubelist[0] self.assertIsInstance(cube, Cube) with self.temp_filename() as tmp_save_path: - save(cube, tmp_save_path, saver='grib2') + save(cube, tmp_save_path, saver="grib2") saved_cube = load_cube(tmp_save_path) self.assertEqual(saved_cube.metadata, cube.metadata) @@ -40,7 +40,7 @@ def test_WAFC_CB_round_trip(self): self.assertIsInstance(cube, Cube) with self.temp_filename() as tmp_save_path: - save(cube, tmp_save_path, saver='grib2') + save(cube, tmp_save_path, saver="grib2") saved_cube = load_cube(tmp_save_path) 
self.assertEqual(saved_cube.metadata, cube.metadata) @@ -50,7 +50,7 @@ def test_WAFC_icing_round_trip(self): self.assertIsInstance(cube, Cube) with self.temp_filename() as tmp_save_path: - save(cube, tmp_save_path, saver='grib2') + save(cube, tmp_save_path, saver="grib2") saved_cube = load_cube(tmp_save_path) self.assertEqual(saved_cube.metadata, cube.metadata) @@ -60,10 +60,10 @@ def test_WAFC_turb_round_trip(self): self.assertIsInstance(cube, Cube) with self.temp_filename() as tmp_save_path: - save(cube, tmp_save_path, saver='grib2') + save(cube, tmp_save_path, saver="grib2") saved_cube = load_cube(tmp_save_path) self.assertEqual(saved_cube.metadata, cube.metadata) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/integration/round_trip/test_grid_definition_section.py b/iris_grib/tests/integration/round_trip/test_grid_definition_section.py index 06cfd342c..8988a7dea 100644 --- a/iris_grib/tests/integration/round_trip/test_grid_definition_section.py +++ b/iris_grib/tests/integration/round_trip/test_grid_definition_section.py @@ -72,9 +72,7 @@ def test_save_load(self): # Check those re-loaded properties which should match the original. for test_cube in (cube, cube_loaded_from_saved): - self.assertEqual( - test_cube.standard_name, "air_pressure_at_sea_level" - ) + self.assertEqual(test_cube.standard_name, "air_pressure_at_sea_level") self.assertEqual(test_cube.units, "Pa") self.assertEqual(test_cube.shape, (928, 744)) self.assertEqual(test_cube.cell_methods, ()) @@ -90,9 +88,7 @@ def test_save_load(self): # Check they have all the same coordinates. co_names = [coord.name() for coord in cube.coords()] - co_names_reload = [ - coord.name() for coord in cube_loaded_from_saved.coords() - ] + co_names_reload = [coord.name() for coord in cube_loaded_from_saved.coords()] self.assertEqual(sorted(co_names_reload), sorted(co_names)) # Check all the coordinates. 
@@ -105,16 +101,13 @@ def test_save_load(self): self.assertEqual( co_load.shape, co_orig.shape, - 'Shape of re-loaded "{}" coord is {} ' - "instead of {}".format( + 'Shape of re-loaded "{}" coord is {} ' "instead of {}".format( coord_name, co_load.shape, co_orig.shape ), ) # Check coordinate points equal, within a tolerance. - self.assertArrayAllClose( - co_load.points, co_orig.points, rtol=1.0e-6 - ) + self.assertArrayAllClose(co_load.points, co_orig.points, rtol=1.0e-6) # Check all coords are unbounded. # (NOTE: this is not so for the original X and Y coordinates, @@ -124,9 +117,7 @@ def test_save_load(self): except AssertionError as err: self.assertTrue( False, - 'Failed on coordinate "{}" : {}'.format( - coord_name, str(err) - ), + 'Failed on coordinate "{}" : {}'.format(coord_name, str(err)), ) # Check that main data array also matches. diff --git a/iris_grib/tests/integration/round_trip/test_hybrid_coords_round_trip.py b/iris_grib/tests/integration/round_trip/test_hybrid_coords_round_trip.py index 578eaee71..25dc38c77 100644 --- a/iris_grib/tests/integration/round_trip/test_hybrid_coords_round_trip.py +++ b/iris_grib/tests/integration/round_trip/test_hybrid_coords_round_trip.py @@ -13,6 +13,7 @@ import iris_grib.tests as tests from iris import load_cube, load_cubes, save + # Try except allows compatibility with current Iris (2.4) and also master. # TODO: simplify to just the iris.util import once we drop support for any # Iris versions with iris.experimental.equalise_cubes import @@ -25,38 +26,34 @@ @tests.skip_grib_data class TestHybridHeightRoundTrip(tests.IrisGribTest): def test_hh_round_trip(self): - filepath = self.get_testdata_path( - 'faked_sample_hh_grib_data.grib2') + filepath = self.get_testdata_path("faked_sample_hh_grib_data.grib2") # Load and save temperature cube and reference (orography) cube # separately because this is the only way to save the hybrid height # coordinate. 
- cube, ref_cube = load_cubes(filepath, - ('air_temperature', 'surface_altitude')) + cube, ref_cube = load_cubes(filepath, ("air_temperature", "surface_altitude")) with self.temp_filename() as tmp_save_path: - save([cube, ref_cube], tmp_save_path, saver='grib2') + save([cube, ref_cube], tmp_save_path, saver="grib2") # Only need to reload temperature cube to compare with unsaved # temperature cube. - saved_cube = load_cube(tmp_save_path, 'air_temperature') + saved_cube = load_cube(tmp_save_path, "air_temperature") self.assertTrue(saved_cube == cube) @tests.skip_grib_data class TestHybridPressureRoundTrip(tests.IrisGribTest): def test_hybrid_pressure(self): - filepath = self.get_testdata_path( - 'faked_sample_hp_grib_data.grib2') + filepath = self.get_testdata_path("faked_sample_hp_grib_data.grib2") # Load and save temperature cube and reference (air_pressure at # surface) cube separately because this is the only way to save the # hybrid pressure coordinate. - cube, ref_cube = load_cubes(filepath, - ('air_temperature', 'air_pressure')) + cube, ref_cube = load_cubes(filepath, ("air_temperature", "air_pressure")) with self.temp_filename() as tmp_save_path: - save([cube, ref_cube], tmp_save_path, saver='grib2') + save([cube, ref_cube], tmp_save_path, saver="grib2") # Only need to reload temperature cube to compare with unsaved # temperature cube. - saved_cube = load_cube(tmp_save_path, 'air_temperature') + saved_cube = load_cube(tmp_save_path, "air_temperature") # Currently all attributes are lost when saving to grib, so we must # equalise them in order to successfully compare all other aspects. 
@@ -65,5 +62,5 @@ def test_hybrid_pressure(self): self.assertTrue(saved_cube == cube) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/integration/round_trip/test_product_definition_section.py b/iris_grib/tests/integration/round_trip/test_product_definition_section.py index 0b33fdc52..e5f28fb19 100644 --- a/iris_grib/tests/integration/round_trip/test_product_definition_section.py +++ b/iris_grib/tests/integration/round_trip/test_product_definition_section.py @@ -35,9 +35,7 @@ def test_perturbation(self): cube.coord("longitude").coord_system = crs # add a realization coordinate cube.add_aux_coord( - iris.coords.DimCoord( - points=1, standard_name="realization", units="1" - ) + iris.coords.DimCoord(points=1, standard_name="realization", units="1") ) with self.temp_filename("testPDT11.GRIB2") as temp_file_path: iris.save(cube, temp_file_path) @@ -62,8 +60,7 @@ def test_save_load(self): tcoord = iris.coords.DimCoord( 23, "time", units=Unit("days since epoch", calendar="standard") ) - fpcoord = iris.coords.DimCoord(24, "forecast_period", - units=Unit("hours")) + fpcoord = iris.coords.DimCoord(24, "forecast_period", units=Unit("hours")) cube.add_aux_coord(tcoord) cube.add_aux_coord(fpcoord) cube.attributes["WMO_constituent_type"] = 0 diff --git a/iris_grib/tests/integration/save_rules/test_grib_save.py b/iris_grib/tests/integration/save_rules/test_grib_save.py index 0d5f7c901..875c2ecff 100644 --- a/iris_grib/tests/integration/save_rules/test_grib_save.py +++ b/iris_grib/tests/integration/save_rules/test_grib_save.py @@ -144,9 +144,7 @@ def test_irregular(self): lat_coord = cube.coord("latitude") cube.remove_coord("latitude") - new_lats = np.append( - lat_coord.points[:-1], lat_coord.points[0] - ) # Irregular + new_lats = np.append(lat_coord.points[:-1], lat_coord.points[0]) # Irregular cube.add_aux_coord( iris.coords.AuxCoord( new_lats, @@ -158,18 +156,14 @@ def test_irregular(self): ) saved_grib = 
iris.util.create_temp_filename(suffix=".grib2") - self.assertRaises( - iris.exceptions.TranslationError, iris.save, cube, saved_grib - ) + self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_non_latlon(self): cube = self._load_basic() cube.coord(dimensions=[0]).coord_system = None saved_grib = iris.util.create_temp_filename(suffix=".grib2") - self.assertRaises( - iris.exceptions.TranslationError, iris.save, cube, saved_grib - ) + self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_forecast_period(self): @@ -177,9 +171,7 @@ def test_forecast_period(self): cube = self._load_basic() cube.coord("forecast_period").units = cf_units.Unit("years") saved_grib = iris.util.create_temp_filename(suffix=".grib2") - self.assertRaises( - iris.exceptions.TranslationError, iris.save, cube, saved_grib - ) + self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_unhandled_vertical(self): @@ -204,25 +196,17 @@ def test_scalar_int32_pressure(self): iris.save(cube, testfile) def test_bounded_level(self): - cube = iris.load_cube( - tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2")) - ) + cube = iris.load_cube(tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2"))) with self.temp_filename(".grib2") as testfile: iris.save(cube, testfile) with open(testfile, "rb") as saved_file: - g = eccodes.codes_new_from_file( - saved_file, eccodes.CODES_PRODUCT_GRIB - ) + g = eccodes.codes_new_from_file(saved_file, eccodes.CODES_PRODUCT_GRIB) self.assertEqual( - eccodes.codes_get_double( - g, "scaledValueOfFirstFixedSurface" - ), + eccodes.codes_get_double(g, "scaledValueOfFirstFixedSurface"), 0.0, ) self.assertEqual( - eccodes.codes_get_double( - g, "scaledValueOfSecondFixedSurface" - ), + eccodes.codes_get_double(g, "scaledValueOfSecondFixedSurface"), 2147483647.0, ) @@ -259,9 +243,7 @@ def _lat_lon_cube_no_time(self): def 
_cube_time_no_forecast(self): cube = self._lat_lon_cube_no_time() - unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN - ) + unit = cf_units.Unit("hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN) dt = datetime.datetime(2010, 12, 31, 12, 0) cube.add_aux_coord( iris.coords.AuxCoord( @@ -296,17 +278,13 @@ def _cube_with_time_bounds(self): def test_no_time_cube(self): cube = self._lat_lon_cube_no_time() saved_grib = iris.util.create_temp_filename(suffix=".grib2") - self.assertRaises( - iris.exceptions.TranslationError, iris.save, cube, saved_grib - ) + self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_cube_with_time_bounds(self): cube = self._cube_with_time_bounds() saved_grib = iris.util.create_temp_filename(suffix=".grib2") - self.assertRaises( - iris.exceptions.TranslationError, iris.save, cube, saved_grib - ) + self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) diff --git a/iris_grib/tests/integration/save_rules/test_grid_definition_section.py b/iris_grib/tests/integration/save_rules/test_grid_definition_section.py index 465128317..b7b0f3bc0 100644 --- a/iris_grib/tests/integration/save_rules/test_grid_definition_section.py +++ b/iris_grib/tests/integration/save_rules/test_grid_definition_section.py @@ -12,13 +12,15 @@ # importing anything else. import iris_grib.tests as tests -from iris.coord_systems import (GeogCS, - RotatedGeogCS, - Mercator, - TransverseMercator, - LambertConformal, - AlbersEqualArea, - LambertAzimuthalEqualArea) +from iris.coord_systems import ( + GeogCS, + RotatedGeogCS, + Mercator, + TransverseMercator, + LambertConformal, + AlbersEqualArea, + LambertAzimuthalEqualArea, +) import numpy as np from iris_grib._save_rules import grid_definition_section @@ -34,92 +36,92 @@ def test_grid_definition_template_0(self): # Regular lat/lon (Plate Carree). 
x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'degrees' + coord_units = "degrees" cs = self.ellipsoid test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 0) + self._check_key("gridDefinitionTemplateNumber", 0) def test_grid_definition_template_1(self): # Rotated lat/lon (Plate Carree). x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'degrees' + coord_units = "degrees" cs = RotatedGeogCS(34.0, 117.0, ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 1) + self._check_key("gridDefinitionTemplateNumber", 1) def test_grid_definition_template_4(self): # Irregular (variable resolution) lat/lon grid. x_points = np.array([0, 2, 7]) y_points = np.array([1, 3, 6]) - coord_units = '1' + coord_units = "1" cs = self.ellipsoid test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 4) + self._check_key("gridDefinitionTemplateNumber", 4) def test_grid_definition_template_5(self): # Irregular (variable resolution) rotated lat/lon grid. x_points = np.array([0, 2, 7]) y_points = np.array([1, 3, 6]) - coord_units = '1' + coord_units = "1" cs = RotatedGeogCS(34.0, 117.0, ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 5) + self._check_key("gridDefinitionTemplateNumber", 5) def test_grid_definition_template_10(self): # Mercator grid. 
x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'm' + coord_units = "m" cs = Mercator(ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 10) + self._check_key("gridDefinitionTemplateNumber", 10) def test_grid_definition_template_12(self): # Transverse Mercator grid. x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'm' + coord_units = "m" cs = TransverseMercator(0, 0, 0, 0, 1, ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 12) + self._check_key("gridDefinitionTemplateNumber", 12) def test_grid_definition_template_30(self): # Lambert Conformal grid. x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'm' + coord_units = "m" cs = LambertConformal(ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 30) + self._check_key("gridDefinitionTemplateNumber", 30) def test_grid_definition_template_140(self): # Lambert Conformal grid. x_points = np.arange(3) y_points = np.arange(3) - coord_units = 'm' + coord_units = "m" cs = LambertAzimuthalEqualArea(ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) grid_definition_section(test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 140) + self._check_key("gridDefinitionTemplateNumber", 140) def test_coord_system_not_supported(self): # Test an unsupported grid - let's choose Albers Equal Area. 
x_points = np.arange(3) y_points = np.arange(3) - coord_units = '1' + coord_units = "1" cs = AlbersEqualArea(ellipsoid=self.ellipsoid) test_cube = self._make_test_cube(cs, x_points, y_points, coord_units) - exp_name = cs.grid_mapping_name.replace('_', ' ').title() - exp_emsg = 'not supported for coordinate system {!r}'.format(exp_name) + exp_name = cs.grid_mapping_name.replace("_", " ").title() + exp_emsg = "not supported for coordinate system {!r}".format(exp_name) with self.assertRaisesRegex(ValueError, exp_emsg): grid_definition_section(test_cube, self.mock_grib) diff --git a/iris_grib/tests/integration/save_rules/test_save_hybrid_coords.py b/iris_grib/tests/integration/save_rules/test_save_hybrid_coords.py index 5ed377f17..a77c2f55b 100644 --- a/iris_grib/tests/integration/save_rules/test_save_hybrid_coords.py +++ b/iris_grib/tests/integration/save_rules/test_save_hybrid_coords.py @@ -21,12 +21,10 @@ @tests.skip_grib_data class TestSaveHybridHeight(tests.IrisGribTest): def setUp(self): - reference_data_filepath = self.get_testdata_path('hybrid_height.nc') - if (hasattr(iris.FUTURE, 'netcdf_promote') and - not iris.FUTURE.netcdf_promote): + reference_data_filepath = self.get_testdata_path("hybrid_height.nc") + if hasattr(iris.FUTURE, "netcdf_promote") and not iris.FUTURE.netcdf_promote: iris.FUTURE.netcdf_promote = True - data_cube = iris.load_cube(reference_data_filepath, - 'air_potential_temperature') + data_cube = iris.load_cube(reference_data_filepath, "air_potential_temperature") # Use only 3 (non-contiguous) levels, and a single timestep. data_cube = data_cube[0, :6:2] self.test_hh_data_cube = data_cube @@ -55,37 +53,28 @@ def test_save(self): # Check that the PV vector (same in all messages) is as expected. # Note: gaps here are because we took model levels = (1, 3, 5). 
self.assertArrayAllClose( - msgs[0].sections[4]['pv'], - [0, 5., 0, 45., 0, 111.667, 0, 0.999, 0, 0.995, 0, 0.987], - atol=0.0015) + msgs[0].sections[4]["pv"], + [0, 5.0, 0, 45.0, 0, 111.667, 0, 0.999, 0, 0.995, 0, 0.987], + atol=0.0015, + ) # Check message #2-of-3 has the correctly encoded hybrid height. msg = msgs[1] # first surface type = 118 (i.e. hybrid height). - self.assertEqual( - msg.sections[4]['typeOfFirstFixedSurface'], - 118) + self.assertEqual(msg.sections[4]["typeOfFirstFixedSurface"], 118) # first surface scaling = 0. - self.assertEqual( - msg.sections[4]['scaleFactorOfFirstFixedSurface'], - 0) + self.assertEqual(msg.sections[4]["scaleFactorOfFirstFixedSurface"], 0) # first surface value = 3 -- i.e. #2 of (1, 3, 5). - self.assertEqual( - msg.sections[4]['scaledValueOfFirstFixedSurface'], - 3) + self.assertEqual(msg.sections[4]["scaledValueOfFirstFixedSurface"], 3) # second surface type = "NONE" -- i.e. unbounded level. - self.assertEqual( - msg.sections[4]['typeOfSecondFixedSurface'], - 255) + self.assertEqual(msg.sections[4]["typeOfSecondFixedSurface"], 255) @tests.skip_grib_data class TestSaveHybridPressure(tests.IrisGribTest): def setUp(self): - reference_data_filepath = self.get_testdata_path( - 'hybrid_pressure.nc') - data_cube = iris.load_cube(reference_data_filepath, - 'air_temperature') + reference_data_filepath = self.get_testdata_path("hybrid_pressure.nc") + data_cube = iris.load_cube(reference_data_filepath, "air_temperature") self.test_hp_data_cube = data_cube def test_save(self): @@ -111,32 +100,23 @@ def test_save(self): # Check that the PV vector (same in all messages) is as expected. # Note: HUGE gaps here because we took model levels = (1, 51, 91). 
- self.assertEqual(msgs[0].sections[4]['NV'], 184) + self.assertEqual(msgs[0].sections[4]["NV"], 184) pv_expected = np.zeros(184, dtype=np.float64) - pv_expected[[1, 51, 91]] = [0., 18191.03, 0.003] - pv_expected[[93, 143, 183]] = [0., 0.036, 0.998] - self.assertArrayAllClose( - msgs[0].sections[4]['pv'], pv_expected, atol=0.001) + pv_expected[[1, 51, 91]] = [0.0, 18191.03, 0.003] + pv_expected[[93, 143, 183]] = [0.0, 0.036, 0.998] + self.assertArrayAllClose(msgs[0].sections[4]["pv"], pv_expected, atol=0.001) # Check message #2-of-3 has the correctly encoded hybrid pressure. msg = msgs[1] # first surface type = 119 (i.e. hybrid pressure). - self.assertEqual( - msg.sections[4]['typeOfFirstFixedSurface'], - 119) + self.assertEqual(msg.sections[4]["typeOfFirstFixedSurface"], 119) # first surface scaling = 0. - self.assertEqual( - msg.sections[4]['scaleFactorOfFirstFixedSurface'], - 0) + self.assertEqual(msg.sections[4]["scaleFactorOfFirstFixedSurface"], 0) # first surface value = 3 -- i.e. #2 of (1, 3, 5). - self.assertEqual( - msg.sections[4]['scaledValueOfFirstFixedSurface'], - 51) + self.assertEqual(msg.sections[4]["scaledValueOfFirstFixedSurface"], 51) # second surface type = "NONE" -- i.e. unbounded level. 
- self.assertEqual( - msg.sections[4]['typeOfSecondFixedSurface'], - 255) + self.assertEqual(msg.sections[4]["typeOfSecondFixedSurface"], 255) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/earth_shape_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/earth_shape_grib1.cml index e323b8414..3ee702856 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/earth_shape_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/earth_shape_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/lambert_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/lambert_grib1.cml index 32b73ff7c..7c0b4d053 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/lambert_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/lambert_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/polar_stereo_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/polar_stereo_grib1.cml index c590e237a..c1c845ae9 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/polar_stereo_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/polar_stereo_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/reduced_ll_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/reduced_ll_grib1.cml index 095e9d2fb..65c5483f8 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/reduced_ll_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/reduced_ll_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git 
a/iris_grib/tests/results/integration/load_convert/sample_file_loads/regular_gg_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/regular_gg_grib1.cml index 8b811c4f9..0fab6aa5b 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/regular_gg_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/regular_gg_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/rotated.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/rotated.cml index abb271076..f464429a7 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/rotated.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/rotated.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/results/integration/load_convert/sample_file_loads/time_bound_grib1.cml b/iris_grib/tests/results/integration/load_convert/sample_file_loads/time_bound_grib1.cml index 023f0f622..977fe0280 100644 --- a/iris_grib/tests/results/integration/load_convert/sample_file_loads/time_bound_grib1.cml +++ b/iris_grib/tests/results/integration/load_convert/sample_file_loads/time_bound_grib1.cml @@ -1,6 +1,9 @@ + + + diff --git a/iris_grib/tests/test_license_headers.py b/iris_grib/tests/test_license_headers.py index 1ac0d92db..7779bc2b6 100644 --- a/iris_grib/tests/test_license_headers.py +++ b/iris_grib/tests/test_license_headers.py @@ -37,20 +37,23 @@ def whatchanged_parse(whatchanged_output): Sample input:: - ['TIME:1366884020', '', - ':000000 100644 0000000... 5862ced... A\tlib/iris/cube.py'] + [ + "TIME:1366884020", + "", + ":000000 100644 0000000... 5862ced... 
A\tlib/iris/cube.py", + ] """ dt = None for line in whatchanged_output: if not line.strip(): continue - elif line.startswith('TIME:'): + elif line.startswith("TIME:"): dt = datetime.fromtimestamp(int(line[5:])) else: # Non blank, non date, line -> must be the lines # containing the file info. - fname = ' '.join(line.split('\t')[1:]) + fname = " ".join(line.split("\t")[1:]) yield fname, dt @staticmethod @@ -67,15 +70,15 @@ def last_change_by_fname(): """ # Check the ".git" folder exists at the repo dir. - if not os.path.isdir(os.path.join(REPO_DIR, '.git')): - raise ValueError('{} is not a git repository.'.format(REPO_DIR)) + if not os.path.isdir(os.path.join(REPO_DIR, ".git")): + raise ValueError("{} is not a git repository.".format(REPO_DIR)) # Call "git whatchanged" to get the details of all the files and when # they were last changed. - output = subprocess.check_output(['git', 'whatchanged', - "--pretty=TIME:%ct"], - cwd=REPO_DIR) - output = output.decode().split('\n') + output = subprocess.check_output( + ["git", "whatchanged", "--pretty=TIME:%ct"], cwd=REPO_DIR + ) + output = output.decode().split("\n") res = {} for fname, dt in TestLicenseHeaders.whatchanged_parse(output): if fname not in res or dt > res[fname]: @@ -84,35 +87,43 @@ def last_change_by_fname(): return res def test_license_headers(self): - exclude_patterns = ('setup.py', - 'build/*', - 'dist/*', - 'docs/*', - 'iris_grib/tests/unit/results/*', - 'iris_grib.egg-info/*') + exclude_patterns = ( + "setup.py", + "build/*", + "dist/*", + "docs/*", + "iris_grib/tests/unit/results/*", + "iris_grib.egg-info/*", + ) try: last_change_by_fname = self.last_change_by_fname() except ValueError: # Caught the case where this is not a git repo. - return self.skipTest('Iris-grib installation did not look like a ' - 'git repo.') + return self.skipTest( + "Iris-grib installation did not look like a " "git repo." 
+ ) failed = False for fname, last_change in sorted(last_change_by_fname.items()): full_fname = os.path.join(REPO_DIR, fname) - if full_fname.endswith('.py') and os.path.isfile(full_fname) and \ - not any(fnmatch(fname, pat) for pat in exclude_patterns): + if ( + full_fname.endswith(".py") + and os.path.isfile(full_fname) + and not any(fnmatch(fname, pat) for pat in exclude_patterns) + ): with open(full_fname) as fh: content = fh.read() if not content.startswith(LICENSE_TEMPLATE): - print('The file {} does not start with the required ' - 'license header.'.format(fname)) + print( + "The file {} does not start with the required " + "license header.".format(fname) + ) failed = True if failed: - raise ValueError('There were license header failures. See stdout.') + raise ValueError("There were license header failures. See stdout.") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/iris_grib/tests/unit/__init__.py b/iris_grib/tests/unit/__init__.py index 27f39ec16..7cbbc4d63 100644 --- a/iris_grib/tests/unit/__init__.py +++ b/iris_grib/tests/unit/__init__.py @@ -45,7 +45,7 @@ def _mock_eccodes__codes_is_missing(grib_message, keyname): Return whether the key exists in the fake message (dictionary). 
""" - return (keyname not in grib_message) + return keyname not in grib_message def _mock_eccodes__codes_get_native_type(grib_message, keyname): @@ -68,12 +68,11 @@ def _mock_eccodes__codes_get_native_type(grib_message, keyname): _mock_eccodes.codes_get_long = mock.Mock(side_effect=_mock_eccodes_fetch) _mock_eccodes.codes_get_string = mock.Mock(side_effect=_mock_eccodes_fetch) _mock_eccodes.codes_get_double = mock.Mock(side_effect=_mock_eccodes_fetch) -_mock_eccodes.codes_get_double_array = mock.Mock( - side_effect=_mock_eccodes_fetch) -_mock_eccodes.codes_is_missing = mock.Mock( - side_effect=_mock_eccodes__codes_is_missing) +_mock_eccodes.codes_get_double_array = mock.Mock(side_effect=_mock_eccodes_fetch) +_mock_eccodes.codes_is_missing = mock.Mock(side_effect=_mock_eccodes__codes_is_missing) _mock_eccodes.codes_get_native_type = mock.Mock( - side_effect=_mock_eccodes__codes_get_native_type) + side_effect=_mock_eccodes__codes_get_native_type +) class FakeGribMessage(dict): @@ -83,6 +82,7 @@ class FakeGribMessage(dict): Behaves as a dictionary, containing key-values for message keys. """ + def __init__(self, **kwargs): """ Create a fake message object. @@ -94,7 +94,7 @@ def __init__(self, **kwargs): # Start with a bare dictionary dict.__init__(self) # Extract specially-recognised keys. - time_code = kwargs.pop('time_code', None) + time_code = kwargs.pop("time_code", None) # Set the minimally required keys. self._init_minimal_message() # Also set a time-code, if given. @@ -105,58 +105,71 @@ def __init__(self, **kwargs): def _init_minimal_message(self): # Set values for all the required keys. 
- self.update({ - 'edition': 1, - 'Ni': 1, - 'Nj': 1, - 'numberOfValues': 1, - 'alternativeRowScanning': 0, - 'centre': 'ecmf', - 'year': 2007, - 'month': 3, - 'day': 23, - 'hour': 12, - 'minute': 0, - 'indicatorOfUnitOfTimeRange': 1, - 'gridType': 'rotated_ll', - 'angleOfRotation': 0.0, - 'resolutionAndComponentFlags': 128, - 'iDirectionIncrementInDegrees': 0.036, - 'jDirectionIncrementInDegrees': 0.036, - 'iScansNegatively': 0, - 'jScansPositively': 1, - 'longitudeOfFirstGridPointInDegrees': -5.70, - 'latitudeOfFirstGridPointInDegrees': -4.452, - 'jPointsAreConsecutive': 0, - 'values': np.array([[1.0]]), - 'indicatorOfParameter': 9999, - 'parameterNumber': 9999, - 'startStep': 24, - 'timeRangeIndicator': 1, - 'P1': 2, 'P2': 0, - # time unit - needed AS WELL as 'indicatorOfUnitOfTimeRange' - 'unitOfTime': 1, - 'table2Version': 9999, - }) + self.update( + { + "edition": 1, + "Ni": 1, + "Nj": 1, + "numberOfValues": 1, + "alternativeRowScanning": 0, + "centre": "ecmf", + "year": 2007, + "month": 3, + "day": 23, + "hour": 12, + "minute": 0, + "indicatorOfUnitOfTimeRange": 1, + "gridType": "rotated_ll", + "angleOfRotation": 0.0, + "resolutionAndComponentFlags": 128, + "iDirectionIncrementInDegrees": 0.036, + "jDirectionIncrementInDegrees": 0.036, + "iScansNegatively": 0, + "jScansPositively": 1, + "longitudeOfFirstGridPointInDegrees": -5.70, + "latitudeOfFirstGridPointInDegrees": -4.452, + "jPointsAreConsecutive": 0, + "values": np.array([[1.0]]), + "indicatorOfParameter": 9999, + "parameterNumber": 9999, + "startStep": 24, + "timeRangeIndicator": 1, + "P1": 2, + "P2": 0, + # time unit - needed AS WELL as 'indicatorOfUnitOfTimeRange' + "unitOfTime": 1, + "table2Version": 9999, + } + ) def set_timeunit_code(self, timecode): - self['indicatorOfUnitOfTimeRange'] = timecode + self["indicatorOfUnitOfTimeRange"] = timecode # for some odd reason, GRIB1 code uses *both* of these # NOTE kludge -- the 2 keys are really the same thing - self['unitOfTime'] = timecode + 
self["unitOfTime"] = timecode class TestField(tests.IrisGribTest): - def _test_for_coord(self, field, convert, coord_predicate, expected_points, - expected_bounds): - (factories, references, standard_name, long_name, units, - attributes, cell_methods, dim_coords_and_dims, - aux_coords_and_dims) = convert(field) + def _test_for_coord( + self, field, convert, coord_predicate, expected_points, expected_bounds + ): + ( + factories, + references, + standard_name, + long_name, + units, + attributes, + cell_methods, + dim_coords_and_dims, + aux_coords_and_dims, + ) = convert(field) # Check for one and only one matching coordinate. coords_and_dims = dim_coords_and_dims + aux_coords_and_dims - matching_coords = [coord for coord, _ in coords_and_dims if - coord_predicate(coord)] + matching_coords = [ + coord for coord, _ in coords_and_dims if coord_predicate(coord) + ] self.assertEqual(len(matching_coords), 1, str(matching_coords)) coord = matching_coords[0] @@ -169,8 +182,9 @@ def _test_for_coord(self, field, convert, coord_predicate, expected_points, else: self.assertArrayEqual(coord.bounds, expected_bounds) - def assertCoordsAndDimsListsMatch(self, coords_and_dims_got, - coords_and_dims_expected): + def assertCoordsAndDimsListsMatch( + self, coords_and_dims_got, coords_and_dims_expected + ): """ Check that coords_and_dims lists are equivalent. @@ -180,17 +194,18 @@ def assertCoordsAndDimsListsMatch(self, coords_and_dims_got, It also checks that the coordinate types (DimCoord/AuxCoord) match. """ + def sorted_by_coordname(list): return sorted(list, key=lambda item: item[0].name()) coords_and_dims_got = sorted_by_coordname(coords_and_dims_got) - coords_and_dims_expected = sorted_by_coordname( - coords_and_dims_expected) + coords_and_dims_expected = sorted_by_coordname(coords_and_dims_expected) self.assertEqual(coords_and_dims_got, coords_and_dims_expected) # Also check coordinate type equivalences (as Coord.__eq__ does not). 
self.assertEqual( [type(coord) for coord, dims in coords_and_dims_got], - [type(coord) for coord, dims in coords_and_dims_expected]) + [type(coord) for coord, dims in coords_and_dims_expected], + ) class TestGribSimple(tests.IrisGribTest): @@ -211,9 +226,10 @@ def mock_grib(self): def cube_from_message(self, grib): # Parameter translation now uses the GribWrapper, so we must convert # the Mock-based fake message to a FakeGribMessage. - with mock.patch('iris_grib.eccodes', _mock_eccodes): + with mock.patch("iris_grib.eccodes", _mock_eccodes): grib_message = FakeGribMessage(**grib.__dict__) wrapped_msg = iris_grib.GribWrapper(grib_message) cube, _, _ = iris.fileformats.rules._make_cube( - wrapped_msg, iris_grib._grib1_load_rules.grib1_convert) + wrapped_msg, iris_grib._grib1_load_rules.grib1_convert + ) return cube diff --git a/iris_grib/tests/unit/grib1_load_rules/test_grib1_convert.py b/iris_grib/tests/unit/grib1_load_rules/test_grib1_convert.py index 327f6b65f..840313b0e 100644 --- a/iris_grib/tests/unit/grib1_load_rules/test_grib1_convert.py +++ b/iris_grib/tests/unit/grib1_load_rules/test_grib1_convert.py @@ -23,7 +23,7 @@ class TestBadEdition(tests.IrisGribTest): def test(self): message = mock.Mock(edition=2) - emsg = 'GRIB edition 2 is not supported' + emsg = "GRIB edition 2 is not supported" with self.assertRaisesRegex(TranslationError, emsg): grib1_convert(message) @@ -31,49 +31,67 @@ def test(self): class TestBoundedTime(TestField): @staticmethod def is_forecast_period(coord): - return (coord.standard_name == 'forecast_period' and - coord.units == 'hours') + return coord.standard_name == "forecast_period" and coord.units == "hours" @staticmethod def is_time(coord): - return (coord.standard_name == 'time' and - coord.units == 'hours since epoch') + return coord.standard_name == "time" and coord.units == "hours since epoch" def assert_bounded_message(self, **kwargs): - attributes = {'productDefinitionTemplateNumber': 0, - 'edition': 1, '_forecastTime': 15, - 
'_forecastTimeUnit': 'hours', - 'phenomenon_bounds': lambda u: (80, 120), - '_phenomenonDateTime': -1, - 'table2Version': 9999, - '_originatingCentre': 'xxx', - } + attributes = { + "productDefinitionTemplateNumber": 0, + "edition": 1, + "_forecastTime": 15, + "_forecastTimeUnit": "hours", + "phenomenon_bounds": lambda u: (80, 120), + "_phenomenonDateTime": -1, + "table2Version": 9999, + "_originatingCentre": "xxx", + } attributes.update(kwargs) message = mock.Mock(**attributes) - self._test_for_coord(message, grib1_convert, self.is_forecast_period, - expected_points=[35], - expected_bounds=[[15, 55]]) - self._test_for_coord(message, grib1_convert, self.is_time, - expected_points=[100], - expected_bounds=[[80, 120]]) + self._test_for_coord( + message, + grib1_convert, + self.is_forecast_period, + expected_points=[35], + expected_bounds=[[15, 55]], + ) + self._test_for_coord( + message, + grib1_convert, + self.is_time, + expected_points=[100], + expected_bounds=[[80, 120]], + ) def assert_bounded_message_3hours(self, **kwargs): - attributes = {'productDefinitionTemplateNumber': 0, - 'edition': 1, '_forecastTime': 252, - '_forecastTimeUnit': '3 hours', - 'phenomenon_bounds': lambda u: (252, 258), - '_phenomenonDateTime': -1, - 'table2Version': 9999, - '_originatingCentre': 'xxx', - } + attributes = { + "productDefinitionTemplateNumber": 0, + "edition": 1, + "_forecastTime": 252, + "_forecastTimeUnit": "3 hours", + "phenomenon_bounds": lambda u: (252, 258), + "_phenomenonDateTime": -1, + "table2Version": 9999, + "_originatingCentre": "xxx", + } attributes.update(kwargs) message = mock.Mock(**attributes) - self._test_for_coord(message, grib1_convert, self.is_forecast_period, - expected_points=[255], - expected_bounds=[[252, 258]]) - self._test_for_coord(message, grib1_convert, self.is_time, - expected_points=[255], - expected_bounds=[[252, 258]]) + self._test_for_coord( + message, + grib1_convert, + self.is_forecast_period, + expected_points=[255], + 
expected_bounds=[[252, 258]], + ) + self._test_for_coord( + message, + grib1_convert, + self.is_time, + expected_points=[255], + expected_bounds=[[252, 258]], + ) def test_time_range_indicator_2(self): self.assert_bounded_message(timeRangeIndicator=2) @@ -134,28 +152,27 @@ def test_time_range_indicator_125(self): class Test_GribLevels(tests.IrisTest): def test_grib1_hybrid_height(self): - gm = eccodes.codes_grib_new_from_samples('regular_gg_ml_grib1') + gm = eccodes.codes_grib_new_from_samples("regular_gg_ml_grib1") gw = GribWrapper(gm) results = grib1_convert(gw) - factory, = results[0] + (factory,) = results[0] self.assertEqual(factory.factory_class, HybridPressureFactory) delta, sigma, ref = factory.args - self.assertEqual(delta, {'long_name': 'level_pressure'}) - self.assertEqual(sigma, {'long_name': 'sigma'}) - self.assertEqual(ref, Reference(name='surface_pressure')) + self.assertEqual(delta, {"long_name": "level_pressure"}) + self.assertEqual(sigma, {"long_name": "sigma"}) + self.assertEqual(ref, Reference(name="surface_pressure")) coords_and_dims = results[8] - coord, = [co for co, _ in coords_and_dims - if co.name() == 'model_level_number'] - self.assertEqual(coord.units, '1') - self.assertEqual(coord.attributes['positive'], 'up') - coord, = [co for co, _ in coords_and_dims - if co.name() == 'level_pressure'] - self.assertEqual(coord.units, 'Pa') - coord, = [co for co, _ in coords_and_dims - if co.name() == 'sigma'] - self.assertEqual(coord.units, '1') + (coord,) = [ + co for co, _ in coords_and_dims if co.name() == "model_level_number" + ] + self.assertEqual(coord.units, "1") + self.assertEqual(coord.attributes["positive"], "up") + (coord,) = [co for co, _ in coords_and_dims if co.name() == "level_pressure"] + self.assertEqual(coord.units, "Pa") + (coord,) = [co for co, _ in coords_and_dims if co.name() == "sigma"] + self.assertEqual(coord.units, "1") if __name__ == "__main__": diff --git 
a/iris_grib/tests/unit/grib1_load_rules/test_grib1_load_translations.py b/iris_grib/tests/unit/grib1_load_rules/test_grib1_load_translations.py index a430943d7..caf876ee0 100644 --- a/iris_grib/tests/unit/grib1_load_rules/test_grib1_load_translations.py +++ b/iris_grib/tests/unit/grib1_load_rules/test_grib1_load_translations.py @@ -71,12 +71,8 @@ def _mock_eccodes__codes_get_native_type(grib_message, keyname): _mock_eccodes.codes_get_long = mock.Mock(side_effect=_mock_eccodes_fetch) _mock_eccodes.codes_get_string = mock.Mock(side_effect=_mock_eccodes_fetch) _mock_eccodes.codes_get_double = mock.Mock(side_effect=_mock_eccodes_fetch) -_mock_eccodes.codes_get_double_array = mock.Mock( - side_effect=_mock_eccodes_fetch -) -_mock_eccodes.codes_is_missing = mock.Mock( - side_effect=_mock_eccodes__codes_is_missing -) +_mock_eccodes.codes_get_double_array = mock.Mock(side_effect=_mock_eccodes_fetch) +_mock_eccodes.codes_is_missing = mock.Mock(side_effect=_mock_eccodes__codes_is_missing) _mock_eccodes.codes_get_native_type = mock.Mock( side_effect=_mock_eccodes__codes_get_native_type ) @@ -125,7 +121,7 @@ def _init_minimal_message(self, edition=1): "Nj": 1, "numberOfValues": 1, "alternativeRowScanning": 0, - "centre": "ecmf", + "centre": 74, # the UKMO centre id "year": 2007, "month": 3, "day": 23, @@ -191,9 +187,7 @@ def _run_timetests(self, test_set): # Expect GribWrapper construction to fail. with self.assertRaises(type(expected_error)) as ar_context: _ = iris_grib.GribWrapper(message) - self.assertEqual( - ar_context.exception.args, expected_error.args - ) + self.assertEqual(ar_context.exception.args, expected_error.args) continue # 'ELSE'... @@ -218,8 +212,7 @@ def _run_timetests(self, test_set): # Check the data-starttime calculation. 
interval_start_to_end = ( - wrapped_msg._phenomenonDateTime - - wrapped_msg._referenceDateTime + wrapped_msg._phenomenonDateTime - wrapped_msg._referenceDateTime ) if grib_edition == 1: interval_from_units = wrapped_msg.P1 @@ -281,9 +274,7 @@ def test_timeunits_calendar(self): TestGribTimecodes._run_timetests(self, tests) def test_timeunits_invalid(self): - tests = ( - (1, 111, TestGribTimecodes._err_bad_timeunit(111), 1.0, "??"), - ) + tests = ((1, 111, TestGribTimecodes._err_bad_timeunit(111), 1.0, "??"),) TestGribTimecodes._run_timetests(self, tests) def test_warn_unknown_pdts(self): diff --git a/iris_grib/tests/unit/grib_phenom_translation/test_grib_phenom_translation.py b/iris_grib/tests/unit/grib_phenom_translation/test_grib_phenom_translation.py index 519d76385..d16c7f7ad 100644 --- a/iris_grib/tests/unit/grib_phenom_translation/test_grib_phenom_translation.py +++ b/iris_grib/tests/unit/grib_phenom_translation/test_grib_phenom_translation.py @@ -2,13 +2,13 @@ # # This file is part of iris-grib and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -''' +""" Unit tests for the mod:`iris_grib.grib_phenom_translation` module. Carried over from old iris/tests/test_grib_phenom_translation.py. Code is out of step with current test conventions and standards. -''' +""" # Import iris_grib.tests first so that some things can be initialised before # importing anything else. 
@@ -22,30 +22,36 @@ class TestGribLookupTableType(tests.IrisTest): def test_lookuptable_type(self): - ll = gptx._LookupTable([('a', 1), ('b', 2)]) - assert ll['a'] == 1 - assert ll['q'] is None - ll['q'] = 15 - assert ll['q'] == 15 - ll['q'] = 15 - assert ll['q'] == 15 + ll = gptx._LookupTable([("a", 1), ("b", 2)]) + self.assertEqual(1, ll["a"]) + self.assertIsNone(ll["q"]) + ll["q"] = 15 + self.assertEqual(15, ll["q"]) + ll["q"] = 15 + self.assertEqual(15, ll["q"]) with self.assertRaises(KeyError): - ll['q'] = 7 - del ll['q'] - ll['q'] = 7 - assert ll['q'] == 7 + ll["q"] = 7 + del ll["q"] + ll["q"] = 7 + self.assertEqual(7, ll["q"]) class TestGribPhenomenonLookup(tests.IrisTest): def test_grib1_cf_lookup(self): - def check_grib1_cf(param, - standard_name, long_name, units, - height=None, - t2version=128, centre=98, expect_none=False): + def check_grib1_cf( + param, + standard_name, + long_name, + units, + height=None, + t2version=128, + centre=98, + expect_none=False, + ): a_cf_unit = cf_units.Unit(units) - cfdata = gptx.grib1_phenom_to_cf_info(param_number=param, - table2_version=t2version, - centre_number=centre) + cfdata = gptx.grib1_phenom_to_cf_info( + param_number=param, table2_version=t2version, centre_number=centre + ) if expect_none: self.assertIsNone(cfdata) else: @@ -57,25 +63,34 @@ def check_grib1_cf(param, else: self.assertEqual(cfdata.set_height, float(height)) - check_grib1_cf(165, 'x_wind', None, 'm s-1', 10.0) - check_grib1_cf(168, 'dew_point_temperature', None, 'K', 2) - check_grib1_cf(130, 'air_temperature', None, 'K') + check_grib1_cf(165, "x_wind", None, "m s-1", 10.0) + check_grib1_cf(168, "dew_point_temperature", None, "K", 2) + check_grib1_cf(130, "air_temperature", None, "K") check_grib1_cf(235, None, "grib_skin_temperature", "K") - check_grib1_cf(235, None, "grib_skin_temperature", "K", - t2version=9999, expect_none=True) - check_grib1_cf(235, None, "grib_skin_temperature", "K", - centre=9999, expect_none=True) - check_grib1_cf(9999, 
None, "grib_skin_temperature", "K", - expect_none=True) + check_grib1_cf( + 235, None, "grib_skin_temperature", "K", t2version=9999, expect_none=True + ) + check_grib1_cf( + 235, None, "grib_skin_temperature", "K", centre=9999, expect_none=True + ) + check_grib1_cf(9999, None, "grib_skin_temperature", "K", expect_none=True) def test_grib2_cf_lookup(self): - def check_grib2_cf(discipline, category, number, - standard_name, long_name, units, - expect_none=False): + def check_grib2_cf( + discipline, + category, + number, + standard_name, + long_name, + units, + expect_none=False, + ): a_cf_unit = cf_units.Unit(units) - cfdata = gptx.grib2_phenom_to_cf_info(param_discipline=discipline, - param_category=category, - param_number=number) + cfdata = gptx.grib2_phenom_to_cf_info( + param_discipline=discipline, + param_category=category, + param_number=number, + ) if expect_none: self.assertIsNone(cfdata) else: @@ -90,23 +105,26 @@ def check_grib2_cf(discipline, category, number, check_grib2_cf(10, 2, 0, "sea_ice_area_fraction", None, 1) check_grib2_cf(2, 0, 0, "land_area_fraction", None, 1) check_grib2_cf(0, 19, 1, None, "grib_physical_atmosphere_albedo", "%") - check_grib2_cf(0, 1, 64, - "atmosphere_mass_content_of_water_vapor", None, - "kg m-2") + check_grib2_cf( + 0, 1, 64, "atmosphere_mass_content_of_water_vapor", None, "kg m-2" + ) check_grib2_cf(2, 0, 7, "surface_altitude", None, "m") # These should fail - check_grib2_cf(9999, 2, 0, "sea_ice_area_fraction", None, 1, - expect_none=True) - check_grib2_cf(10, 9999, 0, "sea_ice_area_fraction", None, 1, - expect_none=True) - check_grib2_cf(10, 2, 9999, "sea_ice_area_fraction", None, 1, - expect_none=True) + check_grib2_cf(9999, 2, 0, "sea_ice_area_fraction", None, 1, expect_none=True) + check_grib2_cf(10, 9999, 0, "sea_ice_area_fraction", None, 1, expect_none=True) + check_grib2_cf(10, 2, 9999, "sea_ice_area_fraction", None, 1, expect_none=True) def test_cf_grib2_lookup(self): - def check_cf_grib2(standard_name, long_name, 
- discipline, category, number, units, - expect_none=False): + def check_cf_grib2( + standard_name, + long_name, + discipline, + category, + number, + units, + expect_none=False, + ): a_cf_unit = cf_units.Unit(units) gribdata = gptx.cf_phenom_to_grib2_info(standard_name, long_name) if expect_none: @@ -118,146 +136,272 @@ def check_cf_grib2(standard_name, long_name, self.assertEqual(gribdata.units, a_cf_unit) # These should work - check_cf_grib2("sea_surface_temperature", None, - 10, 3, 0, 'K') - check_cf_grib2("air_temperature", None, - 0, 0, 0, 'K') - check_cf_grib2("soil_temperature", None, - 2, 0, 2, "K") - check_cf_grib2("land_area_fraction", None, - 2, 0, 0, '1') - check_cf_grib2("land_binary_mask", None, - 2, 0, 0, '1') - check_cf_grib2("atmosphere_mass_content_of_water_vapor", None, - 0, 1, 64, "kg m-2") - check_cf_grib2("surface_altitude", None, - 2, 0, 7, "m") + check_cf_grib2("sea_surface_temperature", None, 10, 3, 0, "K") + check_cf_grib2("air_temperature", None, 0, 0, 0, "K") + check_cf_grib2("soil_temperature", None, 2, 0, 2, "K") + check_cf_grib2("land_area_fraction", None, 2, 0, 0, "1") + check_cf_grib2("land_binary_mask", None, 2, 0, 0, "1") + check_cf_grib2( + "atmosphere_mass_content_of_water_vapor", None, 0, 1, 64, "kg m-2" + ) + check_cf_grib2("surface_altitude", None, 2, 0, 7, "m") # These should fail - check_cf_grib2("air_temperature", "user_long_UNRECOGNISED", - 0, 0, 0, 'K') - check_cf_grib2("air_temperature_UNRECOGNISED", None, - 0, 0, 0, 'K', - expect_none=True) - check_cf_grib2(None, "user_long_UNRECOGNISED", - 0, 0, 0, 'K', - expect_none=True) - check_cf_grib2(None, "precipitable_water", - 0, 1, 3, 'kg m-2') - check_cf_grib2("invalid_unknown", "precipitable_water", - 0, 1, 3, 'kg m-2', - expect_none=True) - check_cf_grib2(None, None, 0, 0, 0, '', - expect_none=True) + check_cf_grib2("air_temperature", "user_long_UNRECOGNISED", 0, 0, 0, "K") + check_cf_grib2( + "air_temperature_UNRECOGNISED", None, 0, 0, 0, "K", expect_none=True + ) + 
check_cf_grib2(None, "user_long_UNRECOGNISED", 0, 0, 0, "K", expect_none=True) + check_cf_grib2(None, "precipitable_water", 0, 1, 3, "kg m-2") + check_cf_grib2( + "invalid_unknown", "precipitable_water", 0, 1, 3, "kg m-2", expect_none=True + ) + check_cf_grib2(None, None, 0, 0, 0, "", expect_none=True) class TestGRIBcode(tests.IrisTest): # GRIBCode is basically a namedtuple, so not all behaviour needs testing. # However, creation is a bit special so exercise all those cases. - def test_create_from_keys(self): - gribcode = GRIBCode( - edition_or_string=5, - discipline=7, - category=4, - number=199) - self.assertEqual(gribcode.edition, 5) + + # TODO: convert to pytest + replace duplications with parameterisation + # (mostly grib1/grib2, but also in one case str/repr) + def test_create_from_keys__grib2(self): + gribcode = GRIBCode(edition=2, discipline=7, category=4, number=199) + self.assertEqual(gribcode.edition, 2) self.assertEqual(gribcode.discipline, 7) self.assertEqual(gribcode.category, 4) self.assertEqual(gribcode.number, 199) - def test_create_from_args(self): - gribcode = GRIBCode(7, 3, 12, 99) - self.assertEqual(gribcode.edition, 7) + def test_create_from_keys__grib1(self): + gribcode = GRIBCode(edition=1, table_version=7, centre_number=4, number=199) + self.assertEqual(gribcode.edition, 1) + self.assertEqual(gribcode.table_version, 7) + self.assertEqual(gribcode.centre_number, 4) + self.assertEqual(gribcode.number, 199) + + def test_create_from_args__grib2(self): + gribcode = GRIBCode(2, 3, 12, 99) + self.assertEqual(gribcode.edition, 2) self.assertEqual(gribcode.discipline, 3) self.assertEqual(gribcode.category, 12) self.assertEqual(gribcode.number, 99) - def test_create_is_copy(self): - gribcode1 = GRIBCode(7, 3, 12, 99) - gribcode2 = GRIBCode(7, 3, 12, 99) + def test_create_from_args__grib1(self): + gribcode = GRIBCode(1, 3, 12, 99) + self.assertEqual(gribcode.edition, 1) + self.assertEqual(gribcode.table_version, 3) + 
self.assertEqual(gribcode.centre_number, 12) + self.assertEqual(gribcode.number, 99) + + def check_create_is_copy(self, edition): + gribcode1 = GRIBCode(edition, 3, 12, 99) + gribcode2 = GRIBCode(edition, 3, 12, 99) self.assertEqual(gribcode1, gribcode2) self.assertIsNot(gribcode1, gribcode2) - def test_create_from_gribcode(self): - gribcode1 = GRIBCode((4, 3, 2, 1)) + def test_create_is_copy__grib1(self): + self.check_create_is_copy(edition=1) + + def test_create_is_copy__grib2(self): + self.check_create_is_copy(edition=2) + + def check_create_from_gribcode(self, edition): + gribcode1 = GRIBCode((edition, 3, 2, 1)) gribcode2 = GRIBCode(gribcode1) self.assertEqual(gribcode1, gribcode2) # NOTE: *not* passthrough : it creates a copy # (though maybe not too significant, as it is immutable anyway?) self.assertIsNot(gribcode1, gribcode2) - def test_create_from_string(self): - gribcode = GRIBCode('xxx12xs-34 -5,678qqqq') + def test_create_from_gribcode__grib1(self): + self.check_create_from_gribcode(edition=1) + + def test_create_from_gribcode__grib2(self): + self.check_create_from_gribcode(edition=2) + + def check_create_from_string(self, edition): + gribcode = GRIBCode(f"xxx{edition}xs-34 -5,678qqqq") # NOTE: args 2 and 3 are *not* negative. - self.assertEqual(gribcode, GRIBCode(12, 34, 5, 678)) + self.assertEqual(gribcode, GRIBCode(edition, 34, 5, 678)) + + def test_create_from_string__grib1(self): + self.check_create_from_string(edition=1) - def test_create_from_own_string(self): + def test_create_from_string__grib2(self): + self.check_create_from_string(edition=2) + + def check_create_from_own_string(self, string_function, edition): # Check that GRIBCode string reprs are valid as create arguments. 
- gribcode = GRIBCode( - edition_or_string=2, - discipline=17, - category=94, - number=231) - grib_param_string = str(gribcode) + gribcode = GRIBCode(edition, 17, 94, 231) + grib_param_string = string_function(gribcode) newcode = GRIBCode(grib_param_string) self.assertEqual(newcode, gribcode) - def test_create_from_tuple(self): - gribcode = GRIBCode((4, 3, 2, 1)) - self.assertEqual(gribcode, GRIBCode(4, 3, 2, 1)) + def test_create_from_own_string__str__grib1(self): + self.check_create_from_own_string(str, edition=1) + + def test_create_from_own_string__str__grib2(self): + self.check_create_from_own_string(str, edition=2) + + def test_create_from_own_string__repr__grib1(self): + self.check_create_from_own_string(repr, edition=1) + + def test_create_from_own_string__repr__grib2(self): + self.check_create_from_own_string(repr, edition=2) + + def check_create_from_tuple(self, edition): + gribcode = GRIBCode((edition, 3, 2, 1)) + expected = GRIBCode(edition, 3, 2, 1) + self.assertEqual(expected, gribcode) + + def test_create_from_tuple__grib1(self): + self.check_create_from_tuple(edition=1) + + def test_create_from_tuple__grib2(self): + self.check_create_from_tuple(edition=2) def test_create_bad_nargs(self): # Between 1 and 4 args is not invalid call syntax, but it should fail. 
- with self.assertRaisesRegex( - ValueError, - 'Cannot create GRIBCode from 2 arguments'): + msg = ( + "Cannot create.* from 2 arguments.*" + r"GRIBCode\(\(1, 2\)\).*" + "expects either 1 or 4 arguments" + ) + with self.assertRaisesRegex(ValueError, msg): GRIBCode(1, 2) def test_create_bad_single_arg_None(self): - with self.assertRaisesRegex( - ValueError, - 'Cannot create GRIBCode from 0 arguments'): + msg = ( + "Cannot create GRIBCode from 0 arguments.*" + r"GRIBCode\(\(\)\).*" + "expects either 1 or 4 arguments" + ) + with self.assertRaisesRegex(ValueError, msg): GRIBCode(None) def test_create_bad_single_arg_empty_string(self): - with self.assertRaisesRegex( - ValueError, - 'Invalid argument for GRIBCode creation'): - GRIBCode('') + msg = ( + "Invalid argument for GRIBCode creation.*" + r"GRIBCode\(''\).*" + "requires 4 numbers, separated by non-numerals" + ) + with self.assertRaisesRegex(ValueError, msg): + GRIBCode("") def test_create_bad_single_arg_nonums(self): - with self.assertRaisesRegex( - ValueError, - 'Invalid argument for GRIBCode creation'): - GRIBCode('saas- dsa- ') + msg = ( + "Invalid argument for GRIBCode creation.*" + r"GRIBCode\('saas- dsa- '\).*" + "requires 4 numbers, separated by non-numerals" + ) + with self.assertRaisesRegex(ValueError, msg): + GRIBCode("saas- dsa- ") def test_create_bad_single_arg_less_than_4_nums(self): - with self.assertRaisesRegex( - ValueError, - 'Invalid argument for GRIBCode creation'): - GRIBCode('1,2,3') + msg = ( + "Invalid argument for GRIBCode creation.*" + r"GRIBCode\('1,2,3'\).*" + "requires 4 numbers, separated by non-numerals" + ) + with self.assertRaisesRegex(ValueError, msg): + GRIBCode("1,2,3") def test_create_bad_single_arg_number(self): - with self.assertRaisesRegex( - ValueError, - 'Invalid argument for GRIBCode creation'): + msg = ( + "Invalid argument for GRIBCode creation.*" + r"GRIBCode\('4'\).*" + "requires 4 numbers, separated by non-numerals" + ) + with self.assertRaisesRegex(ValueError, msg): 
GRIBCode(4) def test_create_bad_single_arg_single_numeric(self): - with self.assertRaisesRegex( - ValueError, - 'Invalid argument for GRIBCode creation'): - GRIBCode('44') + msg = ( + "Invalid argument for GRIBCode creation.*" + r"GRIBCode\('44'\).*" + "requires 4 numbers, separated by non-numerals" + ) + with self.assertRaisesRegex(ValueError, msg): + GRIBCode("44") def test_create_string_more_than_4_nums(self): # Note: does not error, just discards the extra. - gribcode = GRIBCode('1,2,3,4,5,6,7,8') + gribcode = GRIBCode("1,2,3,4,5,6,7,8") self.assertEqual(gribcode, GRIBCode(1, 2, 3, 4)) - def test__str__(self): - result = str(GRIBCode(2, 17, 3, 123)) - self.assertEqual(result, 'GRIB2:d017c003n123') - - -if __name__ == '__main__': + def check__str__(self, edition): + result = str(GRIBCode(edition, 17, 3, 123)) + arg1_char = {1: "t", 2: "d"}[edition] + expected = f"GRIB{edition}:{arg1_char}017c003n123" + self.assertEqual(expected, result) + + def test__str__grib1(self): + self.check__str__(edition=1) + + def test__str__grib2(self): + self.check__str__(edition=2) + + def check__repr__(self, edition): + result = repr(GRIBCode(edition, 17, 3, 123)) + if edition == 1: + expected = ( + "GRIBCode(edition=1, table_version=17, " "centre_number=3, number=123)" + ) + elif edition == 2: + expected = "GRIBCode(edition=2, discipline=17, " "category=3, number=123)" + self.assertEqual(result, expected) + + def test__repr__grib1(self): + self.check__repr__(edition=1) + + def test__repr__grib2(self): + self.check__repr__(edition=2) + + def test_bad_content__str_repr__badedition(self): + gribcode = GRIBCode(2, 11, 12, 13) + gribcode.edition = 77 + str_result = str(gribcode) + expected = ( + "" + ) + self.assertEqual(expected, str_result) + repr_result = repr(gribcode) + self.assertEqual(str_result, repr_result) + + def test_bad_content__str_repr__badmembervalue(self): + gribcode = GRIBCode(2, 11, 12, 13) + gribcode.discipline = None + str_result = str(gribcode) + expected = ( + "" 
+ ) + self.assertEqual(expected, str_result) + repr_result = repr(gribcode) + self.assertEqual(str_result, repr_result) + + def test_bad_content__str_repr__missingmember(self): + gribcode = GRIBCode(2, 11, 12, 13) + del gribcode.category + str_result = str(gribcode) + expected = ( + "" + ) + self.assertEqual(expected, str_result) + repr_result = repr(gribcode) + self.assertEqual(str_result, repr_result) + + def test_bad_create__invalid_edition(self): + with self.assertRaisesRegex(ValueError, "Invalid grib edition"): + GRIBCode(77, 1, 2, 3) + + def test_bad_create__arg_and_kwarg(self): + msg = "Keyword 'number'=7 is not compatible with a 4th argument." + with self.assertRaisesRegex(ValueError, msg): + GRIBCode(1, 2, 3, 4, number=7) + + +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/__init__.py b/iris_grib/tests/unit/load_convert/__init__.py index 4fecf59ec..68d333dfe 100644 --- a/iris_grib/tests/unit/load_convert/__init__.py +++ b/iris_grib/tests/unit/load_convert/__init__.py @@ -13,15 +13,15 @@ def empty_metadata(): metadata = OrderedDict() - metadata['factories'] = [] - metadata['references'] = [] - metadata['standard_name'] = None - metadata['long_name'] = None - metadata['units'] = None - metadata['attributes'] = {} - metadata['cell_methods'] = [] - metadata['dim_coords_and_dims'] = [] - metadata['aux_coords_and_dims'] = [] + metadata["factories"] = [] + metadata["references"] = [] + metadata["standard_name"] = None + metadata["long_name"] = None + metadata["units"] = None + metadata["attributes"] = {} + metadata["cell_methods"] = [] + metadata["dim_coords_and_dims"] = [] + metadata["aux_coords_and_dims"] = [] return metadata diff --git a/iris_grib/tests/unit/load_convert/test__hindcast_fix.py b/iris_grib/tests/unit/load_convert/test__hindcast_fix.py index 1d611fba2..7e566ad1a 100644 --- a/iris_grib/tests/unit/load_convert/test__hindcast_fix.py +++ b/iris_grib/tests/unit/load_convert/test__hindcast_fix.py @@ -18,7 +18,7 
@@ class TestHindcastFix(tests.IrisGribTest): # setup tests : provided value, fix-applies, expected-fixed - FixTest = namedtuple('FixTest', ('given', 'fixable', 'fixed')) + FixTest = namedtuple("FixTest", ("given", "fixable", "fixed")) test_values = [ FixTest(0, False, None), FixTest(100, False, None), @@ -27,10 +27,11 @@ class TestHindcastFix(tests.IrisGribTest): FixTest(2 * 2**30 + 1, True, -1), FixTest(2 * 2**30 + 2, True, -2), FixTest(3 * 2**30 - 1, True, -(2**30 - 1)), - FixTest(3 * 2**30, False, None)] + FixTest(3 * 2**30, False, None), + ] def setUp(self): - self.patch_warn = self.patch('warnings.warn') + self.patch_warn = self.patch("warnings.warn") def test_fix(self): # Check hindcast fixing. @@ -41,11 +42,12 @@ def test_fix(self): def test_fix_warning(self): # Check warning appears when enabled. - self.patch('iris_grib._load_convert.options.warn_on_unsupported', True) + self.patch("iris_grib._load_convert.options.warn_on_unsupported", True) hindcast_fix(2 * 2**30 + 5) self.assertEqual(self.patch_warn.call_count, 1) - self.assertIn('Re-interpreting large grib forecastTime', - self.patch_warn.call_args[0][0]) + self.assertIn( + "Re-interpreting large grib forecastTime", self.patch_warn.call_args[0][0] + ) def test_fix_warning_disabled(self): # Default is no warning. @@ -53,5 +55,5 @@ def test_fix_warning_disabled(self): self.assertEqual(self.patch_warn.call_count, 0) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_bitmap_section.py b/iris_grib/tests/unit/load_convert/test_bitmap_section.py index 64a24bd55..2fabdac22 100644 --- a/iris_grib/tests/unit/load_convert/test_bitmap_section.py +++ b/iris_grib/tests/unit/load_convert/test_bitmap_section.py @@ -22,11 +22,10 @@ def test_bitmap_unsupported(self): # bitMapIndicator in range 1-254. # Note that bitMapIndicator = 1-253 and bitMapIndicator = 254 mean two # different things, but load_convert treats them identically. 
- message = _make_test_message({6: {'bitMapIndicator': 100, - 'bitmap': None}}) - with self.assertRaisesRegex(TranslationError, 'unsupported bitmap'): + message = _make_test_message({6: {"bitMapIndicator": 100, "bitmap": None}}) + with self.assertRaisesRegex(TranslationError, "unsupported bitmap"): bitmap_section(message.sections[6]) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_calculate_increment.py b/iris_grib/tests/unit/load_convert/test_calculate_increment.py index 71dc27d4b..da0a63105 100644 --- a/iris_grib/tests/unit/load_convert/test_calculate_increment.py +++ b/iris_grib/tests/unit/load_convert/test_calculate_increment.py @@ -28,5 +28,5 @@ def test_with_mod(self): self.assertEqual(result, 1) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_convert.py b/iris_grib/tests/unit/load_convert/test_convert.py index b50f669c4..ebf02a6fd 100644 --- a/iris_grib/tests/unit/load_convert/test_convert.py +++ b/iris_grib/tests/unit/load_convert/test_convert.py @@ -19,11 +19,11 @@ class TestGribMessage(tests.IrisGribTest): def test_edition_2(self): def func(field, metadata): - return metadata['factories'].append(factory) + return metadata["factories"].append(factory) - sections = [{'editionNumber': 2}] + sections = [{"editionNumber": 2}] field = _make_test_message(sections) - this = 'iris_grib._load_convert.grib2_convert' + this = "iris_grib._load_convert.grib2_convert" factory = mock.sentinel.factory with mock.patch(this, side_effect=func) as grib2_convert: # The call being tested. 
@@ -33,9 +33,9 @@ def func(field, metadata): self.assertEqual(result, metadata) def test_edition_1_bad(self): - sections = [{'editionNumber': 1}] + sections = [{"editionNumber": 1}] field = _make_test_message(sections) - emsg = 'edition 1 is not supported' + emsg = "edition 1 is not supported" with self.assertRaisesRegex(TranslationError, emsg): convert(field) @@ -43,15 +43,15 @@ def test_edition_1_bad(self): class TestGribWrapper(tests.IrisGribTest): def test_edition_2_bad(self): # Test object with no '.sections', and '.edition' ==2. - field = mock.Mock(edition=2, spec=('edition')) - emsg = 'edition 2 is not supported' + field = mock.Mock(edition=2, spec=("edition")) + emsg = "edition 2 is not supported" with self.assertRaisesRegex(TranslationError, emsg): convert(field) def test_edition_1(self): # Test object with no '.sections', and '.edition' ==1. - field = mock.Mock(edition=1, spec=('edition')) - func = 'iris_grib._load_convert.grib1_convert' + field = mock.Mock(edition=1, spec=("edition")) + func = "iris_grib._load_convert.grib1_convert" metadata = mock.sentinel.metadata with mock.patch(func, return_value=metadata) as grib1_convert: result = convert(field) @@ -59,5 +59,5 @@ def test_edition_1(self): self.assertEqual(result, metadata) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_data_cutoff.py b/iris_grib/tests/unit/load_convert/test_data_cutoff.py index 03a4911ee..35eed1a72 100644 --- a/iris_grib/tests/unit/load_convert/test_data_cutoff.py +++ b/iris_grib/tests/unit/load_convert/test_data_cutoff.py @@ -20,17 +20,17 @@ class TestDataCutoff(tests.IrisGribTest): def _check(self, hours, minutes, request_warning, expect_warning=False): # Setup the environment. 
- patch_target = 'iris_grib._load_convert.options' + patch_target = "iris_grib._load_convert.options" with mock.patch(patch_target) as options: options.warn_on_unsupported = request_warning - with mock.patch('warnings.warn') as warn: + with mock.patch("warnings.warn") as warn: # The call being tested. data_cutoff(hours, minutes) # Check the result. if expect_warning: self.assertEqual(len(warn.mock_calls), 1) args, kwargs = warn.call_args - self.assertIn('data cutoff', args[0]) + self.assertIn("data cutoff", args[0]) else: self.assertEqual(len(warn.mock_calls), 0) @@ -59,5 +59,5 @@ def test_hours_and_minutes_warning(self): self._check(30, 40, True, True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_data_representation_section.py b/iris_grib/tests/unit/load_convert/test_data_representation_section.py index 094aa91fa..9bf034783 100644 --- a/iris_grib/tests/unit/load_convert/test_data_representation_section.py +++ b/iris_grib/tests/unit/load_convert/test_data_representation_section.py @@ -22,16 +22,16 @@ def test_supported_templates(self): template_nums = [0, 1, 2, 3, 4, 40, 41, 42, 50, 51, 61] for template_num in template_nums: message = _make_test_message( - {5: {'dataRepresentationTemplateNumber': template_num}}) + {5: {"dataRepresentationTemplateNumber": template_num}} + ) data_representation_section(message.sections[5]) def test_unsupported_template(self): - message = _make_test_message( - {5: {'dataRepresentationTemplateNumber': 5}}) - err_msg = r'Template \[5\] is not supported' + message = _make_test_message({5: {"dataRepresentationTemplateNumber": 5}}) + err_msg = r"Template \[5\] is not supported" with self.assertRaisesRegex(TranslationError, err_msg): data_representation_section(message.sections[5]) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_ellipsoid.py b/iris_grib/tests/unit/load_convert/test_ellipsoid.py 
index bf3c3d2f4..52dbbe23d 100644 --- a/iris_grib/tests/unit/load_convert/test_ellipsoid.py +++ b/iris_grib/tests/unit/load_convert/test_ellipsoid.py @@ -28,21 +28,23 @@ class Test(tests.IrisGribTest): def test_shape_unsupported(self): unsupported = [8, 9, 10, MDI] - emsg = 'unsupported shape of the earth' + emsg = "unsupported shape of the earth" for shape in unsupported: with self.assertRaisesRegex(TranslationError, emsg): ellipsoid(shape, MDI, MDI, MDI) def test_spherical_default_supported(self): - cs_by_shape = {0: icoord_systems.GeogCS(6367470), - 6: icoord_systems.GeogCS(6371229)} + cs_by_shape = { + 0: icoord_systems.GeogCS(6367470), + 6: icoord_systems.GeogCS(6371229), + } for shape, expected in cs_by_shape.items(): result = ellipsoid(shape, MDI, MDI, MDI) self.assertEqual(result, expected) def test_spherical_shape_1_no_radius(self): shape = 1 - emsg = 'radius to be specified' + emsg = "radius to be specified" with self.assertRaisesRegex(ValueError, emsg): ellipsoid(shape, MDI, MDI, MDI) @@ -55,19 +57,19 @@ def test_spherical_shape_1(self): def test_oblate_shape_3_7_no_axes(self): for shape in [3, 7]: - emsg = 'axis to be specified' + emsg = "axis to be specified" with self.assertRaisesRegex(ValueError, emsg): ellipsoid(shape, MDI, MDI, MDI) def test_oblate_shape_3_7_no_major(self): for shape in [3, 7]: - emsg = 'major axis to be specified' + emsg = "major axis to be specified" with self.assertRaisesRegex(ValueError, emsg): ellipsoid(shape, MDI, 1, MDI) def test_oblate_shape_3_7_no_minor(self): for shape in [3, 7]: - emsg = 'minor axis to be specified' + emsg = "minor axis to be specified" with self.assertRaisesRegex(ValueError, emsg): ellipsoid(shape, 1, MDI, MDI) @@ -83,5 +85,5 @@ def test_oblate_shape_3_7(self): self.assertEqual(result, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_ellipsoid_geometry.py b/iris_grib/tests/unit/load_convert/test_ellipsoid_geometry.py index 
3f7e8b74d..0ed1e93b9 100644 --- a/iris_grib/tests/unit/load_convert/test_ellipsoid_geometry.py +++ b/iris_grib/tests/unit/load_convert/test_ellipsoid_geometry.py @@ -16,17 +16,19 @@ class Test(tests.IrisGribTest): def setUp(self): - self.section = {'scaledValueOfEarthMajorAxis': 10, - 'scaleFactorOfEarthMajorAxis': 1, - 'scaledValueOfEarthMinorAxis': 100, - 'scaleFactorOfEarthMinorAxis': 2, - 'scaledValueOfRadiusOfSphericalEarth': 1000, - 'scaleFactorOfRadiusOfSphericalEarth': 3} + self.section = { + "scaledValueOfEarthMajorAxis": 10, + "scaleFactorOfEarthMajorAxis": 1, + "scaledValueOfEarthMinorAxis": 100, + "scaleFactorOfEarthMinorAxis": 2, + "scaledValueOfRadiusOfSphericalEarth": 1000, + "scaleFactorOfRadiusOfSphericalEarth": 3, + } def test_geometry(self): result = ellipsoid_geometry(self.section) self.assertEqual(result, (1.0, 1.0, 1.0)) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_ensemble_identifier.py b/iris_grib/tests/unit/load_convert/test_ensemble_identifier.py index 11ae5edd5..84b78213d 100644 --- a/iris_grib/tests/unit/load_convert/test_ensemble_identifier.py +++ b/iris_grib/tests/unit/load_convert/test_ensemble_identifier.py @@ -22,21 +22,23 @@ class Test(tests.IrisGribTest): def setUp(self): - self.patch('warnings.warn') + self.patch("warnings.warn") def _check(self, request_warning): - section = {'perturbationNumber': 17} - this = 'iris_grib._load_convert.options' + section = {"perturbationNumber": 17} + this = "iris_grib._load_convert.options" with mock.patch(this, warn_on_unsupported=request_warning): realization = ensemble_identifier(section) - expected = DimCoord(section['perturbationNumber'], - standard_name='realization', - units='no_unit') + expected = DimCoord( + section["perturbationNumber"], + standard_name="realization", + units="no_unit", + ) self.assertEqual(realization, expected) if request_warning: warn_msgs = [mcall[1][0] for mcall in 
warnings.warn.mock_calls] - expected_msgs = ['type of ensemble', 'number of forecasts'] + expected_msgs = ["type of ensemble", "number of forecasts"] for emsg in expected_msgs: matches = [wmsg for wmsg in warn_msgs if emsg in wmsg] self.assertEqual(len(matches), 1) @@ -51,5 +53,5 @@ def test_ens_warn(self): self._check(True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_fixup_float32_from_int32.py b/iris_grib/tests/unit/load_convert/test_fixup_float32_from_int32.py index da02b1545..e5d187b0d 100644 --- a/iris_grib/tests/unit/load_convert/test_fixup_float32_from_int32.py +++ b/iris_grib/tests/unit/load_convert/test_fixup_float32_from_int32.py @@ -16,7 +16,7 @@ class Test(tests.IrisGribTest): def test_negative(self): - result = fixup_float32_from_int32(-0x3f000000) + result = fixup_float32_from_int32(-0x3F000000) self.assertEqual(result, -0.5) def test_zero(self): @@ -24,9 +24,9 @@ def test_zero(self): self.assertEqual(result, 0) def test_positive(self): - result = fixup_float32_from_int32(0x3f000000) + result = fixup_float32_from_int32(0x3F000000) self.assertEqual(result, 0.5) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_fixup_int32_from_uint32.py b/iris_grib/tests/unit/load_convert/test_fixup_int32_from_uint32.py index 6b4445684..b4c3f8f00 100644 --- a/iris_grib/tests/unit/load_convert/test_fixup_int32_from_uint32.py +++ b/iris_grib/tests/unit/load_convert/test_fixup_int32_from_uint32.py @@ -38,5 +38,5 @@ def test_already_negative(self): self.assertEqual(result, -7) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_forecast_period_coord.py b/iris_grib/tests/unit/load_convert/test_forecast_period_coord.py index a932428dd..75de7facb 100644 --- a/iris_grib/tests/unit/load_convert/test_forecast_period_coord.py +++ 
b/iris_grib/tests/unit/load_convert/test_forecast_period_coord.py @@ -19,24 +19,25 @@ class Test(tests.IrisGribTest): def test(self): # (indicatorOfUnitOfTimeRange, forecastTime, expected-hours) - times = [(0, 60, 1), # minutes - (1, 2, 2), # hours - (2, 1, 24), # days - (10, 2, 6), # 3 hours - (11, 3, 18), # 6 hours - (12, 2, 24), # 12 hours - (13, 3600, 1)] # seconds + times = [ + (0, 60, 1), # minutes + (1, 2, 2), # hours + (2, 1, 24), # days + (10, 2, 6), # 3 hours + (11, 3, 18), # 6 hours + (12, 2, 24), # 12 hours + (13, 3600, 1), + ] # seconds for indicatorOfUnitOfTimeRange, forecastTime, hours in times: - coord = forecast_period_coord(indicatorOfUnitOfTimeRange, - forecastTime) + coord = forecast_period_coord(indicatorOfUnitOfTimeRange, forecastTime) self.assertIsInstance(coord, DimCoord) - self.assertEqual(coord.standard_name, 'forecast_period') - self.assertEqual(coord.units, 'hours') + self.assertEqual(coord.standard_name, "forecast_period") + self.assertEqual(coord.units, "hours") self.assertEqual(coord.shape, (1,)) self.assertEqual(coord.points[0], hours) self.assertFalse(coord.has_bounds()) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_generating_process.py b/iris_grib/tests/unit/load_convert/test_generating_process.py index eca8e15d7..479a8ecce 100644 --- a/iris_grib/tests/unit/load_convert/test_generating_process.py +++ b/iris_grib/tests/unit/load_convert/test_generating_process.py @@ -17,29 +17,30 @@ class TestGeneratingProcess(tests.IrisGribTest): def setUp(self): - self.warn_patch = self.patch('warnings.warn') + self.warn_patch = self.patch("warnings.warn") def test_nowarn(self): generating_process(None) self.assertEqual(self.warn_patch.call_count, 0) def _check_warnings(self, with_forecast=True): - module = 'iris_grib._load_convert' - self.patch(module + '.options.warn_on_unsupported', True) + module = "iris_grib._load_convert" + self.patch(module + 
".options.warn_on_unsupported", True) call_args = [None] call_kwargs = {} expected_fragments = [ - 'Unable to translate type of generating process', - 'Unable to translate background generating process'] + "Unable to translate type of generating process", + "Unable to translate background generating process", + ] if with_forecast: - expected_fragments.append( - 'Unable to translate forecast generating process') + expected_fragments.append("Unable to translate forecast generating process") else: - call_kwargs['include_forecast_process'] = False + call_kwargs["include_forecast_process"] = False generating_process(*call_args, **call_kwargs) got_msgs = [call[0][0] for call in self.warn_patch.call_args_list] - for got_msg, expected_fragment in zip(sorted(got_msgs), - sorted(expected_fragments)): + for got_msg, expected_fragment in zip( + sorted(got_msgs), sorted(expected_fragments) + ): self.assertIn(expected_fragment, got_msg) def test_warn_full(self): @@ -49,5 +50,5 @@ def test_warn_no_forecast(self): self._check_warnings(with_forecast=False) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grib2_convert.py b/iris_grib/tests/unit/load_convert/test_grib2_convert.py index 4463195c3..cc06cd097 100644 --- a/iris_grib/tests/unit/load_convert/test_grib2_convert.py +++ b/iris_grib/tests/unit/load_convert/test_grib2_convert.py @@ -18,42 +18,56 @@ class Test(tests.IrisGribTest): def setUp(self): - this = 'iris_grib._load_convert' - self.patch('{}.reference_time_coord'.format(this), return_value=None) - self.patch('{}.grid_definition_section'.format(this)) - self.patch('{}.product_definition_section'.format(this)) - self.patch('{}.data_representation_section'.format(this)) - self.patch('{}.bitmap_section'.format(this)) + this = "iris_grib._load_convert" + self.patch("{}.reference_time_coord".format(this), return_value=None) + self.patch("{}.grid_definition_section".format(this)) + 
self.patch("{}.product_definition_section".format(this)) + self.patch("{}.data_representation_section".format(this)) + self.patch("{}.bitmap_section".format(this)) def test(self): - sections = [{'discipline': mock.sentinel.discipline}, # section 0 - {'centre': 'ecmf', # section 1 - 'tablesVersion': mock.sentinel.tablesVersion}, - None, # section 2 - mock.sentinel.grid_definition_section, # section 3 - mock.sentinel.product_definition_section, # section 4 - mock.sentinel.data_representation_section, # section 5 - mock.sentinel.bitmap_section] # section 6 + sections = [ + {"discipline": mock.sentinel.discipline}, # section 0 + { + "centre": "ecmf", # section 1 + "tablesVersion": mock.sentinel.tablesVersion, + }, + None, # section 2 + mock.sentinel.grid_definition_section, # section 3 + mock.sentinel.product_definition_section, # section 4 + mock.sentinel.data_representation_section, # section 5 + mock.sentinel.bitmap_section, + ] # section 6 field = _make_test_message(sections) - metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } expected = copy.deepcopy(metadata) - centre = 'European Centre for Medium Range Weather Forecasts' - expected['attributes'] = {'centre': centre} + centre = "European Centre for Medium Range Weather Forecasts" + expected["attributes"] = {"centre": centre} # The call being tested. 
grib2_convert(field, metadata) self.assertEqual(metadata, expected) this = iris_grib._load_convert this.reference_time_coord.assert_called_with(sections[1]) - this.grid_definition_section.assert_called_with(sections[3], - expected) - args = (sections[4], expected, sections[0]['discipline'], - sections[1]['tablesVersion'], None) + this.grid_definition_section.assert_called_with(sections[3], expected) + args = ( + sections[4], + expected, + sections[0]["discipline"], + sections[1]["tablesVersion"], + None, + ) this.product_definition_section.assert_called_with(*args) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_0_and_1.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_0_and_1.py index 8d8be13b9..1d8a9fa3b 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_0_and_1.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_0_and_1.py @@ -30,142 +30,139 @@ def get_computed_key(self, key): class Test_resolution_flags(tests.IrisGribTest): - def section_3(self): - section = _Section({ - 'Ni': 6, - 'Nj': 6, - 'latitudeOfFirstGridPoint': 0, - 'longitudeOfFirstGridPoint': 0, - 'resolutionAndComponentFlags': 0, - 'latitudeOfLastGridPoint': 5000000, - 'longitudeOfLastGridPoint': 5000000, - 'iDirectionIncrement': 0, - 'jDirectionIncrement': 0, - 'scanningMode': 0b01000000, - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 0, - }) + section = _Section( + { + "Ni": 6, + "Nj": 6, + "latitudeOfFirstGridPoint": 0, + "longitudeOfFirstGridPoint": 0, + "resolutionAndComponentFlags": 0, + "latitudeOfLastGridPoint": 5000000, + "longitudeOfLastGridPoint": 5000000, + "iDirectionIncrement": 0, + "jDirectionIncrement": 0, + "scanningMode": 0b01000000, + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 0, + } + ) return section - def expected(self, x_dim, y_dim, x_points, y_points, 
x_neg=True, - y_neg=True): + def expected(self, x_dim, y_dim, x_points, y_points, x_neg=True, y_neg=True): # Prepare the expectation. expected = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) if x_neg: x_points = x_points[::-1] - x = iris.coords.DimCoord(x_points, - standard_name='longitude', - units='degrees', - coord_system=cs) + x = iris.coords.DimCoord( + x_points, standard_name="longitude", units="degrees", coord_system=cs + ) if y_neg: y_points = y_points[::-1] - y = iris.coords.DimCoord(y_points, - standard_name='latitude', - units='degrees', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + y = iris.coords.DimCoord( + y_points, standard_name="latitude", units="degrees", coord_system=cs + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test_without_increments(self): section = self.section_3() metadata = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) - grid_definition_template_0_and_1(section, metadata, 'latitude', - 'longitude', cs) - x_points = np.array([0., 1., 2., 3., 4., 5.]) - y_points = np.array([0., 1., 2., 3., 4., 5.]) - expected = self.expected(1, 0, x_points, y_points, x_neg=False, - y_neg=False) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) + x_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + y_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + expected = self.expected(1, 0, x_points, y_points, x_neg=False, y_neg=False) self.assertEqual(metadata, expected) def test_with_increments(self): section = self.section_3() - section['resolutionAndComponentFlags'] = 48 - section['iDirectionIncrement'] = 1000000 - section['jDirectionIncrement'] = 1000000 + section["resolutionAndComponentFlags"] = 48 + section["iDirectionIncrement"] = 1000000 + section["jDirectionIncrement"] = 1000000 metadata = empty_metadata() cs = 
iris.coord_systems.GeogCS(6367470) - grid_definition_template_0_and_1(section, metadata, 'latitude', - 'longitude', cs) - x_points = np.array([0., 1., 2., 3., 4., 5.]) - y_points = np.array([0., 1., 2., 3., 4., 5.]) - expected = self.expected(1, 0, x_points, y_points, x_neg=False, - y_neg=False) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) + x_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + y_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + expected = self.expected(1, 0, x_points, y_points, x_neg=False, y_neg=False) self.assertEqual(metadata, expected) def test_with_i_not_j_increment(self): section = self.section_3() - section['resolutionAndComponentFlags'] = 32 - section['iDirectionIncrement'] = 1000000 + section["resolutionAndComponentFlags"] = 32 + section["iDirectionIncrement"] = 1000000 metadata = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) - grid_definition_template_0_and_1(section, metadata, 'latitude', - 'longitude', cs) - x_points = np.array([0., 1., 2., 3., 4., 5.]) - y_points = np.array([0., 1., 2., 3., 4., 5.]) - expected = self.expected(1, 0, x_points, y_points, x_neg=False, - y_neg=False) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) + x_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + y_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + expected = self.expected(1, 0, x_points, y_points, x_neg=False, y_neg=False) self.assertEqual(metadata, expected) def test_with_j_not_i_increment(self): section = self.section_3() - section['resolutionAndComponentFlags'] = 16 - section['jDirectionIncrement'] = 1000000 + section["resolutionAndComponentFlags"] = 16 + section["jDirectionIncrement"] = 1000000 metadata = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) - grid_definition_template_0_and_1(section, metadata, 'latitude', - 'longitude', cs) - x_points = np.array([0., 1., 2., 3., 4., 5.]) - y_points = np.array([0., 1., 2., 3., 4., 5.]) - expected = 
self.expected(1, 0, x_points, y_points, x_neg=False, - y_neg=False) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) + x_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + y_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + expected = self.expected(1, 0, x_points, y_points, x_neg=False, y_neg=False) self.assertEqual(metadata, expected) def test_without_increments_crossing_0_lon(self): section = self.section_3() - section['longitudeOfFirstGridPoint'] = 355000000 - section['Ni'] = 11 + section["longitudeOfFirstGridPoint"] = 355000000 + section["Ni"] = 11 metadata = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) - grid_definition_template_0_and_1(section, metadata, 'latitude', - 'longitude', cs) + grid_definition_template_0_and_1(section, metadata, "latitude", "longitude", cs) x_points = np.array( - [355., 356., 357., 358., 359., 360., 361., 362., 363., 364., 365.] + [ + 355.0, + 356.0, + 357.0, + 358.0, + 359.0, + 360.0, + 361.0, + 362.0, + 363.0, + 364.0, + 365.0, + ] ) - y_points = np.array([0., 1., 2., 3., 4., 5.]) - expected = self.expected(1, 0, x_points, y_points, x_neg=False, - y_neg=False) + y_points = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) + expected = self.expected(1, 0, x_points, y_points, x_neg=False, y_neg=False) self.assertEqual(metadata, expected) class Test(tests.IrisGribTest): - def test_unsupported_quasi_regular__number_of_octets(self): - section = {'numberOfOctectsForNumberOfPoints': 1} + section = {"numberOfOctectsForNumberOfPoints": 1} cs = None metadata = None - with self.assertRaisesRegex(TranslationError, 'quasi-regular'): - grid_definition_template_0_and_1(section, - metadata, - 'latitude', - 'longitude', - cs) + with self.assertRaisesRegex(TranslationError, "quasi-regular"): + grid_definition_template_0_and_1( + section, metadata, "latitude", "longitude", cs + ) def test_unsupported_quasi_regular__interpretation(self): - section = {'numberOfOctectsForNumberOfPoints': 1, - 
'interpretationOfNumberOfPoints': 1} + section = { + "numberOfOctectsForNumberOfPoints": 1, + "interpretationOfNumberOfPoints": 1, + } cs = None metadata = None - with self.assertRaisesRegex(TranslationError, 'quasi-regular'): - grid_definition_template_0_and_1(section, - metadata, - 'latitude', - 'longitude', - cs) + with self.assertRaisesRegex(TranslationError, "quasi-regular"): + grid_definition_template_0_and_1( + section, metadata, "latitude", "longitude", cs + ) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_10.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_10.py index 13d17a73d..093f75079 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_10.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_10.py @@ -26,25 +26,25 @@ class Test(tests.IrisGribTest): def section_3(self): section = { - 'gridDefinitionTemplateNumber': 10, - 'shapeOfTheEarth': 1, - 'scaleFactorOfRadiusOfSphericalEarth': 0, - 'scaledValueOfRadiusOfSphericalEarth': 6371200, - 'scaleFactorOfEarthMajorAxis': 0, - 'scaledValueOfEarthMajorAxis': 0, - 'scaleFactorOfEarthMinorAxis': 0, - 'scaledValueOfEarthMinorAxis': 0, - 'Ni': 181, - 'Nj': 213, - 'latitudeOfFirstGridPoint': 2351555, - 'latitudeOfLastGridPoint': 25088204, - 'LaD': 14000000, - 'longitudeOfFirstGridPoint': 114990304, - 'longitudeOfLastGridPoint': 135009712, - 'resolutionAndComponentFlags': 56, - 'scanningMode': 64, - 'Di': 12000000, - 'Dj': 12000000 + "gridDefinitionTemplateNumber": 10, + "shapeOfTheEarth": 1, + "scaleFactorOfRadiusOfSphericalEarth": 0, + "scaledValueOfRadiusOfSphericalEarth": 6371200, + "scaleFactorOfEarthMajorAxis": 0, + "scaledValueOfEarthMajorAxis": 0, + "scaleFactorOfEarthMinorAxis": 0, + "scaledValueOfEarthMinorAxis": 0, + "Ni": 181, + "Nj": 213, + "latitudeOfFirstGridPoint": 2351555, + "latitudeOfLastGridPoint": 25088204, + "LaD": 14000000, + 
"longitudeOfFirstGridPoint": 114990304, + "longitudeOfLastGridPoint": 135009712, + "resolutionAndComponentFlags": 56, + "scanningMode": 64, + "Di": 12000000, + "Dj": 12000000, } return section @@ -52,25 +52,28 @@ def expected(self, y_dim, x_dim): # Prepare the expectation. expected = empty_metadata() ellipsoid = iris.coord_systems.GeogCS(6371200.0) - cs = iris.coord_systems.Mercator(standard_parallel=14., - ellipsoid=ellipsoid) + cs = iris.coord_systems.Mercator(standard_parallel=14.0, ellipsoid=ellipsoid) nx = 181 x_origin = 12406918.990644248 dx = 12000 - x = iris.coords.DimCoord(np.arange(nx) * dx + x_origin, - 'projection_x_coordinate', - units='m', - coord_system=cs) + x = iris.coords.DimCoord( + np.arange(nx) * dx + x_origin, + "projection_x_coordinate", + units="m", + coord_system=cs, + ) ny = 213 y_origin = 253793.10903714459 dy = 12000 - y = iris.coords.DimCoord(np.arange(ny) * dy + y_origin, - 'projection_y_coordinate', - units='m', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + y = iris.coords.DimCoord( + np.arange(ny) * dy + y_origin, + "projection_y_coordinate", + units="m", + coord_system=cs, + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -81,5 +84,5 @@ def test(self): self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_12.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_12.py index f75b63ae1..09d46a64b 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_12.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_12.py @@ -28,28 +28,28 @@ class Test(tests.IrisGribTest): def section_3(self): section = { - 'shapeOfTheEarth': 7, - 'scaleFactorOfRadiusOfSphericalEarth': MDI, - 
'scaledValueOfRadiusOfSphericalEarth': MDI, - 'scaleFactorOfEarthMajorAxis': 3, - 'scaledValueOfEarthMajorAxis': 6377563396, - 'scaleFactorOfEarthMinorAxis': 3, - 'scaledValueOfEarthMinorAxis': 6356256909, - 'Ni': 4, - 'Nj': 3, - 'latitudeOfReferencePoint': 49000000, - 'longitudeOfReferencePoint': -2000000, - 'resolutionAndComponentFlags': 0, - 'scaleFactorAtReferencePoint': 0.9996012717, - 'XR': 40000000, - 'YR': -10000000, - 'scanningMode': 64, - 'Di': 200000, - 'Dj': 100000, - 'X1': 29300000, - 'Y1': 9200000, - 'X2': 29900000, - 'Y2': 9400000 + "shapeOfTheEarth": 7, + "scaleFactorOfRadiusOfSphericalEarth": MDI, + "scaledValueOfRadiusOfSphericalEarth": MDI, + "scaleFactorOfEarthMajorAxis": 3, + "scaledValueOfEarthMajorAxis": 6377563396, + "scaleFactorOfEarthMinorAxis": 3, + "scaledValueOfEarthMinorAxis": 6356256909, + "Ni": 4, + "Nj": 3, + "latitudeOfReferencePoint": 49000000, + "longitudeOfReferencePoint": -2000000, + "resolutionAndComponentFlags": 0, + "scaleFactorAtReferencePoint": 0.9996012717, + "XR": 40000000, + "YR": -10000000, + "scanningMode": 64, + "Di": 200000, + "Dj": 100000, + "X1": 29300000, + "Y1": 9200000, + "X2": 29900000, + "Y2": 9400000, } return section @@ -57,28 +57,29 @@ def expected(self, y_dim, x_dim, x_negative=False, y_negative=False): # Prepare the expectation. 
expected = empty_metadata() ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cs = iris.coord_systems.TransverseMercator(49, -2, 400000, -100000, - 0.9996012717, ellipsoid) + cs = iris.coord_systems.TransverseMercator( + 49, -2, 400000, -100000, 0.9996012717, ellipsoid + ) nx = 4 x_origin = 293000 dx = 2000 x_array = np.arange(nx) * dx + x_origin if x_negative: x_array = np.flip(x_array) - x = iris.coords.DimCoord(x_array, - 'projection_x_coordinate', units='m', - coord_system=cs) + x = iris.coords.DimCoord( + x_array, "projection_x_coordinate", units="m", coord_system=cs + ) ny = 3 y_origin = 92000 dy = 1000 y_array = np.arange(ny) * dy + y_origin if y_negative: y_array = np.flip(y_array) - y = iris.coords.DimCoord(y_array, - 'projection_y_coordinate', units='m', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + y = iris.coords.DimCoord( + y_array, "projection_y_coordinate", units="m", coord_system=cs + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -90,18 +91,18 @@ def test(self): def test_spherical(self): section = self.section_3() - section['shapeOfTheEarth'] = 0 + section["shapeOfTheEarth"] = 0 metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(0, 1) - cs = expected['dim_coords_and_dims'][0][0].coord_system + cs = expected["dim_coords_and_dims"][0][0].coord_system cs.ellipsoid = iris.coord_systems.GeogCS(6367470) self.assertEqual(metadata, expected) def test_negative_x(self): section = self.section_3() - section['scanningMode'] = 0b11000000 - section['X1'], section['X2'] = section['X2'], section['X1'] + section["scanningMode"] = 0b11000000 + section["X1"], section["X2"] = section["X2"], section["X1"] metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(0, 1, x_negative=True) @@ 
-109,7 +110,7 @@ def test_negative_x(self): def test_x_inconsistent_direction(self): section = self.section_3() - section['scanningMode'] = 0b11000000 + section["scanningMode"] = 0b11000000 metadata = empty_metadata() with warnings.catch_warnings(record=True) as warn: grid_definition_template_12(section, metadata) @@ -121,18 +122,16 @@ def test_x_inconsistent_direction(self): def test_x_inconsistent_steps(self): section = self.section_3() - section['Ni'] += 1 + section["Ni"] += 1 metadata = empty_metadata() - expected_regex = ( - "X definition inconsistent: .* incompatible with step-size") - with self.assertRaisesRegex(iris.exceptions.TranslationError, - expected_regex): + expected_regex = "X definition inconsistent: .* incompatible with step-size" + with self.assertRaisesRegex(iris.exceptions.TranslationError, expected_regex): grid_definition_template_12(section, metadata) def test_negative_y(self): section = self.section_3() - section['scanningMode'] = 0b00000000 - section['Y1'], section['Y2'] = section['Y2'], section['Y1'] + section["scanningMode"] = 0b00000000 + section["Y1"], section["Y2"] = section["Y2"], section["Y1"] metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(0, 1, y_negative=True) @@ -140,7 +139,7 @@ def test_negative_y(self): def test_y_inconsistent_direction(self): section = self.section_3() - section['scanningMode'] = 0b00000000 + section["scanningMode"] = 0b00000000 metadata = empty_metadata() with warnings.catch_warnings(record=True) as warn: grid_definition_template_12(section, metadata) @@ -152,17 +151,15 @@ def test_y_inconsistent_direction(self): def test_y_inconsistent_steps(self): section = self.section_3() - section['Nj'] += 1 + section["Nj"] += 1 metadata = empty_metadata() - expected_regex = ( - "Y definition inconsistent: .* incompatible with step-size") - with self.assertRaisesRegex(iris.exceptions.TranslationError, - expected_regex): + expected_regex = "Y definition inconsistent: .* 
incompatible with step-size" + with self.assertRaisesRegex(iris.exceptions.TranslationError, expected_regex): grid_definition_template_12(section, metadata) def test_transposed(self): section = self.section_3() - section['scanningMode'] = 0b01100000 + section["scanningMode"] = 0b01100000 metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(1, 0) @@ -172,33 +169,32 @@ def test_di_tolerance(self): # Even though Ni * Di doesn't exactly match X1 to X2 it should # be close enough to allow the translation. section = self.section_3() - section['X2'] += 1 + section["X2"] += 1 metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(0, 1) - x = expected['dim_coords_and_dims'][1][0] + x = expected["dim_coords_and_dims"][1][0] x.points = np.linspace(293000, 299000.01, 4) self.assertEqual(metadata, expected) def test_incompatible_grid_extent(self): section = self.section_3() - section['X2'] += 100 + section["X2"] += 100 metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, - 'grid'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, "grid"): grid_definition_template_12(section, metadata) def test_scale_workaround(self): section = self.section_3() - section['scaleFactorAtReferencePoint'] = 1065346526 + section["scaleFactorAtReferencePoint"] = 1065346526 metadata = empty_metadata() grid_definition_template_12(section, metadata) expected = self.expected(0, 1) # A float32 can't hold exactly the same value. 
- cs = expected['dim_coords_and_dims'][0][0].coord_system + cs = expected["dim_coords_and_dims"][0][0].coord_system cs.scale_factor_at_central_meridian = 0.9996012449264526 self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_140.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_140.py index 95e4328bd..a99010e31 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_140.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_140.py @@ -24,59 +24,60 @@ class Test(tests.IrisGribTest): - def section_3(self): section = { - 'gridDefinitionTemplateNumber': 140, - 'shapeOfTheEarth': 4, - 'scaleFactorOfRadiusOfSphericalEarth': MDI, - 'scaledValueOfRadiusOfSphericalEarth': MDI, - 'scaleFactorOfEarthMajorAxis': MDI, - 'scaledValueOfEarthMajorAxis': MDI, - 'scaleFactorOfEarthMinorAxis': MDI, - 'scaledValueOfEarthMinorAxis': MDI, - 'numberOfPointsAlongXAxis': 2, - 'numberOfPointsAlongYAxis': 2, - 'latitudeOfFirstGridPoint': 53988880, - 'longitudeOfFirstGridPoint': -4027984, - 'standardParallelInMicrodegrees': 54900000, - 'centralLongitudeInMicrodegrees': -2500000, - 'resolutionAndComponentFlags': 0b00110000, - 'xDirectionGridLengthInMillimetres': 2000000, - 'yDirectionGridLengthInMillimetres': 2000000, - 'scanningMode': 0b01000000, + "gridDefinitionTemplateNumber": 140, + "shapeOfTheEarth": 4, + "scaleFactorOfRadiusOfSphericalEarth": MDI, + "scaledValueOfRadiusOfSphericalEarth": MDI, + "scaleFactorOfEarthMajorAxis": MDI, + "scaledValueOfEarthMajorAxis": MDI, + "scaleFactorOfEarthMinorAxis": MDI, + "scaledValueOfEarthMinorAxis": MDI, + "numberOfPointsAlongXAxis": 2, + "numberOfPointsAlongYAxis": 2, + "latitudeOfFirstGridPoint": 53988880, + "longitudeOfFirstGridPoint": -4027984, + "standardParallelInMicrodegrees": 54900000, + "centralLongitudeInMicrodegrees": -2500000, + 
"resolutionAndComponentFlags": 0b00110000, + "xDirectionGridLengthInMillimetres": 2000000, + "yDirectionGridLengthInMillimetres": 2000000, + "scanningMode": 0b01000000, } return section def expected(self, y_dim, x_dim): # Prepare the expectation. expected = empty_metadata() - ellipsoid = iris.coord_systems.GeogCS( - 6378137, - inverse_flattening=298.257222101) + ellipsoid = iris.coord_systems.GeogCS(6378137, inverse_flattening=298.257222101) cs = iris.coord_systems.LambertAzimuthalEqualArea( latitude_of_projection_origin=54.9, longitude_of_projection_origin=-2.5, false_easting=0, false_northing=0, - ellipsoid=ellipsoid) + ellipsoid=ellipsoid, + ) lon0 = -4027984 * 1e-6 lat0 = 53988880 * 1e-6 - x0m, y0m = cs.as_cartopy_crs().transform_point( - lon0, lat0, ccrs.Geodetic()) - dxm = dym = 2000. + x0m, y0m = cs.as_cartopy_crs().transform_point(lon0, lat0, ccrs.Geodetic()) + dxm = dym = 2000.0 x_points = x0m + dxm * np.arange(2) y_points = y0m + dym * np.arange(2) - x = iris.coords.DimCoord(x_points, - standard_name='projection_x_coordinate', - units='m', - coord_system=cs) - y = iris.coords.DimCoord(y_points, - standard_name='projection_y_coordinate', - units='m', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + x = iris.coords.DimCoord( + x_points, + standard_name="projection_x_coordinate", + units="m", + coord_system=cs, + ) + y = iris.coords.DimCoord( + y_points, + standard_name="projection_y_coordinate", + units="m", + coord_system=cs, + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -87,5 +88,5 @@ def test(self): self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_20.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_20.py index 
3eb17c25b..949ec0452 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_20.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_20.py @@ -24,28 +24,27 @@ class Test(tests.IrisGribTest): - def section_3(self): section = { - 'gridDefinitionTemplateNumber': 20, - 'shapeOfTheEarth': 0, - 'scaleFactorOfRadiusOfSphericalEarth': 0, - 'scaledValueOfRadiusOfSphericalEarth': 6367470, - 'scaleFactorOfEarthMajorAxis': 0, - 'scaledValueOfEarthMajorAxis': MDI, - 'scaleFactorOfEarthMinorAxis': 0, - 'scaledValueOfEarthMinorAxis': MDI, - 'Nx': 15, - 'Ny': 10, - 'latitudeOfFirstGridPoint': 32549114, - 'longitudeOfFirstGridPoint': 225385728, - 'resolutionAndComponentFlags': 0b00001000, - 'LaD': 60000000, - 'orientationOfTheGrid': 262000000, - 'Dx': 320000000, - 'Dy': 320000000, - 'projectionCentreFlag': 0b00000000, - 'scanningMode': 0b01000000, + "gridDefinitionTemplateNumber": 20, + "shapeOfTheEarth": 0, + "scaleFactorOfRadiusOfSphericalEarth": 0, + "scaledValueOfRadiusOfSphericalEarth": 6367470, + "scaleFactorOfEarthMajorAxis": 0, + "scaledValueOfEarthMajorAxis": MDI, + "scaleFactorOfEarthMinorAxis": 0, + "scaledValueOfEarthMinorAxis": MDI, + "Nx": 15, + "Ny": 10, + "latitudeOfFirstGridPoint": 32549114, + "longitudeOfFirstGridPoint": 225385728, + "resolutionAndComponentFlags": 0b00001000, + "LaD": 60000000, + "orientationOfTheGrid": 262000000, + "Dx": 320000000, + "Dy": 320000000, + "projectionCentreFlag": 0b00000000, + "scanningMode": 0b01000000, } return section @@ -53,30 +52,35 @@ def expected(self, y_dim, x_dim): # Prepare the expectation. 
expected = empty_metadata() ellipsoid = iris.coord_systems.GeogCS(6367470) - cs = iris.coord_systems.Stereographic(central_lat=90., - central_lon=262., - false_easting=0, - false_northing=0, - true_scale_lat=60., - ellipsoid=ellipsoid) + cs = iris.coord_systems.Stereographic( + central_lat=90.0, + central_lon=262.0, + false_easting=0, + false_northing=0, + true_scale_lat=60.0, + ellipsoid=ellipsoid, + ) lon0 = 225385728 * 1e-6 lat0 = 32549114 * 1e-6 - x0m, y0m = cs.as_cartopy_crs().transform_point( - lon0, lat0, ccrs.Geodetic()) - dxm = dym = 320000. + x0m, y0m = cs.as_cartopy_crs().transform_point(lon0, lat0, ccrs.Geodetic()) + dxm = dym = 320000.0 x_points = x0m + dxm * np.arange(15) y_points = y0m + dym * np.arange(10) - x = iris.coords.DimCoord(x_points, - standard_name='projection_x_coordinate', - units='m', - coord_system=cs, - circular=False) - y = iris.coords.DimCoord(y_points, - standard_name='projection_y_coordinate', - units='m', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + x = iris.coords.DimCoord( + x_points, + standard_name="projection_x_coordinate", + units="m", + coord_system=cs, + circular=False, + ) + y = iris.coords.DimCoord( + y_points, + standard_name="projection_y_coordinate", + units="m", + coord_system=cs, + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -87,5 +91,5 @@ def test(self): self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_30.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_30.py index a52a33e24..635bf1735 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_30.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_30.py @@ -24,30 +24,29 @@ class 
Test(tests.IrisGribTest): - def section_3(self): section = { - 'gridDefinitionTemplateNumber': 30, - 'shapeOfTheEarth': 0, - 'scaleFactorOfRadiusOfSphericalEarth': 0, - 'scaledValueOfRadiusOfSphericalEarth': 6367470, - 'scaleFactorOfEarthMajorAxis': 0, - 'scaledValueOfEarthMajorAxis': MDI, - 'scaleFactorOfEarthMinorAxis': 0, - 'scaledValueOfEarthMinorAxis': MDI, - 'Nx': 15, - 'Ny': 10, - 'longitudeOfFirstGridPoint': 239550000, - 'latitudeOfFirstGridPoint': 21641000, - 'resolutionAndComponentFlags': 0b00001000, - 'LaD': 60000000, - 'LoV': 262000000, - 'Dx': 320000000, - 'Dy': 320000000, - 'projectionCentreFlag': 0b00000000, - 'scanningMode': 0b01000000, - 'Latin1': 60000000, - 'Latin2': 30000000, + "gridDefinitionTemplateNumber": 30, + "shapeOfTheEarth": 0, + "scaleFactorOfRadiusOfSphericalEarth": 0, + "scaledValueOfRadiusOfSphericalEarth": 6367470, + "scaleFactorOfEarthMajorAxis": 0, + "scaledValueOfEarthMajorAxis": MDI, + "scaleFactorOfEarthMinorAxis": 0, + "scaledValueOfEarthMinorAxis": MDI, + "Nx": 15, + "Ny": 10, + "longitudeOfFirstGridPoint": 239550000, + "latitudeOfFirstGridPoint": 21641000, + "resolutionAndComponentFlags": 0b00001000, + "LaD": 60000000, + "LoV": 262000000, + "Dx": 320000000, + "Dy": 320000000, + "projectionCentreFlag": 0b00000000, + "scanningMode": 0b01000000, + "Latin1": 60000000, + "Latin2": 30000000, } return section @@ -56,30 +55,34 @@ def expected(self, y_dim, x_dim): expected = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) cs = iris.coord_systems.LambertConformal( - central_lat=60., - central_lon=262., + central_lat=60.0, + central_lon=262.0, false_easting=0, false_northing=0, - secant_latitudes=(60., 30.), - ellipsoid=iris.coord_systems.GeogCS(6367470)) + secant_latitudes=(60.0, 30.0), + ellipsoid=iris.coord_systems.GeogCS(6367470), + ) lon0 = 239.55 lat0 = 21.641 - x0m, y0m = cs.as_cartopy_crs().transform_point( - lon0, lat0, ccrs.Geodetic()) - dxm = dym = 320000. 
+ x0m, y0m = cs.as_cartopy_crs().transform_point(lon0, lat0, ccrs.Geodetic()) + dxm = dym = 320000.0 x_points = x0m + dxm * np.arange(15) y_points = y0m + dym * np.arange(10) - x = iris.coords.DimCoord(x_points, - standard_name='projection_x_coordinate', - units='m', - coord_system=cs, - circular=False) - y = iris.coords.DimCoord(y_points, - standard_name='projection_y_coordinate', - units='m', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + x = iris.coords.DimCoord( + x_points, + standard_name="projection_x_coordinate", + units="m", + coord_system=cs, + circular=False, + ) + y = iris.coords.DimCoord( + y_points, + standard_name="projection_y_coordinate", + units="m", + coord_system=cs, + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -90,5 +93,5 @@ def test(self): self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_40.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_40.py index a05d41817..0b4cca011 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_40.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_40.py @@ -29,28 +29,37 @@ def get_computed_key(self, key): class Test_regular(tests.IrisGribTest): - def section_3(self): - section = _Section({ - 'shapeOfTheEarth': 0, - 'scaleFactorOfRadiusOfSphericalEarth': 0, - 'scaledValueOfRadiusOfSphericalEarth': 6367470, - 'scaleFactorOfEarthMajorAxis': 0, - 'scaledValueOfEarthMajorAxis': MDI, - 'scaleFactorOfEarthMinorAxis': 0, - 'scaledValueOfEarthMinorAxis': MDI, - 'iDirectionIncrement': 22500000, - 'longitudeOfFirstGridPoint': 0, - 'resolutionAndComponentFlags': 32, - 'Ni': 16, - 'scanningMode': 0b01000000, - 'distinctLatitudes': np.array([-73.79921363, 
-52.81294319, - -31.70409175, -10.56988231, - 10.56988231, 31.70409175, - 52.81294319, 73.79921363]), - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 0, - }) + section = _Section( + { + "shapeOfTheEarth": 0, + "scaleFactorOfRadiusOfSphericalEarth": 0, + "scaledValueOfRadiusOfSphericalEarth": 6367470, + "scaleFactorOfEarthMajorAxis": 0, + "scaledValueOfEarthMajorAxis": MDI, + "scaleFactorOfEarthMinorAxis": 0, + "scaledValueOfEarthMinorAxis": MDI, + "iDirectionIncrement": 22500000, + "longitudeOfFirstGridPoint": 0, + "resolutionAndComponentFlags": 32, + "Ni": 16, + "scanningMode": 0b01000000, + "distinctLatitudes": np.array( + [ + -73.79921363, + -52.81294319, + -31.70409175, + -10.56988231, + 10.56988231, + 31.70409175, + 52.81294319, + 73.79921363, + ] + ), + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 0, + } + ) return section def expected(self, y_dim, x_dim, y_neg=True): @@ -60,23 +69,32 @@ def expected(self, y_dim, x_dim, y_neg=True): nx = 16 dx = 22.5 x_origin = 0 - x = iris.coords.DimCoord(np.arange(nx) * dx + x_origin, - standard_name='longitude', - units='degrees_east', - coord_system=cs, - circular=True) - y_points = np.array([73.79921363, 52.81294319, - 31.70409175, 10.56988231, - -10.56988231, -31.70409175, - -52.81294319, -73.79921363]) + x = iris.coords.DimCoord( + np.arange(nx) * dx + x_origin, + standard_name="longitude", + units="degrees_east", + coord_system=cs, + circular=True, + ) + y_points = np.array( + [ + 73.79921363, + 52.81294319, + 31.70409175, + 10.56988231, + -10.56988231, + -31.70409175, + -52.81294319, + -73.79921363, + ] + ) if not y_neg: y_points = y_points[::-1] - y = iris.coords.DimCoord(y_points, - standard_name='latitude', - units='degrees_north', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + y = iris.coords.DimCoord( + y_points, standard_name="latitude", units="degrees_north", 
coord_system=cs + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def test(self): @@ -88,7 +106,7 @@ def test(self): def test_transposed(self): section = self.section_3() - section['scanningMode'] = 0b01100000 + section["scanningMode"] = 0b01100000 metadata = empty_metadata() grid_definition_template_40(section, metadata) expected = self.expected(1, 0, y_neg=False) @@ -96,7 +114,7 @@ def test_transposed(self): def test_reverse_latitude(self): section = self.section_3() - section['scanningMode'] = 0b00000000 + section["scanningMode"] = 0b00000000 metadata = empty_metadata() grid_definition_template_40(section, metadata) expected = self.expected(0, 1, y_neg=True) @@ -104,51 +122,68 @@ def test_reverse_latitude(self): class Test_reduced(tests.IrisGribTest): - def section_3(self): - section = _Section({ - 'shapeOfTheEarth': 0, - 'scaleFactorOfRadiusOfSphericalEarth': 0, - 'scaledValueOfRadiusOfSphericalEarth': 6367470, - 'scaleFactorOfEarthMajorAxis': 0, - 'scaledValueOfEarthMajorAxis': MDI, - 'scaleFactorOfEarthMinorAxis': 0, - 'scaledValueOfEarthMinorAxis': MDI, - 'longitudes': np.array([0., 180., - 0., 120., 240., - 0., 120., 240., - 0., 180.]), - 'latitudes': np.array([-59.44440829, -59.44440829, - -19.87571915, -19.87571915, -19.87571915, - 19.87571915, 19.87571915, 19.87571915, - 59.44440829, 59.44440829]), - 'numberOfOctectsForNumberOfPoints': 1, - 'interpretationOfNumberOfPoints': 1, - }) + section = _Section( + { + "shapeOfTheEarth": 0, + "scaleFactorOfRadiusOfSphericalEarth": 0, + "scaledValueOfRadiusOfSphericalEarth": 6367470, + "scaleFactorOfEarthMajorAxis": 0, + "scaledValueOfEarthMajorAxis": MDI, + "scaleFactorOfEarthMinorAxis": 0, + "scaledValueOfEarthMinorAxis": MDI, + "longitudes": np.array( + [0.0, 180.0, 0.0, 120.0, 240.0, 0.0, 120.0, 240.0, 0.0, 180.0] + ), + "latitudes": np.array( + [ + -59.44440829, + -59.44440829, + -19.87571915, + -19.87571915, + -19.87571915, + 
19.87571915, + 19.87571915, + 19.87571915, + 59.44440829, + 59.44440829, + ] + ), + "numberOfOctectsForNumberOfPoints": 1, + "interpretationOfNumberOfPoints": 1, + } + ) return section def expected(self): # Prepare the expectation. expected = empty_metadata() cs = iris.coord_systems.GeogCS(6367470) - x_points = np.array([0., 180., - 0., 120., 240., - 0., 120., 240., - 0., 180.]) - y_points = np.array([-59.44440829, -59.44440829, - -19.87571915, -19.87571915, -19.87571915, - 19.87571915, 19.87571915, 19.87571915, - 59.44440829, 59.44440829]) - x = iris.coords.AuxCoord(x_points, - standard_name='longitude', - units='degrees_east', - coord_system=cs) - y = iris.coords.AuxCoord(y_points, - standard_name='latitude', - units='degrees_north', - coord_system=cs) - expected['aux_coords_and_dims'].append((y, 0)) - expected['aux_coords_and_dims'].append((x, 0)) + x_points = np.array( + [0.0, 180.0, 0.0, 120.0, 240.0, 0.0, 120.0, 240.0, 0.0, 180.0] + ) + y_points = np.array( + [ + -59.44440829, + -59.44440829, + -19.87571915, + -19.87571915, + -19.87571915, + 19.87571915, + 19.87571915, + 19.87571915, + 59.44440829, + 59.44440829, + ] + ) + x = iris.coords.AuxCoord( + x_points, standard_name="longitude", units="degrees_east", coord_system=cs + ) + y = iris.coords.AuxCoord( + y_points, standard_name="latitude", units="degrees_north", coord_system=cs + ) + expected["aux_coords_and_dims"].append((y, 0)) + expected["aux_coords_and_dims"].append((x, 0)) return expected def test(self): @@ -159,5 +194,5 @@ def test(self): self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_4_and_5.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_4_and_5.py index c7114e653..6cfafda6e 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_4_and_5.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_4_and_5.py @@ 
-19,8 +19,7 @@ from iris.coords import DimCoord import numpy as np -from iris_grib._load_convert import (grid_definition_template_4_and_5, - _MDI as MDI) +from iris_grib._load_convert import grid_definition_template_4_and_5, _MDI as MDI RESOLUTION = 1e6 @@ -28,96 +27,116 @@ class Test(tests.IrisGribTest): def setUp(self): - self.patch('warnings.warn') - self.patch('iris_grib._load_convert._is_circular', return_value=False) - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + self.patch("warnings.warn") + self.patch("iris_grib._load_convert._is_circular", return_value=False) + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } self.cs = mock.sentinel.coord_system self.data = np.arange(10, dtype=np.float64) - def _check(self, section, request_warning, - expect_warning=False, y_dim=0, x_dim=1): - this = 'iris_grib._load_convert.options' + def _check(self, section, request_warning, expect_warning=False, y_dim=0, x_dim=1): + this = "iris_grib._load_convert.options" with mock.patch(this, warn_on_unsupported=request_warning): metadata = deepcopy(self.metadata) # The called being tested. 
- grid_definition_template_4_and_5(section, metadata, - 'latitude', 'longitude', self.cs) + grid_definition_template_4_and_5( + section, metadata, "latitude", "longitude", self.cs + ) expected = deepcopy(self.metadata) - coord = DimCoord(self.data, - standard_name='latitude', - units='degrees', - coord_system=self.cs) - expected['dim_coords_and_dims'].append((coord, y_dim)) - coord = DimCoord(self.data, - standard_name='longitude', - units='degrees', - coord_system=self.cs) - expected['dim_coords_and_dims'].append((coord, x_dim)) + coord = DimCoord( + self.data, + standard_name="latitude", + units="degrees", + coord_system=self.cs, + ) + expected["dim_coords_and_dims"].append((coord, y_dim)) + coord = DimCoord( + self.data, + standard_name="longitude", + units="degrees", + coord_system=self.cs, + ) + expected["dim_coords_and_dims"].append((coord, x_dim)) self.assertEqual(metadata, expected) if expect_warning: self.assertEqual(len(warnings.warn.mock_calls), 1) args, kwargs = warnings.warn.call_args - self.assertIn('resolution and component flags', args[0]) + self.assertIn("resolution and component flags", args[0]) else: self.assertEqual(len(warnings.warn.mock_calls), 0) def test_resolution_default_0(self): for request_warn in [False, True]: - section = {'basicAngleOfTheInitialProductionDomain': 0, - 'subdivisionsOfBasicAngle': 0, - 'resolutionAndComponentFlags': 0, - 'longitudes': self.data * RESOLUTION, - 'latitudes': self.data * RESOLUTION, - 'scanningMode': 0} + section = { + "basicAngleOfTheInitialProductionDomain": 0, + "subdivisionsOfBasicAngle": 0, + "resolutionAndComponentFlags": 0, + "longitudes": self.data * RESOLUTION, + "latitudes": self.data * RESOLUTION, + "scanningMode": 0, + } self._check(section, request_warn) def test_resolution_default_mdi(self): for request_warn in [False, True]: - section = {'basicAngleOfTheInitialProductionDomain': MDI, - 'subdivisionsOfBasicAngle': MDI, - 'resolutionAndComponentFlags': 0, - 'longitudes': self.data * 
RESOLUTION, - 'latitudes': self.data * RESOLUTION, - 'scanningMode': 0} + section = { + "basicAngleOfTheInitialProductionDomain": MDI, + "subdivisionsOfBasicAngle": MDI, + "resolutionAndComponentFlags": 0, + "longitudes": self.data * RESOLUTION, + "latitudes": self.data * RESOLUTION, + "scanningMode": 0, + } self._check(section, request_warn) def test_resolution(self): angle = 10 for request_warn in [False, True]: - section = {'basicAngleOfTheInitialProductionDomain': 1, - 'subdivisionsOfBasicAngle': angle, - 'resolutionAndComponentFlags': 0, - 'longitudes': self.data * angle, - 'latitudes': self.data * angle, - 'scanningMode': 0} + section = { + "basicAngleOfTheInitialProductionDomain": 1, + "subdivisionsOfBasicAngle": angle, + "resolutionAndComponentFlags": 0, + "longitudes": self.data * angle, + "latitudes": self.data * angle, + "scanningMode": 0, + } self._check(section, request_warn) def test_uv_resolved_warn(self): angle = 100 for warn in [False, True]: - section = {'basicAngleOfTheInitialProductionDomain': 1, - 'subdivisionsOfBasicAngle': angle, - 'resolutionAndComponentFlags': 0x08, - 'longitudes': self.data * angle, - 'latitudes': self.data * angle, - 'scanningMode': 0} + section = { + "basicAngleOfTheInitialProductionDomain": 1, + "subdivisionsOfBasicAngle": angle, + "resolutionAndComponentFlags": 0x08, + "longitudes": self.data * angle, + "latitudes": self.data * angle, + "scanningMode": 0, + } self._check(section, warn, expect_warning=warn) def test_j_consecutive(self): angle = 1000 for request_warn in [False, True]: - section = {'basicAngleOfTheInitialProductionDomain': 1, - 'subdivisionsOfBasicAngle': angle, - 'resolutionAndComponentFlags': 0, - 'longitudes': self.data * angle, - 'latitudes': self.data * angle, - 'scanningMode': 0x20} + section = { + "basicAngleOfTheInitialProductionDomain": 1, + "subdivisionsOfBasicAngle": angle, + "resolutionAndComponentFlags": 0, + "longitudes": self.data * angle, + "latitudes": self.data * angle, + "scanningMode": 
0x20, + } self._check(section, request_warn, y_dim=1, x_dim=0) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_5.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_5.py index 909942949..4662db5e6 100644 --- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_5.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_5.py @@ -21,64 +21,78 @@ class Test(tests.IrisGribTest): def setUp(self): def func(s, m, y, x, c): - return m['dim_coords_and_dims'].append(item) + return m["dim_coords_and_dims"].append(item) - module = 'iris_grib._load_convert' + module = "iris_grib._load_convert" self.major = mock.sentinel.major self.minor = mock.sentinel.minor self.radius = mock.sentinel.radius - mfunc = '{}.ellipsoid_geometry'.format(module) + mfunc = "{}.ellipsoid_geometry".format(module) return_value = (self.major, self.minor, self.radius) self.patch(mfunc, return_value=return_value) - mfunc = '{}.ellipsoid'.format(module) + mfunc = "{}.ellipsoid".format(module) self.ellipsoid = mock.sentinel.ellipsoid self.patch(mfunc, return_value=self.ellipsoid) - mfunc = '{}.grid_definition_template_4_and_5'.format(module) + mfunc = "{}.grid_definition_template_4_and_5".format(module) self.coord = mock.sentinel.coord self.dim = mock.sentinel.dim item = (self.coord, self.dim) self.patch(mfunc, side_effect=func) - mclass = 'iris.coord_systems.RotatedGeogCS' + mclass = "iris.coord_systems.RotatedGeogCS" self.cs = mock.sentinel.cs self.patch(mclass, return_value=self.cs) - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + 
"dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } def test(self): metadata = deepcopy(self.metadata) angleOfRotation = mock.sentinel.angleOfRotation shapeOfTheEarth = mock.sentinel.shapeOfTheEarth - section = {'latitudeOfSouthernPole': 45000000, - 'longitudeOfSouthernPole': 90000000, - 'angleOfRotation': angleOfRotation, - 'shapeOfTheEarth': shapeOfTheEarth} + section = { + "latitudeOfSouthernPole": 45000000, + "longitudeOfSouthernPole": 90000000, + "angleOfRotation": angleOfRotation, + "shapeOfTheEarth": shapeOfTheEarth, + } # The called being tested. grid_definition_template_5(section, metadata) - from iris_grib._load_convert import \ - ellipsoid_geometry, \ - ellipsoid, \ - grid_definition_template_4_and_5 as gdt_4_5 + from iris_grib._load_convert import ( + ellipsoid_geometry, + ellipsoid, + grid_definition_template_4_and_5 as gdt_4_5, + ) + self.assertEqual(ellipsoid_geometry.call_count, 1) - ellipsoid.assert_called_once_with(shapeOfTheEarth, self.major, - self.minor, self.radius) + ellipsoid.assert_called_once_with( + shapeOfTheEarth, self.major, self.minor, self.radius + ) from iris.coord_systems import RotatedGeogCS - RotatedGeogCS.assert_called_once_with(-45.0, 270.0, angleOfRotation, - self.ellipsoid) - gdt_4_5.assert_called_once_with(section, metadata, 'grid_latitude', - 'grid_longitude', self.cs) + + RotatedGeogCS.assert_called_once_with( + -45.0, 270.0, angleOfRotation, self.ellipsoid + ) + gdt_4_5.assert_called_once_with( + section, metadata, "grid_latitude", "grid_longitude", self.cs + ) expected = deepcopy(self.metadata) - expected['dim_coords_and_dims'].append((self.coord, self.dim)) + expected["dim_coords_and_dims"].append((self.coord, self.dim)) self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_grid_definition_template_90.py b/iris_grib/tests/unit/load_convert/test_grid_definition_template_90.py index cdbf0fd9f..202132d2c 100644 
--- a/iris_grib/tests/unit/load_convert/test_grid_definition_template_90.py +++ b/iris_grib/tests/unit/load_convert/test_grid_definition_template_90.py @@ -27,27 +27,27 @@ class Test(tests.IrisGribTest): def uk(self): section = { - 'shapeOfTheEarth': 3, - 'scaleFactorOfRadiusOfSphericalEarth': MDI, - 'scaledValueOfRadiusOfSphericalEarth': MDI, - 'scaleFactorOfEarthMajorAxis': 4, - 'scaledValueOfEarthMajorAxis': 63781688, - 'scaleFactorOfEarthMinorAxis': 4, - 'scaledValueOfEarthMinorAxis': 63565840, - 'Nx': 390, - 'Ny': 227, - 'latitudeOfSubSatellitePoint': 0, - 'longitudeOfSubSatellitePoint': 0, - 'resolutionAndComponentFlags': 0, - 'dx': 3622, - 'dy': 3610, - 'Xp': 1856000, - 'Yp': 1856000, - 'scanningMode': 192, - 'orientationOfTheGrid': 0, - 'Nr': 6610674, - 'Xo': 1733, - 'Yo': 3320 + "shapeOfTheEarth": 3, + "scaleFactorOfRadiusOfSphericalEarth": MDI, + "scaledValueOfRadiusOfSphericalEarth": MDI, + "scaleFactorOfEarthMajorAxis": 4, + "scaledValueOfEarthMajorAxis": 63781688, + "scaleFactorOfEarthMinorAxis": 4, + "scaledValueOfEarthMinorAxis": 63565840, + "Nx": 390, + "Ny": 227, + "latitudeOfSubSatellitePoint": 0, + "longitudeOfSubSatellitePoint": 0, + "resolutionAndComponentFlags": 0, + "dx": 3622, + "dy": 3610, + "Xp": 1856000, + "Yp": 1856000, + "scanningMode": 192, + "orientationOfTheGrid": 0, + "Nr": 6610674, + "Xo": 1733, + "Yo": 3320, } return section @@ -63,57 +63,74 @@ def expected_uk(self, y_dim, x_dim): latitude_of_projection_origin=lat, longitude_of_projection_origin=lon, perspective_point_height=height, - sweep_angle_axis='y', + sweep_angle_axis="y", false_easting=easting, false_northing=northing, - ellipsoid=ellipsoid) + ellipsoid=ellipsoid, + ) nx = 390 x_origin = 0.010313624253429191 dx = -8.38506036864162e-05 - x = iris.coords.DimCoord(np.arange(nx) * dx + x_origin, - 'projection_x_coordinate', units='radians', - coord_system=cs) + x = iris.coords.DimCoord( + np.arange(nx) * dx + x_origin, + "projection_x_coordinate", + units="radians", + 
coord_system=cs, + ) ny = 227 y_origin = 0.12275487535118533 dy = 8.384895857321404e-05 - y = iris.coords.DimCoord(np.arange(ny) * dy + y_origin, - 'projection_y_coordinate', units='radians', - coord_system=cs) - expected['dim_coords_and_dims'].append((y, y_dim)) - expected['dim_coords_and_dims'].append((x, x_dim)) + y = iris.coords.DimCoord( + np.arange(ny) * dy + y_origin, + "projection_y_coordinate", + units="radians", + coord_system=cs, + ) + expected["dim_coords_and_dims"].append((y, y_dim)) + expected["dim_coords_and_dims"].append((x, x_dim)) return expected def compare(self, metadata, expected): # Compare the result with the expectation. - self.assertEqual(len(metadata['dim_coords_and_dims']), - len(expected['dim_coords_and_dims'])) - for result_pair, expected_pair in zip(metadata['dim_coords_and_dims'], - expected['dim_coords_and_dims']): + self.assertEqual( + len(metadata["dim_coords_and_dims"]), len(expected["dim_coords_and_dims"]) + ) + for result_pair, expected_pair in zip( + metadata["dim_coords_and_dims"], expected["dim_coords_and_dims"] + ): result_coord, result_dims = result_pair expected_coord, expected_dims = expected_pair # Take copies for safety, as we are going to modify them. - result_coord, expected_coord = [co.copy() for co in - (result_coord, expected_coord)] + result_coord, expected_coord = [ + co.copy() for co in (result_coord, expected_coord) + ] # Ensure the dims match. self.assertEqual(result_dims, expected_dims) # Ensure the coordinate systems match (allowing for precision). 
result_cs = result_coord.coord_system expected_cs = expected_coord.coord_system self.assertEqual(type(result_cs), type(expected_cs)) - self.assertEqual(result_cs.latitude_of_projection_origin, - expected_cs.latitude_of_projection_origin) - self.assertEqual(result_cs.longitude_of_projection_origin, - expected_cs.longitude_of_projection_origin) - self.assertAlmostEqual(result_cs.perspective_point_height, - expected_cs.perspective_point_height) - self.assertEqual(result_cs.false_easting, - expected_cs.false_easting) - self.assertEqual(result_cs.false_northing, - expected_cs.false_northing) - self.assertAlmostEqual(result_cs.ellipsoid.semi_major_axis, - expected_cs.ellipsoid.semi_major_axis) - self.assertEqual(result_cs.ellipsoid.semi_minor_axis, - expected_cs.ellipsoid.semi_minor_axis) + self.assertEqual( + result_cs.latitude_of_projection_origin, + expected_cs.latitude_of_projection_origin, + ) + self.assertEqual( + result_cs.longitude_of_projection_origin, + expected_cs.longitude_of_projection_origin, + ) + self.assertAlmostEqual( + result_cs.perspective_point_height, expected_cs.perspective_point_height + ) + self.assertEqual(result_cs.false_easting, expected_cs.false_easting) + self.assertEqual(result_cs.false_northing, expected_cs.false_northing) + self.assertAlmostEqual( + result_cs.ellipsoid.semi_major_axis, + expected_cs.ellipsoid.semi_major_axis, + ) + self.assertEqual( + result_cs.ellipsoid.semi_minor_axis, + expected_cs.ellipsoid.semi_minor_axis, + ) # Now we can ignore the coordinate systems and compare the # rest of the coordinate attributes. result_coord.coord_system = None @@ -121,12 +138,10 @@ def compare(self, metadata, expected): # Likewise, first compare the points (and optional bounds) # *approximately*, then force those equal + compare other aspects. 
- self.assertArrayAlmostEqual(result_coord.points, - expected_coord.points) + self.assertArrayAlmostEqual(result_coord.points, expected_coord.points) result_coord.points = expected_coord.points if result_coord.has_bounds() and expected_coord.has_bounds(): - self.assertArrayAlmostEqual(result_coord.bounds, - expected_coord.bounds) + self.assertArrayAlmostEqual(result_coord.bounds, expected_coord.bounds) result_coord.bounds = expected_coord.bounds # Compare the coords, having equalised the array values. @@ -134,7 +149,7 @@ def compare(self, metadata, expected): # Ensure no other metadata was created. for name in expected.keys(): - if name == 'dim_coords_and_dims': + if name == "dim_coords_and_dims": continue self.assertEqual(metadata[name], expected[name]) @@ -147,7 +162,7 @@ def test_uk(self): def test_uk_transposed(self): section = self.uk() - section['scanningMode'] = 0b11100000 + section["scanningMode"] = 0b11100000 metadata = empty_metadata() grid_definition_template_90(section, metadata) expected = self.expected_uk(1, 0) @@ -155,50 +170,48 @@ def test_uk_transposed(self): def test_non_zero_latitude(self): section = self.uk() - section['latitudeOfSubSatellitePoint'] = 1 + section["latitudeOfSubSatellitePoint"] = 1 metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, - 'non-zero latitude'): + with self.assertRaisesRegex( + iris.exceptions.TranslationError, "non-zero latitude" + ): grid_definition_template_90(section, metadata) def test_rotated_meridian(self): section = self.uk() - section['orientationOfTheGrid'] = 1 + section["orientationOfTheGrid"] = 1 metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, - 'orientation'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, "orientation"): grid_definition_template_90(section, metadata) def test_zero_height(self): section = self.uk() - section['Nr'] = 0 + section["Nr"] = 0 metadata = empty_metadata() - with 
self.assertRaisesRegex(iris.exceptions.TranslationError, - 'zero'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, "zero"): grid_definition_template_90(section, metadata) def test_orthographic(self): section = self.uk() - section['Nr'] = MDI + section["Nr"] = MDI metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, - 'orthographic'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, "orthographic"): grid_definition_template_90(section, metadata) def test_scanning_mode_positive_x(self): section = self.uk() - section['scanningMode'] = 0b01000000 + section["scanningMode"] = 0b01000000 metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, r'\+x'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, r"\+x"): grid_definition_template_90(section, metadata) def test_scanning_mode_negative_y(self): section = self.uk() - section['scanningMode'] = 0b10000000 + section["scanningMode"] = 0b10000000 metadata = empty_metadata() - with self.assertRaisesRegex(iris.exceptions.TranslationError, '-y'): + with self.assertRaisesRegex(iris.exceptions.TranslationError, "-y"): grid_definition_template_90(section, metadata) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_other_time_coord.py b/iris_grib/tests/unit/load_convert/test_other_time_coord.py index ecac767ca..18226c819 100644 --- a/iris_grib/tests/unit/load_convert/test_other_time_coord.py +++ b/iris_grib/tests/unit/load_convert/test_other_time_coord.py @@ -18,89 +18,93 @@ class TestValid(tests.IrisGribTest): def test_t(self): - rt = iris.coords.DimCoord(48, 'time', units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') + rt = iris.coords.DimCoord(48, "time", units="hours since epoch") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") result = other_time_coord(rt, fp) - expected = 
iris.coords.DimCoord(42, 'forecast_reference_time', - units='hours since epoch') + expected = iris.coords.DimCoord( + 42, "forecast_reference_time", units="hours since epoch" + ) self.assertEqual(result, expected) def test_frt(self): - rt = iris.coords.DimCoord(48, 'forecast_reference_time', - units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') + rt = iris.coords.DimCoord( + 48, "forecast_reference_time", units="hours since epoch" + ) + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") result = other_time_coord(rt, fp) - expected = iris.coords.DimCoord(54, 'time', units='hours since epoch') + expected = iris.coords.DimCoord(54, "time", units="hours since epoch") self.assertEqual(result, expected) class TestInvalid(tests.IrisGribTest): def test_t_with_bounds(self): - rt = iris.coords.DimCoord(48, 'time', units='hours since epoch', - bounds=[36, 60]) - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'bounds'): + rt = iris.coords.DimCoord( + 48, "time", units="hours since epoch", bounds=[36, 60] + ) + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "bounds"): other_time_coord(rt, fp) def test_frt_with_bounds(self): - rt = iris.coords.DimCoord(48, 'forecast_reference_time', - units='hours since epoch', - bounds=[42, 54]) - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'bounds'): + rt = iris.coords.DimCoord( + 48, "forecast_reference_time", units="hours since epoch", bounds=[42, 54] + ) + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "bounds"): other_time_coord(rt, fp) def test_fp_with_bounds(self): - rt = iris.coords.DimCoord(48, 'time', units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours', - bounds=[3, 9]) - with self.assertRaisesRegex(ValueError, 
'bounds'): + rt = iris.coords.DimCoord(48, "time", units="hours since epoch") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours", bounds=[3, 9]) + with self.assertRaisesRegex(ValueError, "bounds"): other_time_coord(rt, fp) def test_vector_t(self): - rt = iris.coords.DimCoord([0, 3], 'time', units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'Vector'): + rt = iris.coords.DimCoord([0, 3], "time", units="hours since epoch") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "Vector"): other_time_coord(rt, fp) def test_vector_frt(self): - rt = iris.coords.DimCoord([0, 3], 'forecast_reference_time', - units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'Vector'): + rt = iris.coords.DimCoord( + [0, 3], "forecast_reference_time", units="hours since epoch" + ) + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "Vector"): other_time_coord(rt, fp) def test_vector_fp(self): - rt = iris.coords.DimCoord(48, 'time', units='hours since epoch') - fp = iris.coords.DimCoord([6, 12], 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'Vector'): + rt = iris.coords.DimCoord(48, "time", units="hours since epoch") + fp = iris.coords.DimCoord([6, 12], "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "Vector"): other_time_coord(rt, fp) def test_invalid_rt_name(self): - rt = iris.coords.DimCoord(1, 'height') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'reference time'): + rt = iris.coords.DimCoord(1, "height") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "reference time"): other_time_coord(rt, fp) def test_invalid_t_unit(self): - rt 
= iris.coords.DimCoord(1, 'time', units='Pa') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'unit.*Pa'): + rt = iris.coords.DimCoord(1, "time", units="Pa") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "unit.*Pa"): other_time_coord(rt, fp) def test_invalid_frt_unit(self): - rt = iris.coords.DimCoord(1, 'forecast_reference_time', units='km') - fp = iris.coords.DimCoord(6, 'forecast_period', units='hours') - with self.assertRaisesRegex(ValueError, 'unit.*km'): + rt = iris.coords.DimCoord(1, "forecast_reference_time", units="km") + fp = iris.coords.DimCoord(6, "forecast_period", units="hours") + with self.assertRaisesRegex(ValueError, "unit.*km"): other_time_coord(rt, fp) def test_invalid_fp_unit(self): - rt = iris.coords.DimCoord(48, 'forecast_reference_time', - units='hours since epoch') - fp = iris.coords.DimCoord(6, 'forecast_period', units='kg') - with self.assertRaisesRegex(ValueError, 'unit.*kg'): + rt = iris.coords.DimCoord( + 48, "forecast_reference_time", units="hours since epoch" + ) + fp = iris.coords.DimCoord(6, "forecast_period", units="kg") + with self.assertRaisesRegex(ValueError, "unit.*kg"): other_time_coord(rt, fp) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_section.py b/iris_grib/tests/unit/load_convert/test_product_definition_section.py index 409346f9d..329034932 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_section.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_section.py @@ -18,10 +18,12 @@ from iris_grib._load_convert import product_definition_section from iris_grib.tests.unit.load_convert import empty_metadata -from iris_grib.tests.unit.load_convert.test_product_definition_template_0 \ - import section_4 as pdt_0_section_4 -from 
iris_grib.tests.unit.load_convert.test_product_definition_template_31 \ - import section_4 as pdt_31_section_4 +from iris_grib.tests.unit.load_convert.test_product_definition_template_0 import ( + section_4 as pdt_0_section_4, +) +from iris_grib.tests.unit.load_convert.test_product_definition_template_31 import ( + section_4 as pdt_31_section_4, +) class TestFixedSurfaces(tests.IrisGribTest): @@ -29,24 +31,26 @@ class TestFixedSurfaces(tests.IrisGribTest): Tests focussing on the handling of fixed surface elements in section 4. Expects/ignores depending on the template number. """ + def setUp(self): - self.patch('warnings.warn') + self.patch("warnings.warn") self.translate_phenomenon_patch = self.patch( - 'iris_grib._load_convert.translate_phenomenon' + "iris_grib._load_convert.translate_phenomenon" ) # Prep placeholder variables for product_definition_section. self.discipline = mock.sentinel.discipline self.tablesVersion = mock.sentinel.tablesVersion - self.rt_coord = DimCoord(24, 'forecast_reference_time', - units='hours since epoch') + self.rt_coord = DimCoord( + 24, "forecast_reference_time", units="hours since epoch" + ) self.metadata = empty_metadata() self.templates = {0: pdt_0_section_4(), 31: pdt_31_section_4()} self.fixed_surface_keys = [ - 'typeOfFirstFixedSurface', - 'scaledValueOfFirstFixedSurface', - 'typeOfSecondFixedSurface' + "typeOfFirstFixedSurface", + "scaledValueOfFirstFixedSurface", + "typeOfSecondFixedSurface", ] def _check_fixed_surface(self, fs_is_expected, fs_is_present): @@ -60,11 +64,13 @@ def _check_fixed_surface(self, fs_is_expected, fs_is_present): # #0 contains fixed surface elements, #31 does not. 
template_number = 0 if fs_is_expected else 31 section_4 = self.templates[template_number] - section_4.update({ - 'productDefinitionTemplateNumber': template_number, - 'parameterCategory': None, - 'parameterNumber': None - }) + section_4.update( + { + "productDefinitionTemplateNumber": template_number, + "parameterCategory": None, + "parameterNumber": None, + } + ) for key in self.fixed_surface_keys: # Force the presence or absence of the fixed surface elements even @@ -77,12 +83,16 @@ def _check_fixed_surface(self, fs_is_expected, fs_is_present): def run_function(): # For reuse in every type of test below. product_definition_section( - section_4, self.metadata, self.discipline, self.tablesVersion, - self.rt_coord) + section_4, + self.metadata, + self.discipline, + self.tablesVersion, + self.rt_coord, + ) if fs_is_expected and not fs_is_present: # Should error since the expected keys are missing. - error_message = 'FixedSurface' + error_message = "FixedSurface" with self.assertRaisesRegex(KeyError, error_message): run_function() else: @@ -93,8 +103,9 @@ def run_function(): # arguments. So should always have run. 
previous_call_count = self.translate_phenomenon_patch.call_count run_function() - self.assertEqual(self.translate_phenomenon_patch.call_count, - previous_call_count + 1) + self.assertEqual( + self.translate_phenomenon_patch.call_count, previous_call_count + 1 + ) phenom_call_args = self.translate_phenomenon_patch.call_args[1] for key in self.fixed_surface_keys: # Check whether None or actual values have been passed for @@ -119,5 +130,5 @@ def test_all_combinations(self): self._check_fixed_surface(*pair) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_0.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_0.py index 42593b546..0fa8a04ad 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_0.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_0.py @@ -25,71 +25,74 @@ def section_4(): - return {'hoursAfterDataCutoff': MDI, - 'minutesAfterDataCutoff': MDI, - 'indicatorOfUnitOfTimeRange': 0, # minutes - 'forecastTime': 360, - 'NV': 0, - 'typeOfFirstFixedSurface': 103, - 'scaleFactorOfFirstFixedSurface': 0, - 'scaledValueOfFirstFixedSurface': 9999, - 'typeOfSecondFixedSurface': 255} + return { + "hoursAfterDataCutoff": MDI, + "minutesAfterDataCutoff": MDI, + "indicatorOfUnitOfTimeRange": 0, # minutes + "forecastTime": 360, + "NV": 0, + "typeOfFirstFixedSurface": 103, + "scaleFactorOfFirstFixedSurface": 0, + "scaledValueOfFirstFixedSurface": 9999, + "typeOfSecondFixedSurface": 255, + } class Test(LoadConvertTest): def test_given_frt(self): metadata = empty_metadata() - rt_coord = iris.coords.DimCoord(24, 'forecast_reference_time', - units='hours since epoch') + rt_coord = iris.coords.DimCoord( + 24, "forecast_reference_time", units="hours since epoch" + ) product_definition_template_0(section_4(), metadata, rt_coord) expected = empty_metadata() - aux = expected['aux_coords_and_dims'] - 
aux.append((iris.coords.DimCoord(6, 'forecast_period', units='hours'), - None)) - aux.append(( - iris.coords.DimCoord(30, 'time', units='hours since epoch'), None)) + aux = expected["aux_coords_and_dims"] + aux.append((iris.coords.DimCoord(6, "forecast_period", units="hours"), None)) + aux.append((iris.coords.DimCoord(30, "time", units="hours since epoch"), None)) aux.append((rt_coord, None)) - aux.append((iris.coords.DimCoord(9999, long_name='height', units='m'), - None)) + aux.append((iris.coords.DimCoord(9999, long_name="height", units="m"), None)) self.assertMetadataEqual(metadata, expected) def test_given_t(self): metadata = empty_metadata() - rt_coord = iris.coords.DimCoord(24, 'time', - units='hours since epoch') + rt_coord = iris.coords.DimCoord(24, "time", units="hours since epoch") product_definition_template_0(section_4(), metadata, rt_coord) expected = empty_metadata() - aux = expected['aux_coords_and_dims'] - aux.append((iris.coords.DimCoord(6, 'forecast_period', units='hours'), - None)) - aux.append(( - iris.coords.DimCoord(18, 'forecast_reference_time', - units='hours since epoch'), None)) + aux = expected["aux_coords_and_dims"] + aux.append((iris.coords.DimCoord(6, "forecast_period", units="hours"), None)) + aux.append( + ( + iris.coords.DimCoord( + 18, "forecast_reference_time", units="hours since epoch" + ), + None, + ) + ) aux.append((rt_coord, None)) - aux.append((iris.coords.DimCoord(9999, long_name='height', units='m'), - None)) + aux.append((iris.coords.DimCoord(9999, long_name="height", units="m"), None)) self.assertMetadataEqual(metadata, expected) def test_generating_process_warnings(self): metadata = empty_metadata() - rt_coord = iris.coords.DimCoord(24, 'forecast_reference_time', - units='hours since epoch') + rt_coord = iris.coords.DimCoord( + 24, "forecast_reference_time", units="hours since epoch" + ) convert_options = iris_grib._load_convert.options emit_warnings = convert_options.warn_on_unsupported try: 
convert_options.warn_on_unsupported = True - with mock.patch('warnings.warn') as warn: + with mock.patch("warnings.warn") as warn: product_definition_template_0(section_4(), metadata, rt_coord) warn_msgs = [call[1][0] for call in warn.mock_calls] - expected = ['Unable to translate type of generating process.', - 'Unable to translate background generating process ' - 'identifier.', - 'Unable to translate forecast generating process ' - 'identifier.'] + expected = [ + "Unable to translate type of generating process.", + "Unable to translate background generating process " "identifier.", + "Unable to translate forecast generating process " "identifier.", + ] self.assertEqual(warn_msgs, expected) finally: convert_options.warn_on_unsupported = emit_warnings -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_1.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_1.py index 2c607ba13..47bde0eee 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_1.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_1.py @@ -24,39 +24,44 @@ class Test(tests.IrisGribTest): def setUp(self): def func(s, m, f): - return m['cell_methods'].append(self.cell_method) + return m["cell_methods"].append(self.cell_method) - module = 'iris_grib._load_convert' - self.patch('warnings.warn') - this = '{}.product_definition_template_0'.format(module) + module = "iris_grib._load_convert" + self.patch("warnings.warn") + this = "{}.product_definition_template_0".format(module) self.cell_method = mock.sentinel.cell_method self.patch(this, side_effect=func) - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, 
+ "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } def _check(self, request_warning): - this = 'iris_grib._load_convert.options' + this = "iris_grib._load_convert.options" with mock.patch(this, warn_on_unsupported=request_warning): metadata = deepcopy(self.metadata) perturbationNumber = 666 - section = {'perturbationNumber': perturbationNumber} + section = {"perturbationNumber": perturbationNumber} forecast_reference_time = mock.sentinel.forecast_reference_time # The called being tested. - product_definition_template_1(section, metadata, - forecast_reference_time) + product_definition_template_1(section, metadata, forecast_reference_time) expected = deepcopy(self.metadata) - expected['cell_methods'].append(self.cell_method) - realization = DimCoord(perturbationNumber, - standard_name='realization', - units='no_unit') - expected['aux_coords_and_dims'].append((realization, None)) + expected["cell_methods"].append(self.cell_method) + realization = DimCoord( + perturbationNumber, standard_name="realization", units="no_unit" + ) + expected["aux_coords_and_dims"].append((realization, None)) self.assertEqual(metadata, expected) if request_warning: warn_msgs = [mcall[1][0] for mcall in warnings.warn.mock_calls] - expected_msgs = ['type of ensemble', 'number of forecasts'] + expected_msgs = ["type of ensemble", "number of forecasts"] for emsg in expected_msgs: matches = [wmsg for wmsg in warn_msgs if emsg in wmsg] self.assertEqual(len(matches), 1) @@ -71,5 +76,5 @@ def test_pdt_warn(self): self._check(True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_10.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_10.py index 741ed85f2..b725a1561 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_10.py +++ 
b/iris_grib/tests/unit/load_convert/test_product_definition_template_10.py @@ -20,46 +20,49 @@ class Test(tests.IrisGribTest): def setUp(self): - module = 'iris_grib._load_convert' + module = "iris_grib._load_convert" self.patch_statistical_fp_coord = self.patch( - module + '.statistical_forecast_period_coord', - return_value=mock.sentinel.dummy_fp_coord) + module + ".statistical_forecast_period_coord", + return_value=mock.sentinel.dummy_fp_coord, + ) self.patch_time_coord = self.patch( - module + '.validity_time_coord', - return_value=mock.sentinel.dummy_time_coord) - self.patch_vertical_coords = self.patch(module + '.vertical_coords') + module + ".validity_time_coord", return_value=mock.sentinel.dummy_time_coord + ) + self.patch_vertical_coords = self.patch(module + ".vertical_coords") def test_percentile_coord(self): metadata = empty_metadata() percentileValue = 75 - section = {'productDefinitionTemplateNumber': 10, - 'percentileValue': percentileValue, - 'hoursAfterDataCutoff': 1, - 'minutesAfterDataCutoff': 1, - 'numberOfTimeRange': 1, - 'typeOfStatisticalProcessing': 1, - 'typeOfTimeIncrement': 2, - 'timeIncrement': 0, - 'yearOfEndOfOverallTimeInterval': 2000, - 'monthOfEndOfOverallTimeInterval': 1, - 'dayOfEndOfOverallTimeInterval': 1, - 'hourOfEndOfOverallTimeInterval': 1, - 'minuteOfEndOfOverallTimeInterval': 0, - 'secondOfEndOfOverallTimeInterval': 1} + section = { + "productDefinitionTemplateNumber": 10, + "percentileValue": percentileValue, + "hoursAfterDataCutoff": 1, + "minutesAfterDataCutoff": 1, + "numberOfTimeRange": 1, + "typeOfStatisticalProcessing": 1, + "typeOfTimeIncrement": 2, + "timeIncrement": 0, + "yearOfEndOfOverallTimeInterval": 2000, + "monthOfEndOfOverallTimeInterval": 1, + "dayOfEndOfOverallTimeInterval": 1, + "hourOfEndOfOverallTimeInterval": 1, + "minuteOfEndOfOverallTimeInterval": 0, + "secondOfEndOfOverallTimeInterval": 1, + } forecast_reference_time = mock.Mock() # The called being tested. 
- product_definition_template_10(section, metadata, - forecast_reference_time) + product_definition_template_10(section, metadata, forecast_reference_time) - expected = {'aux_coords_and_dims': []} - percentile = DimCoord(percentileValue, - long_name='percentile_over_time', - units='no_unit') - expected['aux_coords_and_dims'].append((percentile, None)) + expected = {"aux_coords_and_dims": []} + percentile = DimCoord( + percentileValue, long_name="percentile_over_time", units="no_unit" + ) + expected["aux_coords_and_dims"].append((percentile, None)) - self.assertEqual(metadata['aux_coords_and_dims'][-1], - expected['aux_coords_and_dims'][0]) + self.assertEqual( + metadata["aux_coords_and_dims"][-1], expected["aux_coords_and_dims"][0] + ) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_11.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_11.py index 5df27d710..891ba4477 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_11.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_11.py @@ -23,65 +23,72 @@ class Test(tests.IrisGribTest): def setUp(self): def func(s, m, f): - return m['cell_methods'].append(self.cell_method) + return m["cell_methods"].append(self.cell_method) - module = 'iris_grib._load_convert' - self.patch('warnings.warn') - this_module = '{}.product_definition_template_11'.format(module) + module = "iris_grib._load_convert" + self.patch("warnings.warn") + this_module = "{}.product_definition_template_11".format(module) self.cell_method = mock.sentinel.cell_method self.patch(this_module, side_effect=func) self.patch_statistical_fp_coord = self.patch( - module + '.statistical_forecast_period_coord', - return_value=mock.sentinel.dummy_fp_coord) + module + ".statistical_forecast_period_coord", + return_value=mock.sentinel.dummy_fp_coord, + ) self.patch_time_coord = self.patch( - module + 
'.validity_time_coord', - return_value=mock.sentinel.dummy_time_coord) - self.patch_vertical_coords = self.patch(module + '.vertical_coords') - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + module + ".validity_time_coord", return_value=mock.sentinel.dummy_time_coord + ) + self.patch_vertical_coords = self.patch(module + ".vertical_coords") + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } def _check(self, request_warning): - grib_lconv_opt = 'iris_grib._load_convert.options' + grib_lconv_opt = "iris_grib._load_convert.options" with mock.patch(grib_lconv_opt, warn_on_unsupported=request_warning): metadata = deepcopy(self.metadata) perturbationNumber = 666 - section = {'productDefinitionTemplateNumber': 11, - 'perturbationNumber': perturbationNumber, - 'hoursAfterDataCutoff': 1, - 'minutesAfterDataCutoff': 1, - 'numberOfTimeRange': 1, - 'typeOfStatisticalProcessing': 1, - 'typeOfTimeIncrement': 2, - 'timeIncrement': 0, - 'yearOfEndOfOverallTimeInterval': 2000, - 'monthOfEndOfOverallTimeInterval': 1, - 'dayOfEndOfOverallTimeInterval': 1, - 'hourOfEndOfOverallTimeInterval': 1, - 'minuteOfEndOfOverallTimeInterval': 0, - 'secondOfEndOfOverallTimeInterval': 1} + section = { + "productDefinitionTemplateNumber": 11, + "perturbationNumber": perturbationNumber, + "hoursAfterDataCutoff": 1, + "minutesAfterDataCutoff": 1, + "numberOfTimeRange": 1, + "typeOfStatisticalProcessing": 1, + "typeOfTimeIncrement": 2, + "timeIncrement": 0, + "yearOfEndOfOverallTimeInterval": 2000, + "monthOfEndOfOverallTimeInterval": 1, + "dayOfEndOfOverallTimeInterval": 1, + "hourOfEndOfOverallTimeInterval": 1, + "minuteOfEndOfOverallTimeInterval": 0, + 
"secondOfEndOfOverallTimeInterval": 1, + } forecast_reference_time = mock.Mock() # The called being tested. - product_definition_template_11(section, metadata, - forecast_reference_time) - expected = {'cell_methods': [], 'aux_coords_and_dims': []} - expected['cell_methods'].append(CellMethod(method='sum', - coords=('time',))) - realization = DimCoord(perturbationNumber, - standard_name='realization', - units='no_unit') - expected['aux_coords_and_dims'].append((realization, None)) + product_definition_template_11(section, metadata, forecast_reference_time) + expected = {"cell_methods": [], "aux_coords_and_dims": []} + expected["cell_methods"].append(CellMethod(method="sum", coords=("time",))) + realization = DimCoord( + perturbationNumber, standard_name="realization", units="no_unit" + ) + expected["aux_coords_and_dims"].append((realization, None)) self.maxDiff = None - self.assertEqual(metadata['aux_coords_and_dims'][-1], - expected['aux_coords_and_dims'][0]) - self.assertEqual(metadata['cell_methods'][-1], - expected['cell_methods'][0]) + self.assertEqual( + metadata["aux_coords_and_dims"][-1], expected["aux_coords_and_dims"][0] + ) + self.assertEqual(metadata["cell_methods"][-1], expected["cell_methods"][0]) if request_warning: warn_msgs = [mcall[1][0] for mcall in warnings.warn.mock_calls] - expected_msgs = ['type of ensemble', 'number of forecasts'] + expected_msgs = ["type of ensemble", "number of forecasts"] for emsg in expected_msgs: matches = [wmsg for wmsg in warn_msgs if emsg in wmsg] self.assertEqual(len(matches), 1) @@ -96,5 +103,5 @@ def test_pdt_warn(self): self._check(True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_15.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_15.py index 5ac92f696..8e81457e0 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_15.py +++ 
b/iris_grib/tests/unit/load_convert/test_product_definition_template_15.py @@ -28,37 +28,36 @@ def section_4_sample(): # Create a dictionary representing a sample section 4 from a grib file. - return {'productDefinitionTemplateNumber': 15, - 'hoursAfterDataCutoff': MDI, - 'minutesAfterDataCutoff': MDI, - 'indicatorOfUnitOfTimeRange': 0, # minutes - 'forecastTime': 360, - 'NV': 0, - 'typeOfFirstFixedSurface': 103, - 'scaleFactorOfFirstFixedSurface': 0, - 'scaledValueOfFirstFixedSurface': 9999, - 'typeOfSecondFixedSurface': 255, - 'statisticalProcess': 2, # method = maximum - 'spatialProcessing': 0, # from source grid, no interpolation - 'numberOfPointsUsed': 0 # no points used because no interpolation - } + return { + "productDefinitionTemplateNumber": 15, + "hoursAfterDataCutoff": MDI, + "minutesAfterDataCutoff": MDI, + "indicatorOfUnitOfTimeRange": 0, # minutes + "forecastTime": 360, + "NV": 0, + "typeOfFirstFixedSurface": 103, + "scaleFactorOfFirstFixedSurface": 0, + "scaledValueOfFirstFixedSurface": 9999, + "typeOfSecondFixedSurface": 255, + "statisticalProcess": 2, # method = maximum + "spatialProcessing": 0, # from source grid, no interpolation + "numberOfPointsUsed": 0, # no points used because no interpolation + } class Test(LoadConvertTest): def setUp(self): - self.time_coord = DimCoord(24, 'time', units='hours since epoch') - self.forecast_period_coord = DimCoord(6, 'forecast_period', - units='hours') - self.forecast_ref_time_coord = DimCoord(18, 'forecast_reference_time', - units='hours since epoch') - self.height_coord = iris.coords.DimCoord(9999, long_name='height', - units='m') + self.time_coord = DimCoord(24, "time", units="hours since epoch") + self.forecast_period_coord = DimCoord(6, "forecast_period", units="hours") + self.forecast_ref_time_coord = DimCoord( + 18, "forecast_reference_time", units="hours since epoch" + ) + self.height_coord = iris.coords.DimCoord(9999, long_name="height", units="m") def _translate(self, section): # Use pdt 4.15 to 
populate a metadata dict from the section 4 keys metadata = empty_metadata() - product_definition_template_15(section, metadata, - self.time_coord) + product_definition_template_15(section, metadata, self.time_coord) return metadata def test_translation(self): @@ -68,16 +67,14 @@ def test_translation(self): # Generate a fresh metadata dict and manually populate it with metadata # that we expect will be generated from our sample section. expected = empty_metadata() - aux = expected['aux_coords_and_dims'] + aux = expected["aux_coords_and_dims"] aux.append((self.forecast_period_coord, None)) aux.append((self.forecast_ref_time_coord, None)) aux.append((self.time_coord, None)) aux.append((self.height_coord, None)) - expected['cell_methods'] = [CellMethod(coords=('area',), - method='maximum')] - expected['attributes']['spatial_processing_type'] = \ - 'No interpolation' + expected["cell_methods"] = [CellMethod(coords=("area",), method="maximum")] + expected["attributes"]["spatial_processing_type"] = "No interpolation" # Now check that the section conversion produces the metadata we # expect. 
@@ -85,18 +82,18 @@ def test_translation(self): def test_bad_statistic_method(self): section = section_4_sample() - section['statisticalProcess'] = 999 - msg = r'unsupported statistical process type \[999\]' + section["statisticalProcess"] = 999 + msg = r"unsupported statistical process type \[999\]" with self.assertRaisesRegex(TranslationError, msg): self._translate(section) def test_bad_spatial_processing_code(self): section = section_4_sample() - section['spatialProcessing'] = 999 - msg = r'unsupported spatial processing type \[999\]' + section["spatialProcessing"] = 999 + msg = r"unsupported spatial processing type \[999\]" with self.assertRaisesRegex(TranslationError, msg): self._translate(section) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_31.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_31.py index 05a2ab081..182a01782 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_31.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_31.py @@ -23,21 +23,25 @@ def section_4(): series = mock.sentinel.satelliteSeries number = mock.sentinel.satelliteNumber instrument = mock.sentinel.instrumentType - return {'NB': 1, - 'satelliteSeries': series, - 'satelliteNumber': number, - 'instrumentType': instrument, - 'scaleFactorOfCentralWaveNumber': 1, - 'scaledValueOfCentralWaveNumber': 12} + return { + "NB": 1, + "satelliteSeries": series, + "satelliteNumber": number, + "instrumentType": instrument, + "scaleFactorOfCentralWaveNumber": 1, + "scaledValueOfCentralWaveNumber": 12, + } class Test(tests.IrisGribTest): def setUp(self): - self.patch('warnings.warn') + self.patch("warnings.warn") self.satellite_common_patch = self.patch( - 'iris_grib._load_convert.satellite_common') + "iris_grib._load_convert.satellite_common" + ) self.generating_process_patch = self.patch( - 
'iris_grib._load_convert.generating_process') + "iris_grib._load_convert.generating_process" + ) def test(self): # Prepare the arguments. @@ -53,8 +57,8 @@ def test(self): # Check that 'generating_process' was called. self.assertEqual(self.generating_process_patch.call_count, 1) # Check that the scalar time coord was added in. - self.assertIn((rt_coord, None), metadata['aux_coords_and_dims']) + self.assertIn((rt_coord, None), metadata["aux_coords_and_dims"]) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_32.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_32.py index c60e98c54..563e6d3e9 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_32.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_32.py @@ -19,15 +19,15 @@ class Test(tests.IrisGribTest): def setUp(self): - self.patch('warnings.warn') + self.patch("warnings.warn") self.generating_process_patch = self.patch( - 'iris_grib._load_convert.generating_process') + "iris_grib._load_convert.generating_process" + ) self.satellite_common_patch = self.patch( - 'iris_grib._load_convert.satellite_common') - self.time_coords_patch = self.patch( - 'iris_grib._load_convert.time_coords') - self.data_cutoff_patch = self.patch( - 'iris_grib._load_convert.data_cutoff') + "iris_grib._load_convert.satellite_common" + ) + self.time_coords_patch = self.patch("iris_grib._load_convert.time_coords") + self.data_cutoff_patch = self.patch("iris_grib._load_convert.data_cutoff") def test(self, value=10, factor=1): # Prepare the arguments. 
@@ -35,15 +35,16 @@ def test(self, value=10, factor=1): number = mock.sentinel.satelliteNumber instrument = mock.sentinel.instrumentType rt_coord = mock.sentinel.observation_time - section = {'NB': 1, - 'hoursAfterDataCutoff': None, - 'minutesAfterDataCutoff': None, - 'satelliteSeries': series, - 'satelliteNumber': number, - 'instrumentType': instrument, - 'scaleFactorOfCentralWaveNumber': 1, - 'scaledValueOfCentralWaveNumber': 12, - } + section = { + "NB": 1, + "hoursAfterDataCutoff": None, + "minutesAfterDataCutoff": None, + "satelliteSeries": series, + "satelliteNumber": number, + "instrumentType": instrument, + "scaleFactorOfCentralWaveNumber": 1, + "scaledValueOfCentralWaveNumber": 12, + } # Call the function. metadata = empty_metadata() @@ -59,5 +60,5 @@ def test(self, value=10, factor=1): self.assertEqual(self.time_coords_patch.call_count, 1) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_40.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_40.py index 8e9275d49..a7c0b3b26 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_40.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_40.py @@ -19,26 +19,29 @@ class Test(tests.IrisGribTest): def setUp(self): - self.section_4 = {'hoursAfterDataCutoff': _MDI, - 'minutesAfterDataCutoff': _MDI, - 'constituentType': 1, - 'indicatorOfUnitOfTimeRange': 0, # minutes - 'startStep': 360, - 'NV': 0, - 'typeOfFirstFixedSurface': 103, - 'scaleFactorOfFirstFixedSurface': 0, - 'scaledValueOfFirstFixedSurface': 9999, - 'typeOfSecondFixedSurface': 255} + self.section_4 = { + "hoursAfterDataCutoff": _MDI, + "minutesAfterDataCutoff": _MDI, + "constituentType": 1, + "indicatorOfUnitOfTimeRange": 0, # minutes + "startStep": 360, + "NV": 0, + "typeOfFirstFixedSurface": 103, + "scaleFactorOfFirstFixedSurface": 0, + "scaledValueOfFirstFixedSurface": 9999, + 
"typeOfSecondFixedSurface": 255, + } def test_constituent_type(self): metadata = empty_metadata() - rt_coord = iris.coords.DimCoord(24, 'forecast_reference_time', - units='hours since epoch') + rt_coord = iris.coords.DimCoord( + 24, "forecast_reference_time", units="hours since epoch" + ) product_definition_template_40(self.section_4, metadata, rt_coord) expected = empty_metadata() - expected['attributes']['WMO_constituent_type'] = 1 - self.assertEqual(metadata['attributes'], expected['attributes']) + expected["attributes"]["WMO_constituent_type"] = 1 + self.assertEqual(metadata["attributes"], expected["attributes"]) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_6.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_6.py index f8f21c69a..3c45fabcc 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_6.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_6.py @@ -23,35 +23,38 @@ class Test(tests.IrisGribTest): def setUp(self): def func(s, m, f): - return m['cell_methods'].append(self.cell_method) + return m["cell_methods"].append(self.cell_method) - module = 'iris_grib._load_convert' - self.patch('warnings.warn') - this = '{}.product_definition_template_0'.format(module) + module = "iris_grib._load_convert" + self.patch("warnings.warn") + this = "{}.product_definition_template_0".format(module) self.cell_method = mock.sentinel.cell_method self.patch(this, side_effect=func) - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], 
+ } def _check(self, request_warning): - this = 'iris_grib._load_convert.options' + this = "iris_grib._load_convert.options" with mock.patch(this, warn_on_unsupported=request_warning): metadata = deepcopy(self.metadata) percentile = 50 - section = {'percentileValue': percentile} + section = {"percentileValue": percentile} forecast_reference_time = mock.sentinel.forecast_reference_time # The called being tested. - product_definition_template_6(section, metadata, - forecast_reference_time) + product_definition_template_6(section, metadata, forecast_reference_time) expected = deepcopy(self.metadata) - expected['cell_methods'].append(self.cell_method) - percentile = DimCoord(percentile, - long_name='percentile', - units='%') - expected['aux_coords_and_dims'].append((percentile, None)) + expected["cell_methods"].append(self.cell_method) + percentile = DimCoord(percentile, long_name="percentile", units="%") + expected["aux_coords_and_dims"].append((percentile, None)) self.assertEqual(metadata, expected) def test_pdt_no_warn(self): @@ -61,5 +64,5 @@ def test_pdt_warn(self): self._check(True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_8.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_8.py index e2ab0e48d..f8dab4ed1 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_8.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_8.py @@ -19,32 +19,32 @@ class Test(tests.IrisGribTest): def setUp(self): - module = 'iris_grib._load_convert' + module = "iris_grib._load_convert" self.module = module # Create patches for called routines - self.patch_generating_process = self.patch( - module + '.generating_process') - self.patch_data_cutoff = self.patch(module + '.data_cutoff') + self.patch_generating_process = self.patch(module + ".generating_process") + self.patch_data_cutoff = self.patch(module + 
".data_cutoff") self.patch_statistical_cell_method = self.patch( - module + '.statistical_cell_method', - return_value=mock.sentinel.dummy_cell_method) + module + ".statistical_cell_method", + return_value=mock.sentinel.dummy_cell_method, + ) self.patch_statistical_fp_coord = self.patch( - module + '.statistical_forecast_period_coord', - return_value=mock.sentinel.dummy_fp_coord) + module + ".statistical_forecast_period_coord", + return_value=mock.sentinel.dummy_fp_coord, + ) self.patch_time_coord = self.patch( - module + '.validity_time_coord', - return_value=mock.sentinel.dummy_time_coord) - self.patch_vertical_coords = self.patch(module + '.vertical_coords') + module + ".validity_time_coord", return_value=mock.sentinel.dummy_time_coord + ) + self.patch_vertical_coords = self.patch(module + ".vertical_coords") # Construct dummy call arguments self.section = {} - self.section['hoursAfterDataCutoff'] = mock.sentinel.cutoff_hours - self.section['minutesAfterDataCutoff'] = mock.sentinel.cutoff_mins + self.section["hoursAfterDataCutoff"] = mock.sentinel.cutoff_hours + self.section["minutesAfterDataCutoff"] = mock.sentinel.cutoff_mins self.frt_coord = mock.Mock() - self.metadata = {'cell_methods': [], 'aux_coords_and_dims': []} + self.metadata = {"cell_methods": [], "aux_coords_and_dims": []} def test_basic(self): - product_definition_template_8( - self.section, self.metadata, self.frt_coord) + product_definition_template_8(self.section, self.metadata, self.frt_coord) # Check all expected functions were called just once. self.assertEqual(self.patch_generating_process.call_count, 1) self.assertEqual(self.patch_data_cutoff.call_count, 1) @@ -53,15 +53,21 @@ def test_basic(self): self.assertEqual(self.patch_time_coord.call_count, 1) self.assertEqual(self.patch_vertical_coords.call_count, 1) # Check metadata content. 
- self.assertEqual(sorted(self.metadata.keys()), - ['aux_coords_and_dims', 'cell_methods']) - self.assertEqual(self.metadata['cell_methods'], - [mock.sentinel.dummy_cell_method]) - self.assertCountEqual(self.metadata['aux_coords_and_dims'], - [(self.frt_coord, None), - (mock.sentinel.dummy_fp_coord, None), - (mock.sentinel.dummy_time_coord, None)]) + self.assertEqual( + sorted(self.metadata.keys()), ["aux_coords_and_dims", "cell_methods"] + ) + self.assertEqual( + self.metadata["cell_methods"], [mock.sentinel.dummy_cell_method] + ) + self.assertCountEqual( + self.metadata["aux_coords_and_dims"], + [ + (self.frt_coord, None), + (mock.sentinel.dummy_fp_coord, None), + (mock.sentinel.dummy_time_coord, None), + ], + ) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_product_definition_template_9.py b/iris_grib/tests/unit/load_convert/test_product_definition_template_9.py index 83bfbe30d..626ec6afa 100644 --- a/iris_grib/tests/unit/load_convert/test_product_definition_template_9.py +++ b/iris_grib/tests/unit/load_convert/test_product_definition_template_9.py @@ -23,52 +23,48 @@ class Test(tests.IrisGribTest): def setUp(self): # Create patches for called routines - module = 'iris_grib._load_convert' - self.patch_pdt8_call = self.patch( - module + '.product_definition_template_8') + module = "iris_grib._load_convert" + self.patch_pdt8_call = self.patch(module + ".product_definition_template_8") # Construct dummy call arguments self.section = {} - self.section['probabilityType'] = 1 - self.section['scaledValueOfUpperLimit'] = 53 - self.section['scaleFactorOfUpperLimit'] = 1 + self.section["probabilityType"] = 1 + self.section["scaledValueOfUpperLimit"] = 53 + self.section["scaleFactorOfUpperLimit"] = 1 self.frt_coord = mock.sentinel.frt_coord - self.metadata = {'cell_methods': [mock.sentinel.cell_method], - 'aux_coords_and_dims': []} + self.metadata = { + "cell_methods": [mock.sentinel.cell_method], + 
"aux_coords_and_dims": [], + } def test_basic(self): result = product_definition_template_9( - self.section, self.metadata, self.frt_coord) + self.section, self.metadata, self.frt_coord + ) # Check expected function was called. self.assertEqual( self.patch_pdt8_call.call_args_list, - [mock.call(self.section, self.metadata, self.frt_coord)]) + [mock.call(self.section, self.metadata, self.frt_coord)], + ) # Check metadata content (N.B. cell_method has been removed!). - self.assertEqual(self.metadata, {'cell_methods': [], - 'aux_coords_and_dims': []}) + self.assertEqual(self.metadata, {"cell_methods": [], "aux_coords_and_dims": []}) # Check result. - self.assertEqual(result, Probability('above_threshold', 5.3)) + self.assertEqual(result, Probability("above_threshold", 5.3)) def test_fail_bad_probability_type(self): - self.section['probabilityType'] = 17 - with self.assertRaisesRegex(TranslationError, - 'unsupported probability type'): - product_definition_template_9( - self.section, self.metadata, self.frt_coord) + self.section["probabilityType"] = 17 + with self.assertRaisesRegex(TranslationError, "unsupported probability type"): + product_definition_template_9(self.section, self.metadata, self.frt_coord) def test_fail_bad_threshold_value(self): - self.section['scaledValueOfUpperLimit'] = _MDI - with self.assertRaisesRegex(TranslationError, - 'missing scaled value'): - product_definition_template_9( - self.section, self.metadata, self.frt_coord) + self.section["scaledValueOfUpperLimit"] = _MDI + with self.assertRaisesRegex(TranslationError, "missing scaled value"): + product_definition_template_9(self.section, self.metadata, self.frt_coord) def test_fail_bad_threshold_scalefactor(self): - self.section['scaleFactorOfUpperLimit'] = _MDI - with self.assertRaisesRegex(TranslationError, - 'missing scale factor'): - product_definition_template_9( - self.section, self.metadata, self.frt_coord) + self.section["scaleFactorOfUpperLimit"] = _MDI + with 
self.assertRaisesRegex(TranslationError, "missing scale factor"): + product_definition_template_9(self.section, self.metadata, self.frt_coord) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_projection_centre.py b/iris_grib/tests/unit/load_convert/test_projection_centre.py index e2c18d6b1..5bbfa6252 100644 --- a/iris_grib/tests/unit/load_convert/test_projection_centre.py +++ b/iris_grib/tests/unit/load_convert/test_projection_centre.py @@ -29,8 +29,8 @@ def test_south_pole_on_projection_plane(self): def test_both(self): expected = ProjectionCentre(True, True) - self.assertEqual(projection_centre(0xc0), expected) + self.assertEqual(projection_centre(0xC0), expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_reference_time_coord.py b/iris_grib/tests/unit/load_convert/test_reference_time_coord.py index 76e4c36c4..f3d2b5f97 100644 --- a/iris_grib/tests/unit/load_convert/test_reference_time_coord.py +++ b/iris_grib/tests/unit/load_convert/test_reference_time_coord.py @@ -26,47 +26,53 @@ class Test(tests.IrisGribTest): def setUp(self): - self.section = {'year': 2007, - 'month': 1, - 'day': 15, - 'hour': 0, - 'minute': 3, - 'second': 0} - self.unit = Unit('hours since epoch', calendar=CALENDAR_GREGORIAN) - dt = datetime(self.section['year'], self.section['month'], - self.section['day'], self.section['hour'], - self.section['minute'], self.section['second']) + self.section = { + "year": 2007, + "month": 1, + "day": 15, + "hour": 0, + "minute": 3, + "second": 0, + } + self.unit = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) + dt = datetime( + self.section["year"], + self.section["month"], + self.section["day"], + self.section["hour"], + self.section["minute"], + self.section["second"], + ) self.point = self.unit.date2num(dt) def _check(self, section, standard_name=None): - expected = DimCoord(self.point, 
standard_name=standard_name, - units=self.unit) + expected = DimCoord(self.point, standard_name=standard_name, units=self.unit) # The call being tested. coord = reference_time_coord(section) self.assertEqual(coord, expected) def test_start_of_forecast__0(self): section = deepcopy(self.section) - section['significanceOfReferenceTime'] = 0 - self._check(section, 'forecast_reference_time') + section["significanceOfReferenceTime"] = 0 + self._check(section, "forecast_reference_time") def test_start_of_forecast__1(self): section = deepcopy(self.section) - section['significanceOfReferenceTime'] = 1 - self._check(section, 'forecast_reference_time') + section["significanceOfReferenceTime"] = 1 + self._check(section, "forecast_reference_time") def test_observation_time(self): section = deepcopy(self.section) - section['significanceOfReferenceTime'] = 3 - self._check(section, 'time') + section["significanceOfReferenceTime"] = 3 + self._check(section, "time") def test_unknown_significance(self): section = deepcopy(self.section) - section['significanceOfReferenceTime'] = 5 - emsg = 'unsupported significance' + section["significanceOfReferenceTime"] = 5 + emsg = "unsupported significance" with self.assertRaisesRegex(TranslationError, emsg): self._check(section) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_resolution_flags.py b/iris_grib/tests/unit/load_convert/test_resolution_flags.py index dc47e36d6..dc26fa9e4 100644 --- a/iris_grib/tests/unit/load_convert/test_resolution_flags.py +++ b/iris_grib/tests/unit/load_convert/test_resolution_flags.py @@ -32,5 +32,5 @@ def test_uv_resolved(self): self.assertEqual(resolution_flags(0x08), expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_satellite_common.py b/iris_grib/tests/unit/load_convert/test_satellite_common.py index 3e1e3dde1..fb0ab6545 100644 --- 
a/iris_grib/tests/unit/load_convert/test_satellite_common.py +++ b/iris_grib/tests/unit/load_convert/test_satellite_common.py @@ -27,12 +27,14 @@ def _check(self, factors=1, values=111): series = mock.sentinel.satelliteSeries number = mock.sentinel.satelliteNumber instrument = mock.sentinel.instrumentType - section = {'NB': 1, - 'satelliteSeries': series, - 'satelliteNumber': number, - 'instrumentType': instrument, - 'scaleFactorOfCentralWaveNumber': factors, - 'scaledValueOfCentralWaveNumber': values} + section = { + "NB": 1, + "satelliteSeries": series, + "satelliteNumber": number, + "instrumentType": instrument, + "scaleFactorOfCentralWaveNumber": factors, + "scaledValueOfCentralWaveNumber": values, + } # Call the function. metadata = empty_metadata() @@ -40,17 +42,17 @@ def _check(self, factors=1, values=111): # Check the result. expected = empty_metadata() - coord = AuxCoord(series, long_name='satellite_series', units=1) - expected['aux_coords_and_dims'].append((coord, None)) - coord = AuxCoord(number, long_name='satellite_number', units=1) - expected['aux_coords_and_dims'].append((coord, None)) - coord = AuxCoord(instrument, long_name='instrument_type', units=1) - expected['aux_coords_and_dims'].append((coord, None)) - standard_name = 'sensor_band_central_radiation_wavenumber' - coord = AuxCoord(values / (10.0 ** factors), - standard_name=standard_name, - units='m-1') - expected['aux_coords_and_dims'].append((coord, None)) + coord = AuxCoord(series, long_name="satellite_series", units=1) + expected["aux_coords_and_dims"].append((coord, None)) + coord = AuxCoord(number, long_name="satellite_number", units=1) + expected["aux_coords_and_dims"].append((coord, None)) + coord = AuxCoord(instrument, long_name="instrument_type", units=1) + expected["aux_coords_and_dims"].append((coord, None)) + standard_name = "sensor_band_central_radiation_wavenumber" + coord = AuxCoord( + values / (10.0**factors), standard_name=standard_name, units="m-1" + ) + 
expected["aux_coords_and_dims"].append((coord, None)) self.assertEqual(metadata, expected) def test_basic(self): @@ -64,5 +66,5 @@ def test_multiple_wavelengths(self): self._check(values=values, factors=factors) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_scanning_mode.py b/iris_grib/tests/unit/load_convert/test_scanning_mode.py index da554f538..00d8332a4 100644 --- a/iris_grib/tests/unit/load_convert/test_scanning_mode.py +++ b/iris_grib/tests/unit/load_convert/test_scanning_mode.py @@ -22,18 +22,21 @@ def test_unset(self): self.assertEqual(scanning_mode(0x0), expected) def test_i_negative(self): - expected = ScanningMode(i_negative=True, j_positive=False, - j_consecutive=False, i_alternative=False) + expected = ScanningMode( + i_negative=True, j_positive=False, j_consecutive=False, i_alternative=False + ) self.assertEqual(scanning_mode(0x80), expected) def test_j_positive(self): - expected = ScanningMode(i_negative=False, j_positive=True, - j_consecutive=False, i_alternative=False) + expected = ScanningMode( + i_negative=False, j_positive=True, j_consecutive=False, i_alternative=False + ) self.assertEqual(scanning_mode(0x40), expected) def test_j_consecutive(self): - expected = ScanningMode(i_negative=False, j_positive=False, - j_consecutive=True, i_alternative=False) + expected = ScanningMode( + i_negative=False, j_positive=False, j_consecutive=True, i_alternative=False + ) self.assertEqual(scanning_mode(0x20), expected) def test_i_alternative(self): @@ -41,5 +44,5 @@ def test_i_alternative(self): scanning_mode(0x10) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_statistical_cell_method.py b/iris_grib/tests/unit/load_convert/test_statistical_cell_method.py index 551b18267..ef2ba3bef 100644 --- a/iris_grib/tests/unit/load_convert/test_statistical_cell_method.py +++ 
b/iris_grib/tests/unit/load_convert/test_statistical_cell_method.py @@ -21,14 +21,13 @@ class Test(tests.IrisGribTest): def setUp(self): self.section = {} - self.section['productDefinitionTemplateNumber'] = 8 - self.section['numberOfTimeRange'] = 1 - self.section['typeOfStatisticalProcessing'] = 0 - self.section['typeOfTimeIncrement'] = 2 - self.section['timeIncrement'] = 0 - - def expected_cell_method(self, - coords=('time',), method='mean', intervals=None): + self.section["productDefinitionTemplateNumber"] = 8 + self.section["numberOfTimeRange"] = 1 + self.section["typeOfStatisticalProcessing"] = 0 + self.section["typeOfTimeIncrement"] = 2 + self.section["timeIncrement"] = 0 + + def expected_cell_method(self, coords=("time",), method="mean", intervals=None): keys = dict(coords=coords, method=method, intervals=intervals) cell_method = CellMethod(**keys) return cell_method @@ -38,85 +37,84 @@ def test_basic(self): self.assertEqual(cell_method, self.expected_cell_method()) def test_intervals(self): - self.section['timeIncrement'] = 3 - self.section['indicatorOfUnitForTimeIncrement'] = 1 + self.section["timeIncrement"] = 3 + self.section["indicatorOfUnitForTimeIncrement"] = 1 cell_method = statistical_cell_method(self.section) - self.assertEqual(cell_method, - self.expected_cell_method(intervals=('3 hours',))) + self.assertEqual(cell_method, self.expected_cell_method(intervals=("3 hours",))) def test_increment_missing(self): - self.section['timeIncrement'] = 2 ** 32 - 1 - self.section['indicatorOfUnitForTimeIncrement'] = 255 + self.section["timeIncrement"] = 2**32 - 1 + self.section["indicatorOfUnitForTimeIncrement"] = 255 cell_method = statistical_cell_method(self.section) self.assertEqual(cell_method, self.expected_cell_method()) def test_different_statistic(self): - self.section['typeOfStatisticalProcessing'] = 6 + self.section["typeOfStatisticalProcessing"] = 6 cell_method = statistical_cell_method(self.section) self.assertEqual( - cell_method, - 
self.expected_cell_method(method='standard_deviation')) + cell_method, self.expected_cell_method(method="standard_deviation") + ) def test_fail_bad_ranges(self): - self.section['numberOfTimeRange'] = 0 - with self.assertRaisesRegex(TranslationError, - 'aggregation over "0 time ranges"'): + self.section["numberOfTimeRange"] = 0 + with self.assertRaisesRegex( + TranslationError, 'aggregation over "0 time ranges"' + ): statistical_cell_method(self.section) def test_fail_multiple_ranges(self): - self.section['numberOfTimeRange'] = 2 - with self.assertRaisesRegex(TranslationError, - r'multiple time ranges \[2\]'): + self.section["numberOfTimeRange"] = 2 + with self.assertRaisesRegex(TranslationError, r"multiple time ranges \[2\]"): statistical_cell_method(self.section) def test_fail_unknown_statistic(self): - self.section['typeOfStatisticalProcessing'] = 17 + self.section["typeOfStatisticalProcessing"] = 17 with self.assertRaisesRegex( - TranslationError, - r'contains an unsupported statistical process type \[17\]'): + TranslationError, r"contains an unsupported statistical process type \[17\]" + ): statistical_cell_method(self.section) def test_fail_bad_increment_type(self): - self.section['typeOfTimeIncrement'] = 7 + self.section["typeOfTimeIncrement"] = 7 with self.assertRaisesRegex( - TranslationError, - r'time-increment type \[7\] is not supported'): + TranslationError, r"time-increment type \[7\] is not supported" + ): statistical_cell_method(self.section) def test_pdt_9(self): # Should behave the same as PDT 4.8. - self.section['productDefinitionTemplateNumber'] = 9 + self.section["productDefinitionTemplateNumber"] = 9 cell_method = statistical_cell_method(self.section) self.assertEqual(cell_method, self.expected_cell_method()) def test_pdt_10(self): # Should behave the same as PDT 4.8. 
- self.section['productDefinitionTemplateNumber'] = 10 + self.section["productDefinitionTemplateNumber"] = 10 cell_method = statistical_cell_method(self.section) self.assertEqual(cell_method, self.expected_cell_method()) def test_pdt_11(self): # Should behave the same as PDT 4.8. - self.section['productDefinitionTemplateNumber'] = 11 + self.section["productDefinitionTemplateNumber"] = 11 cell_method = statistical_cell_method(self.section) self.assertEqual(cell_method, self.expected_cell_method()) def test_pdt_15(self): # Encoded slightly differently to PDT 4.8. - self.section['productDefinitionTemplateNumber'] = 15 - test_code = self.section['typeOfStatisticalProcessing'] - del self.section['typeOfStatisticalProcessing'] - self.section['statisticalProcess'] = test_code + self.section["productDefinitionTemplateNumber"] = 15 + test_code = self.section["typeOfStatisticalProcessing"] + del self.section["typeOfStatisticalProcessing"] + self.section["statisticalProcess"] = test_code cell_method = statistical_cell_method(self.section) self.assertEqual(cell_method, self.expected_cell_method()) def test_fail_unsupported_pdt(self): # Rejects PDTs other than the ones tested above. 
- self.section['productDefinitionTemplateNumber'] = 101 + self.section["productDefinitionTemplateNumber"] = 101 msg = "can't get statistical method for unsupported pdt : 4.101" with self.assertRaisesRegex(ValueError, msg): statistical_cell_method(self.section) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_statistical_forecast_period_coord.py b/iris_grib/tests/unit/load_convert/test_statistical_forecast_period_coord.py index 1873597d4..b825edc61 100644 --- a/iris_grib/tests/unit/load_convert/test_statistical_forecast_period_coord.py +++ b/iris_grib/tests/unit/load_convert/test_statistical_forecast_period_coord.py @@ -19,33 +19,32 @@ class Test(tests.IrisGribTest): def setUp(self): - module = 'iris_grib._load_convert' + module = "iris_grib._load_convert" self.module = module - self.patch_hindcast = self.patch(module + '._hindcast_fix') + self.patch_hindcast = self.patch(module + "._hindcast_fix") self.forecast_seconds = 0.0 self.forecast_units = mock.Mock() self.forecast_units.convert = lambda x, y: self.forecast_seconds - self.patch(module + '.time_range_unit', - return_value=self.forecast_units) + self.patch(module + ".time_range_unit", return_value=self.forecast_units) self.frt_coord = mock.Mock() self.frt_coord.points = [1] self.frt_coord.units.num2date = mock.Mock( - return_value=datetime.datetime(2010, 2, 3)) + return_value=datetime.datetime(2010, 2, 3) + ) self.section = {} - self.section['yearOfEndOfOverallTimeInterval'] = 2010 - self.section['monthOfEndOfOverallTimeInterval'] = 2 - self.section['dayOfEndOfOverallTimeInterval'] = 3 - self.section['hourOfEndOfOverallTimeInterval'] = 8 - self.section['minuteOfEndOfOverallTimeInterval'] = 0 - self.section['secondOfEndOfOverallTimeInterval'] = 0 - self.section['forecastTime'] = mock.Mock() - self.section['indicatorOfUnitOfTimeRange'] = mock.Mock() + self.section["yearOfEndOfOverallTimeInterval"] = 2010 + 
self.section["monthOfEndOfOverallTimeInterval"] = 2 + self.section["dayOfEndOfOverallTimeInterval"] = 3 + self.section["hourOfEndOfOverallTimeInterval"] = 8 + self.section["minuteOfEndOfOverallTimeInterval"] = 0 + self.section["secondOfEndOfOverallTimeInterval"] = 0 + self.section["forecastTime"] = mock.Mock() + self.section["indicatorOfUnitOfTimeRange"] = mock.Mock() def test_basic(self): - coord = statistical_forecast_period_coord(self.section, - self.frt_coord) - self.assertEqual(coord.standard_name, 'forecast_period') - self.assertEqual(coord.units, 'hours') + coord = statistical_forecast_period_coord(self.section, self.frt_coord) + self.assertEqual(coord.standard_name, "forecast_period") + self.assertEqual(coord.units, "hours") self.assertArrayAlmostEqual(coord.points, [4.0]) self.assertArrayAlmostEqual(coord.bounds, [[0.0, 8.0]]) @@ -54,10 +53,10 @@ def test_with_hindcast(self): self.assertEqual(self.patch_hindcast.call_count, 1) def test_no_hindcast(self): - self.patch(self.module + '.options.support_hindcast_values', False) + self.patch(self.module + ".options.support_hindcast_values", False) _ = statistical_forecast_period_coord(self.section, self.frt_coord) self.assertEqual(self.patch_hindcast.call_count, 0) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_time_range_unit.py b/iris_grib/tests/unit/load_convert/test_time_range_unit.py index ffe09250d..e8b9f73e4 100644 --- a/iris_grib/tests/unit/load_convert/test_time_range_unit.py +++ b/iris_grib/tests/unit/load_convert/test_time_range_unit.py @@ -19,13 +19,15 @@ class Test(tests.IrisGribTest): def setUp(self): - self.unit_by_indicator = {0: Unit('minutes'), - 1: Unit('hours'), - 2: Unit('days'), - 10: Unit('3 hours'), - 11: Unit('6 hours'), - 12: Unit('12 hours'), - 13: Unit('seconds')} + self.unit_by_indicator = { + 0: Unit("minutes"), + 1: Unit("hours"), + 2: Unit("days"), + 10: Unit("3 hours"), + 11: Unit("6 hours"), + 12: Unit("12 
hours"), + 13: Unit("seconds"), + } def test_units(self): for indicator, unit in self.unit_by_indicator.items(): @@ -33,10 +35,10 @@ def test_units(self): self.assertEqual(result, unit) def test_bad_indicator(self): - emsg = 'unsupported time range' + emsg = "unsupported time range" with self.assertRaisesRegex(TranslationError, emsg): time_range_unit(-1) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_translate_phenomenon.py b/iris_grib/tests/unit/load_convert/test_translate_phenomenon.py index fed63e4db..b19a03db0 100644 --- a/iris_grib/tests/unit/load_convert/test_translate_phenomenon.py +++ b/iris_grib/tests/unit/load_convert/test_translate_phenomenon.py @@ -21,43 +21,50 @@ class Test_probability(tests.IrisGribTest): def setUp(self): # Patch inner call to return a given phenomenon type. - target_module = 'iris_grib._load_convert' + target_module = "iris_grib._load_convert" self.phenom_lookup_patch = self.patch( - target_module + '.itranslation.grib2_phenom_to_cf_info', - return_value=Grib1CfData('air_temperature', '', 'K', None)) + target_module + ".itranslation.grib2_phenom_to_cf_info", + return_value=Grib1CfData("air_temperature", "", "K", None), + ) # Construct dummy call arguments - self.probability = Probability('', 22.0) - self.metadata = {'aux_coords_and_dims': [], 'attributes': {}} + self.probability = Probability("", 22.0) + self.metadata = {"aux_coords_and_dims": [], "attributes": {}} def test_basic(self): - translate_phenomenon(self.metadata, 7, 8, 9, None, - None, None, probability=self.probability) + translate_phenomenon( + self.metadata, 7, 8, 9, None, None, None, probability=self.probability + ) # Check metadata. 
- thresh_coord = DimCoord([22.0], - standard_name='air_temperature', - long_name='', units='K') - self.assertEqual(self.metadata, { - 'standard_name': None, - 'long_name': 'probability_of_air_temperature_', - 'units': Unit(1), - 'aux_coords_and_dims': [(thresh_coord, None)], - 'attributes': {'GRIB_PARAM': GRIBCode(2, 7, 8, 9)}}) + thresh_coord = DimCoord( + [22.0], standard_name="air_temperature", long_name="", units="K" + ) + self.assertEqual( + self.metadata, + { + "standard_name": None, + "long_name": "probability_of_air_temperature_", + "units": Unit(1), + "aux_coords_and_dims": [(thresh_coord, None)], + "attributes": {"GRIB_PARAM": GRIBCode(2, 7, 8, 9)}, + }, + ) def test_no_phenomenon(self): self.phenom_lookup_patch.return_value = None expected_metadata = self.metadata.copy() - translate_phenomenon(self.metadata, - discipline=7, - parameterCategory=77, - parameterNumber=777, - typeOfFirstFixedSurface=None, - scaledValueOfFirstFixedSurface=None, - typeOfSecondFixedSurface=None, - probability=self.probability) - expected_metadata['attributes']['GRIB_PARAM'] = \ - GRIBCode(2, 7, 77, 777) + translate_phenomenon( + self.metadata, + discipline=7, + parameterCategory=77, + parameterNumber=777, + typeOfFirstFixedSurface=None, + scaledValueOfFirstFixedSurface=None, + typeOfSecondFixedSurface=None, + probability=self.probability, + ) + expected_metadata["attributes"]["GRIB_PARAM"] = GRIBCode(2, 7, 77, 777) self.assertEqual(self.metadata, expected_metadata) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_unscale.py b/iris_grib/tests/unit/load_convert/test_unscale.py index a3f615bbd..0005b13a3 100644 --- a/iris_grib/tests/unit/load_convert/test_unscale.py +++ b/iris_grib/tests/unit/load_convert/test_unscale.py @@ -31,10 +31,12 @@ def test_single_mdi(self): self.assertIs(unscale(MDI, 1), ma.masked) def test_array(self): - items = [[1, [0.1, 1.2, 12.3, 123.4]], - [-1, [10.0, 120.0, 1230.0, 12340.0]], 
- [2, [0.01, 0.12, 1.23, 12.34]], - [-2, [100.0, 1200.0, 12300.0, 123400.0]]] + items = [ + [1, [0.1, 1.2, 12.3, 123.4]], + [-1, [10.0, 120.0, 1230.0, 12340.0]], + [2, [0.01, 0.12, 1.23, 12.34]], + [-2, [100.0, 1200.0, 12300.0, 123400.0]], + ] values = np.array([1, 12, 123, 1234]) for factor, expected in items: result = unscale(values, [factor] * values.size) @@ -48,5 +50,5 @@ def test_array_mdi(self): np.testing.assert_array_almost_equal(result, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_validity_time_coord.py b/iris_grib/tests/unit/load_convert/test_validity_time_coord.py index e3ba9c691..450682262 100644 --- a/iris_grib/tests/unit/load_convert/test_validity_time_coord.py +++ b/iris_grib/tests/unit/load_convert/test_validity_time_coord.py @@ -22,30 +22,31 @@ class Test(tests.IrisGribTest): def setUp(self): - self.fp = DimCoord(5, standard_name='forecast_period', units='hours') + self.fp = DimCoord(5, standard_name="forecast_period", units="hours") self.fp_test_bounds = np.array([[1.0, 9.0]]) - self.unit = Unit('hours since epoch') - self.frt = DimCoord(10, standard_name='forecast_reference_time', - units=self.unit) + self.unit = Unit("hours since epoch") + self.frt = DimCoord( + 10, standard_name="forecast_reference_time", units=self.unit + ) def test_frt_shape(self): frt = mock.Mock(shape=(2,)) fp = mock.Mock(shape=(1,)) - emsg = 'scalar forecast reference time' + emsg = "scalar forecast reference time" with self.assertRaisesRegex(ValueError, emsg): validity_time_coord(frt, fp) def test_fp_shape(self): frt = mock.Mock(shape=(1,)) fp = mock.Mock(shape=(2,)) - emsg = 'scalar forecast period' + emsg = "scalar forecast period" with self.assertRaisesRegex(ValueError, emsg): validity_time_coord(frt, fp) def test(self): coord = validity_time_coord(self.frt, self.fp) self.assertIsInstance(coord, DimCoord) - self.assertEqual(coord.standard_name, 'time') + 
self.assertEqual(coord.standard_name, "time") self.assertEqual(coord.units, self.unit) self.assertEqual(coord.shape, (1,)) point = self.frt.points[0] + self.fp.points[0] @@ -56,7 +57,7 @@ def test_bounded(self): self.fp.bounds = self.fp_test_bounds coord = validity_time_coord(self.frt, self.fp) self.assertIsInstance(coord, DimCoord) - self.assertEqual(coord.standard_name, 'time') + self.assertEqual(coord.standard_name, "time") self.assertEqual(coord.units, self.unit) self.assertEqual(coord.shape, (1,)) point = self.frt.points[0] + self.fp.points[0] @@ -66,5 +67,5 @@ def test_bounded(self): self.assertArrayAlmostEqual(coord.bounds, bounds) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/load_convert/test_vertical_coords.py b/iris_grib/tests/unit/load_convert/test_vertical_coords.py index 2f77c0b81..d619e0185 100644 --- a/iris_grib/tests/unit/load_convert/test_vertical_coords.py +++ b/iris_grib/tests/unit/load_convert/test_vertical_coords.py @@ -18,183 +18,210 @@ from iris.exceptions import TranslationError from iris_grib._load_convert import vertical_coords -from iris_grib._load_convert import \ - _TYPE_OF_FIXED_SURFACE_MISSING as MISSING_SURFACE, \ - _MDI as MISSING_LEVEL +from iris_grib._load_convert import ( + _TYPE_OF_FIXED_SURFACE_MISSING as MISSING_SURFACE, + _MDI as MISSING_LEVEL, +) class Test(tests.IrisGribTest): def setUp(self): - self.metadata = {'factories': [], 'references': [], - 'standard_name': None, - 'long_name': None, 'units': None, 'attributes': {}, - 'cell_methods': [], 'dim_coords_and_dims': [], - 'aux_coords_and_dims': []} + self.metadata = { + "factories": [], + "references": [], + "standard_name": None, + "long_name": None, + "units": None, + "attributes": {}, + "cell_methods": [], + "dim_coords_and_dims": [], + "aux_coords_and_dims": [], + } def test_hybrid_factories(self): def func(section, metadata): - return metadata['factories'].append(factory) + return 
metadata["factories"].append(factory) metadata = deepcopy(self.metadata) - section = {'NV': 1} - this = 'iris_grib._load_convert.hybrid_factories' + section = {"NV": 1} + this = "iris_grib._load_convert.hybrid_factories" factory = mock.sentinel.factory with mock.patch(this, side_effect=func) as hybrid_factories: vertical_coords(section, metadata) self.assertTrue(hybrid_factories.called) - self.assertEqual(metadata['factories'], [factory]) + self.assertEqual(metadata["factories"], [factory]) def test_no_first_fixed_surface(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': MISSING_SURFACE, - 'scaledValueOfFirstFixedSurface': MISSING_LEVEL} + section = { + "NV": 0, + "typeOfFirstFixedSurface": MISSING_SURFACE, + "scaledValueOfFirstFixedSurface": MISSING_LEVEL, + } vertical_coords(section, metadata) self.assertEqual(metadata, self.metadata) def test_fixed_surface_type_1(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 1, - 'scaledValueOfFirstFixedSurface': 0, - 'scaleFactorOfFirstFixedSurface': 0, - 'typeOfSecondFixedSurface': 255} + section = { + "NV": 0, + "typeOfFirstFixedSurface": 1, + "scaledValueOfFirstFixedSurface": 0, + "scaleFactorOfFirstFixedSurface": 0, + "typeOfSecondFixedSurface": 255, + } vertical_coords(section, metadata) # No metadata change, as surfaceType=1 translates to "no vertical # coord" without error or warning. 
self.assertEqual(metadata, self.metadata) def test_unknown_first_fixed_surface_with_missing_scaled_value(self): - this = 'iris_grib._load_convert.options' - with mock.patch('warnings.warn') as warn: + this = "iris_grib._load_convert.options" + with mock.patch("warnings.warn") as warn: with mock.patch(this) as options: for request_warning in [False, True]: options.warn_on_unsupported = request_warning metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 0, - 'scaledValueOfFirstFixedSurface': MISSING_LEVEL} + section = { + "NV": 0, + "typeOfFirstFixedSurface": 0, + "scaledValueOfFirstFixedSurface": MISSING_LEVEL, + } # The call being tested. vertical_coords(section, metadata) self.assertEqual(metadata, self.metadata) if request_warning: self.assertEqual(len(warn.mock_calls), 1) args, _ = warn.call_args - self.assertIn('surface with missing scaled value', - args[0]) + self.assertIn("surface with missing scaled value", args[0]) else: self.assertEqual(len(warn.mock_calls), 0) def test_unknown_first_fixed_surface(self): metadata = deepcopy(self.metadata) expected = deepcopy(self.metadata) - coord = DimCoord(600.0, attributes={'GRIB_fixed_surface_type': 106}) - expected['aux_coords_and_dims'].append((coord, None)) - - section = {'NV': 0, - 'typeOfFirstFixedSurface': 106, - 'scaledValueOfFirstFixedSurface': 600, - 'scaleFactorOfFirstFixedSurface': 0, - 'typeOfSecondFixedSurface': MISSING_SURFACE} + coord = DimCoord(600.0, attributes={"GRIB_fixed_surface_type": 106}) + expected["aux_coords_and_dims"].append((coord, None)) + + section = { + "NV": 0, + "typeOfFirstFixedSurface": 106, + "scaledValueOfFirstFixedSurface": 600, + "scaleFactorOfFirstFixedSurface": 0, + "typeOfSecondFixedSurface": MISSING_SURFACE, + } vertical_coords(section, metadata) self.assertEqual(metadata, expected) def test_unknown_first_fixed_surface_with_second_fixed_surface(self): metadata = deepcopy(self.metadata) expected = deepcopy(self.metadata) - coord = 
DimCoord(9000.0, bounds=[18000, 0], - attributes={'GRIB_fixed_surface_type': 108}) - expected['aux_coords_and_dims'].append((coord, None)) - - section = {'NV': 0, - 'typeOfFirstFixedSurface': 108, - 'scaledValueOfFirstFixedSurface': 18000, - 'scaleFactorOfFirstFixedSurface': 0, - 'typeOfSecondFixedSurface': 108, - 'scaledValueOfSecondFixedSurface': 0, - 'scaleFactorOfSecondFixedSurface': 0} + coord = DimCoord( + 9000.0, bounds=[18000, 0], attributes={"GRIB_fixed_surface_type": 108} + ) + expected["aux_coords_and_dims"].append((coord, None)) + + section = { + "NV": 0, + "typeOfFirstFixedSurface": 108, + "scaledValueOfFirstFixedSurface": 18000, + "scaleFactorOfFirstFixedSurface": 0, + "typeOfSecondFixedSurface": 108, + "scaledValueOfSecondFixedSurface": 0, + "scaleFactorOfSecondFixedSurface": 0, + } vertical_coords(section, metadata) self.assertEqual(metadata, expected) def test_pressure_with_no_second_fixed_surface(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 100, # pressure / Pa - 'scaledValueOfFirstFixedSurface': 10, - 'scaleFactorOfFirstFixedSurface': 1, - 'typeOfSecondFixedSurface': MISSING_SURFACE} + section = { + "NV": 0, + "typeOfFirstFixedSurface": 100, # pressure / Pa + "scaledValueOfFirstFixedSurface": 10, + "scaleFactorOfFirstFixedSurface": 1, + "typeOfSecondFixedSurface": MISSING_SURFACE, + } vertical_coords(section, metadata) - coord = DimCoord(1.0, long_name='pressure', units='Pa') + coord = DimCoord(1.0, long_name="pressure", units="Pa") expected = deepcopy(self.metadata) - expected['aux_coords_and_dims'].append((coord, None)) + expected["aux_coords_and_dims"].append((coord, None)) self.assertEqual(metadata, expected) def test_height_with_no_second_fixed_surface(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 103, # height / m - 'scaledValueOfFirstFixedSurface': 100, - 'scaleFactorOfFirstFixedSurface': 2, - 'typeOfSecondFixedSurface': MISSING_SURFACE} + 
section = { + "NV": 0, + "typeOfFirstFixedSurface": 103, # height / m + "scaledValueOfFirstFixedSurface": 100, + "scaleFactorOfFirstFixedSurface": 2, + "typeOfSecondFixedSurface": MISSING_SURFACE, + } vertical_coords(section, metadata) - coord = DimCoord(1.0, long_name='height', units='m') + coord = DimCoord(1.0, long_name="height", units="m") expected = deepcopy(self.metadata) - expected['aux_coords_and_dims'].append((coord, None)) + expected["aux_coords_and_dims"].append((coord, None)) self.assertEqual(metadata, expected) def test_different_fixed_surfaces(self): - section = {'NV': 0, - 'typeOfFirstFixedSurface': 100, - 'scaledValueOfFirstFixedSurface': 10, - 'scaleFactorOfFirstFixedSurface': 1, - 'typeOfSecondFixedSurface': 0} - emsg = 'different types of first and second fixed surface' + section = { + "NV": 0, + "typeOfFirstFixedSurface": 100, + "scaledValueOfFirstFixedSurface": 10, + "scaleFactorOfFirstFixedSurface": 1, + "typeOfSecondFixedSurface": 0, + } + emsg = "different types of first and second fixed surface" with self.assertRaisesRegex(TranslationError, emsg): vertical_coords(section, None) def test_same_fixed_surfaces_missing_second_scaled_value(self): - section = {'NV': 0, - 'typeOfFirstFixedSurface': 100, - 'scaledValueOfFirstFixedSurface': 10, - 'scaleFactorOfFirstFixedSurface': 1, - 'typeOfSecondFixedSurface': 100, - 'scaledValueOfSecondFixedSurface': MISSING_LEVEL} - emsg = 'missing scaled value of second fixed surface' + section = { + "NV": 0, + "typeOfFirstFixedSurface": 100, + "scaledValueOfFirstFixedSurface": 10, + "scaleFactorOfFirstFixedSurface": 1, + "typeOfSecondFixedSurface": 100, + "scaledValueOfSecondFixedSurface": MISSING_LEVEL, + } + emsg = "missing scaled value of second fixed surface" with self.assertRaisesRegex(TranslationError, emsg): vertical_coords(section, None) def test_pressure_with_second_fixed_surface(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 100, - 
'scaledValueOfFirstFixedSurface': 10, - 'scaleFactorOfFirstFixedSurface': 1, - 'typeOfSecondFixedSurface': 100, - 'scaledValueOfSecondFixedSurface': 30, - 'scaleFactorOfSecondFixedSurface': 1} + section = { + "NV": 0, + "typeOfFirstFixedSurface": 100, + "scaledValueOfFirstFixedSurface": 10, + "scaleFactorOfFirstFixedSurface": 1, + "typeOfSecondFixedSurface": 100, + "scaledValueOfSecondFixedSurface": 30, + "scaleFactorOfSecondFixedSurface": 1, + } vertical_coords(section, metadata) - coord = DimCoord(2.0, long_name='pressure', units='Pa', - bounds=[1.0, 3.0]) + coord = DimCoord(2.0, long_name="pressure", units="Pa", bounds=[1.0, 3.0]) expected = deepcopy(self.metadata) - expected['aux_coords_and_dims'].append((coord, None)) + expected["aux_coords_and_dims"].append((coord, None)) self.assertEqual(metadata, expected) def test_height_with_second_fixed_surface(self): metadata = deepcopy(self.metadata) - section = {'NV': 0, - 'typeOfFirstFixedSurface': 103, - 'scaledValueOfFirstFixedSurface': 1000, - 'scaleFactorOfFirstFixedSurface': 2, - 'typeOfSecondFixedSurface': 103, - 'scaledValueOfSecondFixedSurface': 3000, - 'scaleFactorOfSecondFixedSurface': 2} + section = { + "NV": 0, + "typeOfFirstFixedSurface": 103, + "scaledValueOfFirstFixedSurface": 1000, + "scaleFactorOfFirstFixedSurface": 2, + "typeOfSecondFixedSurface": 103, + "scaledValueOfSecondFixedSurface": 3000, + "scaleFactorOfSecondFixedSurface": 2, + } vertical_coords(section, metadata) - coord = DimCoord(20.0, long_name='height', units='m', - bounds=[10.0, 30.0]) + coord = DimCoord(20.0, long_name="height", units="m", bounds=[10.0, 30.0]) expected = deepcopy(self.metadata) - expected['aux_coords_and_dims'].append((coord, None)) + expected["aux_coords_and_dims"].append((coord, None)) self.assertEqual(metadata, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/message/test_GribMessage.py b/iris_grib/tests/unit/message/test_GribMessage.py index 
12318b937..ca2248da4 100644 --- a/iris_grib/tests/unit/message/test_GribMessage.py +++ b/iris_grib/tests/unit/message/test_GribMessage.py @@ -24,24 +24,23 @@ from iris_grib.tests.unit import _make_test_message -SECTION_6_NO_BITMAP = {'bitMapIndicator': 255, 'bitmap': None} +SECTION_6_NO_BITMAP = {"bitMapIndicator": 255, "bitmap": None} @tests.skip_data class Test_messages_from_filename(tests.IrisGribTest): def test(self): - filename = tests.get_data_path(('GRIB', '3_layer_viz', - '3_layer.grib2')) + filename = tests.get_data_path(("GRIB", "3_layer_viz", "3_layer.grib2")) messages = list(GribMessage.messages_from_filename(filename)) self.assertEqual(len(messages), 3) def test_release_file(self): - filename = tests.get_data_path(('GRIB', '3_layer_viz', - '3_layer.grib2')) + filename = tests.get_data_path(("GRIB", "3_layer_viz", "3_layer.grib2")) my_file = open(filename) import builtins # noqa: F401 - self.patch('builtins.open', mock.Mock(return_value=my_file)) + + self.patch("builtins.open", mock.Mock(return_value=my_file)) messages = list(GribMessage.messages_from_filename(filename)) self.assertFalse(my_file.closed) @@ -61,19 +60,21 @@ class Test_data__masked(tests.IrisGribTest): def setUp(self): self.bitmap = np.array([0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1]) self.shape = (3, 4) - self._section_3 = {'sourceOfGridDefinition': 0, - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 0, - 'gridDefinitionTemplateNumber': 0, - 'scanningMode': 0, - 'Nj': self.shape[0], - 'Ni': self.shape[1]} + self._section_3 = { + "sourceOfGridDefinition": 0, + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 0, + "gridDefinitionTemplateNumber": 0, + "scanningMode": 0, + "Nj": self.shape[0], + "Ni": self.shape[1], + } def test_no_bitmap(self): values = np.arange(12) - message = _make_test_message({3: self._section_3, - 6: SECTION_6_NO_BITMAP, - 7: {'codedValues': values}}) + message = _make_test_message( + {3: self._section_3, 6: 
SECTION_6_NO_BITMAP, 7: {"codedValues": values}} + ) result = as_concrete_data(message.data) expected = values.reshape(self.shape) self.assertEqual(result.shape, self.shape) @@ -84,13 +85,15 @@ def test_bitmap_present(self): # are not equal. input_values = np.arange(5) output_values = np.array([-1, -1, 0, 1, -1, -1, -1, 2, -1, 3, -1, 4]) - message = _make_test_message({3: self._section_3, - 6: {'bitMapIndicator': 0, - 'bitmap': self.bitmap}, - 7: {'codedValues': input_values}}) + message = _make_test_message( + { + 3: self._section_3, + 6: {"bitMapIndicator": 0, "bitmap": self.bitmap}, + 7: {"codedValues": input_values}, + } + ) result = as_concrete_data(message.data) - expected = ma.masked_array(output_values, - np.logical_not(self.bitmap)) + expected = ma.masked_array(output_values, np.logical_not(self.bitmap)) expected = expected.reshape(self.shape) self.assertMaskedArrayEqual(result, expected) @@ -98,56 +101,78 @@ def test_bitmap__shapes_mismatch(self): # Test the behaviour where bitmap and codedValues shapes do not match. # Too many or too few unmasked values in codedValues will cause this. 
values = np.arange(6) - message = _make_test_message({3: self._section_3, - 6: {'bitMapIndicator': 0, - 'bitmap': self.bitmap}, - 7: {'codedValues': values}}) - with self.assertRaisesRegex(TranslationError, 'do not match'): + message = _make_test_message( + { + 3: self._section_3, + 6: {"bitMapIndicator": 0, "bitmap": self.bitmap}, + 7: {"codedValues": values}, + } + ) + with self.assertRaisesRegex(TranslationError, "do not match"): as_concrete_data(message.data) def test_bitmap__invalid_indicator(self): values = np.arange(12) - message = _make_test_message({3: self._section_3, - 6: {'bitMapIndicator': 100, - 'bitmap': None}, - 7: {'codedValues': values}}) - with self.assertRaisesRegex(TranslationError, 'unsupported bitmap'): + message = _make_test_message( + { + 3: self._section_3, + 6: {"bitMapIndicator": 100, "bitmap": None}, + 7: {"codedValues": values}, + } + ) + with self.assertRaisesRegex(TranslationError, "unsupported bitmap"): as_concrete_data(message.data) class Test_data__unsupported(tests.IrisGribTest): def test_unsupported_grid_definition(self): - message = _make_test_message({3: {'sourceOfGridDefinition': 1}, - 6: SECTION_6_NO_BITMAP}) - with self.assertRaisesRegex(TranslationError, 'source'): + message = _make_test_message( + {3: {"sourceOfGridDefinition": 1}, 6: SECTION_6_NO_BITMAP} + ) + with self.assertRaisesRegex(TranslationError, "source"): message.data def test_unsupported_quasi_regular__number_of_octets(self): message = _make_test_message( - {3: {'sourceOfGridDefinition': 0, - 'numberOfOctectsForNumberOfPoints': 1, - 'gridDefinitionTemplateNumber': 0}, - 6: SECTION_6_NO_BITMAP}) - with self.assertRaisesRegex(TranslationError, 'quasi-regular'): + { + 3: { + "sourceOfGridDefinition": 0, + "numberOfOctectsForNumberOfPoints": 1, + "gridDefinitionTemplateNumber": 0, + }, + 6: SECTION_6_NO_BITMAP, + } + ) + with self.assertRaisesRegex(TranslationError, "quasi-regular"): message.data def test_unsupported_quasi_regular__interpretation(self): message = 
_make_test_message( - {3: {'sourceOfGridDefinition': 0, - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 1, - 'gridDefinitionTemplateNumber': 0}, - 6: SECTION_6_NO_BITMAP}) - with self.assertRaisesRegex(TranslationError, 'quasi-regular'): + { + 3: { + "sourceOfGridDefinition": 0, + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 1, + "gridDefinitionTemplateNumber": 0, + }, + 6: SECTION_6_NO_BITMAP, + } + ) + with self.assertRaisesRegex(TranslationError, "quasi-regular"): message.data def test_unsupported_template(self): message = _make_test_message( - {3: {'sourceOfGridDefinition': 0, - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 0, - 'gridDefinitionTemplateNumber': 2}}) - with self.assertRaisesRegex(TranslationError, 'template'): + { + 3: { + "sourceOfGridDefinition": 0, + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 0, + "gridDefinitionTemplateNumber": 2, + } + } + ) + with self.assertRaisesRegex(TranslationError, "template"): message.data @@ -159,23 +184,23 @@ def section_3(self, scanning_mode): raise NotImplementedError() def test_unsupported_scanning_mode(self): - message = _make_test_message( - {3: self.section_3(1), - 6: SECTION_6_NO_BITMAP}) - with self.assertRaisesRegex(TranslationError, 'scanning mode'): + message = _make_test_message({3: self.section_3(1), 6: SECTION_6_NO_BITMAP}) + with self.assertRaisesRegex(TranslationError, "scanning mode"): message.data def _test(self, scanning_mode): message = _make_test_message( - {3: self.section_3(scanning_mode), - 6: SECTION_6_NO_BITMAP, - 7: {'codedValues': np.arange(12)}}) + { + 3: self.section_3(scanning_mode), + 6: SECTION_6_NO_BITMAP, + 7: {"codedValues": np.arange(12)}, + } + ) data = message.data self.assertTrue(is_lazy_data(data)) self.assertEqual(data.shape, (3, 4)) self.assertEqual(data.dtype, np.floating) - self.assertArrayEqual(as_concrete_data(data), - np.arange(12).reshape(3, 4)) + 
self.assertArrayEqual(as_concrete_data(data), np.arange(12).reshape(3, 4)) def test_regular_mode_0(self): self._test(0) @@ -191,90 +216,88 @@ def test_regular_mode_64_128(self): def _example_section_3(grib_definition_template_number, scanning_mode): - return {'sourceOfGridDefinition': 0, - 'numberOfOctectsForNumberOfPoints': 0, - 'interpretationOfNumberOfPoints': 0, - 'gridDefinitionTemplateNumber': grib_definition_template_number, - 'scanningMode': scanning_mode, - 'Nj': 3, - 'Ni': 4} - - -class Test_data__grid_template_0(tests.IrisGribTest, - Mixin_data__grid_template): + return { + "sourceOfGridDefinition": 0, + "numberOfOctectsForNumberOfPoints": 0, + "interpretationOfNumberOfPoints": 0, + "gridDefinitionTemplateNumber": grib_definition_template_number, + "scanningMode": scanning_mode, + "Nj": 3, + "Ni": 4, + } + + +class Test_data__grid_template_0(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): return _example_section_3(0, scanning_mode) -class Test_data__grid_template_1(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_1(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): return _example_section_3(1, scanning_mode) -class Test_data__grid_template_5(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_5(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): return _example_section_3(5, scanning_mode) -class Test_data__grid_template_12(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_12(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): return _example_section_3(12, scanning_mode) -class Test_data__grid_template_30(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_30(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): section_3 = _example_section_3(30, scanning_mode) # 
Dimensions are 'Nx' + 'Ny' instead of 'Ni' + 'Nj'. - section_3['Nx'] = section_3['Ni'] - section_3['Ny'] = section_3['Nj'] - del section_3['Ni'] - del section_3['Nj'] + section_3["Nx"] = section_3["Ni"] + section_3["Ny"] = section_3["Nj"] + del section_3["Ni"] + del section_3["Nj"] return section_3 -class Test_data__grid_template_40_regular(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_40_regular( + tests.IrisGribTest, Mixin_data__grid_template +): def section_3(self, scanning_mode): return _example_section_3(40, scanning_mode) -class Test_data__grid_template_90(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_90(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): section_3 = _example_section_3(90, scanning_mode) # Exceptionally, dimensions are 'Nx' + 'Ny' instead of 'Ni' + 'Nj'. - section_3['Nx'] = section_3['Ni'] - section_3['Ny'] = section_3['Nj'] - del section_3['Ni'] - del section_3['Nj'] + section_3["Nx"] = section_3["Ni"] + section_3["Ny"] = section_3["Nj"] + del section_3["Ni"] + del section_3["Nj"] return section_3 -class Test_data__grid_template_140(tests.IrisGribTest, - Mixin_data__grid_template): +class Test_data__grid_template_140(tests.IrisGribTest, Mixin_data__grid_template): def section_3(self, scanning_mode): section_3 = _example_section_3(140, scanning_mode) - section_3['numberOfPointsAlongXAxis'] = section_3['Ni'] - section_3['numberOfPointsAlongYAxis'] = section_3['Nj'] - del section_3['Ni'] - del section_3['Nj'] + section_3["numberOfPointsAlongXAxis"] = section_3["Ni"] + section_3["numberOfPointsAlongYAxis"] = section_3["Nj"] + del section_3["Ni"] + del section_3["Nj"] return section_3 class Test_data__unknown_grid_template(tests.IrisGribTest): def test(self): message = _make_test_message( - {3: _example_section_3(999, 0), - 6: SECTION_6_NO_BITMAP, - 7: {'codedValues': np.arange(12)}}) - with self.assertRaisesRegex(TranslationError, - 'template 
999 is not supported'): + { + 3: _example_section_3(999, 0), + 6: SECTION_6_NO_BITMAP, + 7: {"codedValues": np.arange(12)}, + } + ) + with self.assertRaisesRegex(TranslationError, "template 999 is not supported"): _ = message.data -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/message/test_Section.py b/iris_grib/tests/unit/message/test_Section.py index b2f842fa9..a1af5277a 100644 --- a/iris_grib/tests/unit/message/test_Section.py +++ b/iris_grib/tests/unit/message/test_Section.py @@ -20,49 +20,54 @@ @tests.skip_data class Test___getitem__(tests.IrisGribTest): def setUp(self): - filename = tests.get_data_path(('GRIB', 'uk_t', 'uk_t.grib2')) - with open(filename, 'rb') as grib_fh: + filename = tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2")) + with open(filename, "rb") as grib_fh: self.grib_id = eccodes.codes_new_from_file( grib_fh, eccodes.CODES_PRODUCT_GRIB ) def test_scalar(self): - section = Section(self.grib_id, None, ['Ni']) - self.assertEqual(section['Ni'], 47) + section = Section(self.grib_id, None, ["Ni"]) + self.assertEqual(section["Ni"], 47) def test_array(self): - section = Section(self.grib_id, None, ['codedValues']) - codedValues = section['codedValues'] + section = Section(self.grib_id, None, ["codedValues"]) + codedValues = section["codedValues"] self.assertEqual(codedValues.shape, (1551,)) - self.assertArrayAlmostEqual(codedValues[:3], - [-1.78140259, -1.53140259, -1.28140259]) + self.assertArrayAlmostEqual( + codedValues[:3], [-1.78140259, -1.53140259, -1.28140259] + ) def test_typeOfFirstFixedSurface(self): - section = Section(self.grib_id, None, ['typeOfFirstFixedSurface']) - self.assertEqual(section['typeOfFirstFixedSurface'], 100) + section = Section(self.grib_id, None, ["typeOfFirstFixedSurface"]) + self.assertEqual(section["typeOfFirstFixedSurface"], 100) def test_numberOfSection(self): n = 4 - section = Section(self.grib_id, n, ['numberOfSection']) - 
self.assertEqual(section['numberOfSection'], n) + section = Section(self.grib_id, n, ["numberOfSection"]) + self.assertEqual(section["numberOfSection"], n) def test_invalid(self): - section = Section(self.grib_id, None, ['Ni']) - with self.assertRaisesRegex(KeyError, 'Nii'): - section['Nii'] + section = Section(self.grib_id, None, ["Ni"]) + with self.assertRaisesRegex(KeyError, "Nii"): + section["Nii"] @tests.skip_data class Test__getitem___pdt_31(tests.IrisGribTest): def setUp(self): - filename = tests.get_data_path(('GRIB', 'umukv', 'ukv_chan9.grib2')) - with open(filename, 'rb') as grib_fh: + filename = tests.get_data_path(("GRIB", "umukv", "ukv_chan9.grib2")) + with open(filename, "rb") as grib_fh: self.grib_id = eccodes.codes_new_from_file( grib_fh, eccodes.CODES_PRODUCT_GRIB ) - self.keys = ['satelliteSeries', 'satelliteNumber', 'instrumentType', - 'scaleFactorOfCentralWaveNumber', - 'scaledValueOfCentralWaveNumber'] + self.keys = [ + "satelliteSeries", + "satelliteNumber", + "instrumentType", + "scaleFactorOfCentralWaveNumber", + "scaledValueOfCentralWaveNumber", + ] def test_array(self): section = Section(self.grib_id, None, self.keys) @@ -75,15 +80,15 @@ def test_array(self): @tests.skip_data class Test_get_computed_key(tests.IrisGribTest): def test_gdt40_computed(self): - fname = tests.get_data_path(('GRIB', 'gaussian', 'regular_gg.grib2')) - with open(fname, 'rb') as grib_fh: + fname = tests.get_data_path(("GRIB", "gaussian", "regular_gg.grib2")) + with open(fname, "rb") as grib_fh: self.grib_id = eccodes.codes_new_from_file( grib_fh, eccodes.CODES_PRODUCT_GRIB ) section = Section(self.grib_id, None, []) - latitudes = section.get_computed_key('latitudes') + latitudes = section.get_computed_key("latitudes") self.assertTrue(88.55 < latitudes[0] < 88.59) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/message/test__DataProxy.py b/iris_grib/tests/unit/message/test__DataProxy.py index 3b3c103b8..48fdc1fb0 
100644 --- a/iris_grib/tests/unit/message/test__DataProxy.py +++ b/iris_grib/tests/unit/message/test__DataProxy.py @@ -20,24 +20,24 @@ class Test__bitmap(tests.IrisGribTest): def test_no_bitmap(self): - section_6 = {'bitMapIndicator': 255, 'bitmap': None} + section_6 = {"bitMapIndicator": 255, "bitmap": None} data_proxy = _DataProxy(0, 0, 0) result = data_proxy._bitmap(section_6) self.assertIsNone(result) def test_bitmap_present(self): bitmap = randint(2, size=(12)) - section_6 = {'bitMapIndicator': 0, 'bitmap': bitmap} + section_6 = {"bitMapIndicator": 0, "bitmap": bitmap} data_proxy = _DataProxy(0, 0, 0) result = data_proxy._bitmap(section_6) self.assertArrayEqual(bitmap, result) def test_bitmap__invalid_indicator(self): - section_6 = {'bitMapIndicator': 100, 'bitmap': None} + section_6 = {"bitMapIndicator": 100, "bitmap": None} data_proxy = _DataProxy(0, 0, 0) - with self.assertRaisesRegex(TranslationError, 'unsupported bitmap'): + with self.assertRaisesRegex(TranslationError, "unsupported bitmap"): data_proxy._bitmap(section_6) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/message/test__MessageLocation.py b/iris_grib/tests/unit/message/test__MessageLocation.py index 890a1a87d..8ff5d6eed 100644 --- a/iris_grib/tests/unit/message/test__MessageLocation.py +++ b/iris_grib/tests/unit/message/test__MessageLocation.py @@ -18,16 +18,16 @@ class Test(tests.IrisGribTest): def test(self): - message_location = _MessageLocation(mock.sentinel.filename, - mock.sentinel.location) - patch_target = 'iris_grib.message._RawGribMessage.from_file_offset' + message_location = _MessageLocation( + mock.sentinel.filename, mock.sentinel.location + ) + patch_target = "iris_grib.message._RawGribMessage.from_file_offset" expected = mock.sentinel.message with mock.patch(patch_target, return_value=expected) as rgm: result = message_location() - rgm.assert_called_once_with(mock.sentinel.filename, - mock.sentinel.location) + 
rgm.assert_called_once_with(mock.sentinel.filename, mock.sentinel.location) self.assertIs(result, expected) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/message/test__RawGribMessage.py b/iris_grib/tests/unit/message/test__RawGribMessage.py index 9f35ba0f2..f8b90ea27 100644 --- a/iris_grib/tests/unit/message/test__RawGribMessage.py +++ b/iris_grib/tests/unit/message/test__RawGribMessage.py @@ -19,11 +19,9 @@ @tests.skip_data class Test(tests.IrisGribTest): def setUp(self): - filename = tests.get_data_path(('GRIB', 'uk_t', 'uk_t.grib2')) - with open(filename, 'rb') as grib_fh: - grib_id = eccodes.codes_new_from_file( - grib_fh, eccodes.CODES_PRODUCT_GRIB - ) + filename = tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2")) + with open(filename, "rb") as grib_fh: + grib_id = eccodes.codes_new_from_file(grib_fh, eccodes.CODES_PRODUCT_GRIB) self.message = _RawGribMessage(grib_id) def test_sections__set(self): @@ -32,7 +30,7 @@ def test_sections__set(self): self.assertNotEqual(self.message._sections, None) def test_sections__indexing(self): - res = self.message.sections[3]['scanningMode'] + res = self.message.sections[3]["scanningMode"] expected = 64 self.assertEqual(expected, res) @@ -46,9 +44,9 @@ def test_sections__numberOfSection_value(self): # This tests that the `_RawGribMessage._get_message_sections` # override is functioning. 
section_number = 4 - res = self.message.sections[section_number]['numberOfSection'] + res = self.message.sections[section_number]["numberOfSection"] self.assertEqual(res, section_number) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/save_rules/__init__.py b/iris_grib/tests/unit/save_rules/__init__.py index 7ccfb2acc..e64ef6fc2 100644 --- a/iris_grib/tests/unit/save_rules/__init__.py +++ b/iris_grib/tests/unit/save_rules/__init__.py @@ -19,11 +19,12 @@ class GdtTestMixin: """Some handy common test capabilities for grib grid-definition tests.""" - TARGET_MODULE = 'iris_grib._save_rules' + + TARGET_MODULE = "iris_grib._save_rules" def setUp(self): # Patch the ecCodes of the tested module. - self.mock_eccodes = self.patch(self.TARGET_MODULE + '.eccodes') + self.mock_eccodes = self.patch(self.TARGET_MODULE + ".eccodes") # Fix the mock ecCodes to record key assignments. def codes_set_trap(grib, name, value): @@ -53,8 +54,9 @@ def _default_x_points(self): def _default_y_points(self): return [7.0, 8.0] # N.B. is_regular will *fail* on length-1 coords. - def _make_test_cube(self, cs=None, x_points=None, y_points=None, - coord_units='degrees'): + def _make_test_cube( + self, cs=None, x_points=None, y_points=None, coord_units="degrees" + ): # Create a cube with given properties, or minimal defaults. 
if cs is None: cs = self._default_coord_system() @@ -63,10 +65,12 @@ def _make_test_cube(self, cs=None, x_points=None, y_points=None, if y_points is None: y_points = self._default_y_points() - x_coord = DimCoord(x_points, long_name='longitude', - units=coord_units, coord_system=cs) - y_coord = DimCoord(y_points, long_name='latitude', - units=coord_units, coord_system=cs) + x_coord = DimCoord( + x_points, long_name="longitude", units=coord_units, coord_system=cs + ) + y_coord = DimCoord( + y_points, long_name="latitude", units=coord_units, coord_system=cs + ) test_cube = Cube(np.zeros((len(y_points), len(x_points)))) test_cube.add_dim_coord(y_coord, 0) test_cube.add_dim_coord(x_coord, 1) @@ -77,7 +81,6 @@ def _check_key(self, name, value): msg_fmt = 'Expected grib setting "{}" = {}, got {}' found = self.mock_grib.keys.get(name) if found is None: - self.assertEqual(0, 1, msg_fmt.format(name, value, '((UNSET))')) + self.assertEqual(0, 1, msg_fmt.format(name, value, "((UNSET))")) else: - self.assertArrayEqual(found, value, - msg_fmt.format(name, value, found)) + self.assertArrayEqual(found, value, msg_fmt.format(name, value, found)) diff --git a/iris_grib/tests/unit/save_rules/test__missing_forecast_period.py b/iris_grib/tests/unit/save_rules/test__missing_forecast_period.py index 6e427f00d..ffbc99863 100644 --- a/iris_grib/tests/unit/save_rules/test__missing_forecast_period.py +++ b/iris_grib/tests/unit/save_rules/test__missing_forecast_period.py @@ -19,7 +19,7 @@ class TestNoForecastReferenceTime(tests.IrisGribTest): def test_no_bounds(self): - t_coord = DimCoord(15, 'time', units='hours since epoch') + t_coord = DimCoord(15, "time", units="hours since epoch") cube = Cube(23) cube.add_aux_coord(t_coord) @@ -28,15 +28,11 @@ def test_no_bounds(self): expected_rt_type = 3 expected_fp = 0 expected_fp_type = 1 - expected = (expected_rt, - expected_rt_type, - expected_fp, - expected_fp_type) + expected = (expected_rt, expected_rt_type, expected_fp, expected_fp_type) 
self.assertEqual(res, expected) def test_with_bounds(self): - t_coord = DimCoord(15, 'time', bounds=[14, 16], - units='hours since epoch') + t_coord = DimCoord(15, "time", bounds=[14, 16], units="hours since epoch") cube = Cube(23) cube.add_aux_coord(t_coord) @@ -45,18 +41,14 @@ def test_with_bounds(self): expected_rt_type = 3 expected_fp = 0 expected_fp_type = 1 - expected = (expected_rt, - expected_rt_type, - expected_fp, - expected_fp_type) + expected = (expected_rt, expected_rt_type, expected_fp, expected_fp_type) self.assertEqual(res, expected) class TestWithForecastReferenceTime(tests.IrisGribTest): def test_no_bounds(self): - t_coord = DimCoord(3, 'time', units='days since epoch') - frt_coord = DimCoord(8, 'forecast_reference_time', - units='hours since epoch') + t_coord = DimCoord(3, "time", units="days since epoch") + frt_coord = DimCoord(8, "forecast_reference_time", units="hours since epoch") cube = Cube(23) cube.add_aux_coord(t_coord) cube.add_aux_coord(frt_coord) @@ -66,16 +58,12 @@ def test_no_bounds(self): expected_rt_type = 1 expected_fp = 3 * 24 - 8 expected_fp_type = 1 - expected = (expected_rt, - expected_rt_type, - expected_fp, - expected_fp_type) + expected = (expected_rt, expected_rt_type, expected_fp, expected_fp_type) self.assertEqual(res, expected) def test_with_bounds(self): - t_coord = DimCoord(3, 'time', bounds=[2, 4], units='days since epoch') - frt_coord = DimCoord(8, 'forecast_reference_time', - units='hours since epoch') + t_coord = DimCoord(3, "time", bounds=[2, 4], units="days since epoch") + frt_coord = DimCoord(8, "forecast_reference_time", units="hours since epoch") cube = Cube(23) cube.add_aux_coord(t_coord) cube.add_aux_coord(frt_coord) @@ -85,10 +73,7 @@ def test_with_bounds(self): expected_rt_type = 1 expected_fp = 2 * 24 - 8 expected_fp_type = 1 - expected = (expected_rt, - expected_rt_type, - expected_fp, - expected_fp_type) + expected = (expected_rt, expected_rt_type, expected_fp, expected_fp_type) self.assertEqual(res, 
expected) diff --git a/iris_grib/tests/unit/save_rules/test__non_missing_forecast_period.py b/iris_grib/tests/unit/save_rules/test__non_missing_forecast_period.py index 2595526b4..40f6184e3 100644 --- a/iris_grib/tests/unit/save_rules/test__non_missing_forecast_period.py +++ b/iris_grib/tests/unit/save_rules/test__non_missing_forecast_period.py @@ -15,10 +15,12 @@ class Test(tests.IrisGribTest): def _cube(self, t_bounds=False): - time_coord = iris.coords.DimCoord(15, standard_name='time', - units='hours since epoch') - fp_coord = iris.coords.DimCoord(10, standard_name='forecast_period', - units='hours') + time_coord = iris.coords.DimCoord( + 15, standard_name="time", units="hours since epoch" + ) + fp_coord = iris.coords.DimCoord( + 10, standard_name="forecast_period", units="hours" + ) if t_bounds: time_coord.bounds = [[8, 100]] fp_coord.bounds = [[3, 95]] @@ -39,7 +41,7 @@ def test_time_bounds(self): def test_time_bounds_in_minutes(self): cube = self._cube(t_bounds=True) - cube.coord('forecast_period').convert_units('minutes') + cube.coord("forecast_period").convert_units("minutes") rt, rt_meaning, fp, fp_meaning = _non_missing_forecast_period(cube) self.assertEqual((rt_meaning, fp, fp_meaning), (1, 180, 0)) diff --git a/iris_grib/tests/unit/save_rules/test__product_definition_template_8_10_and_11.py b/iris_grib/tests/unit/save_rules/test__product_definition_template_8_10_and_11.py index 82a3c7f9e..fd6b0ba4e 100644 --- a/iris_grib/tests/unit/save_rules/test__product_definition_template_8_10_and_11.py +++ b/iris_grib/tests/unit/save_rules/test__product_definition_template_8_10_and_11.py @@ -27,49 +27,51 @@ class TestTypeOfStatisticalProcessing(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. 
- self.cube.rename('air_temperature') - coord = DimCoord(23, 'time', bounds=[0, 100], - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("air_temperature") + coord = DimCoord( + 23, + "time", + bounds=[0, 100], + units=Unit("days since epoch", calendar="standard"), + ) self.cube.add_aux_coord(coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_sum(self, mock_set): cube = self.cube - cell_method = CellMethod(method='sum', coords=['time']) + cell_method = CellMethod(method="sum", coords=["time"]) cube.add_cell_method(cell_method) _product_definition_template_8_10_and_11(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "typeOfStatisticalProcessing", 1) + mock_set.assert_any_call(mock.sentinel.grib, "typeOfStatisticalProcessing", 1) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_unrecognised(self, mock_set): cube = self.cube - cell_method = CellMethod(method='95th percentile', coords=['time']) + cell_method = CellMethod(method="95th percentile", coords=["time"]) cube.add_cell_method(cell_method) _product_definition_template_8_10_and_11(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "typeOfStatisticalProcessing", 255) + mock_set.assert_any_call(mock.sentinel.grib, "typeOfStatisticalProcessing", 255) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_multiple_cell_method_coords(self, mock_set): cube = self.cube - cell_method = CellMethod(method='sum', - coords=['time', 'forecast_period']) + cell_method = CellMethod(method="sum", coords=["time", "forecast_period"]) cube.add_cell_method(cell_method) - with self.assertRaisesRegex(ValueError, - 'Cannot handle multiple coordinate name'): + with self.assertRaisesRegex( + ValueError, "Cannot handle multiple coordinate name" + ): _product_definition_template_8_10_and_11(cube, mock.sentinel.grib) - 
@mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_cell_method_coord_name_fail(self, mock_set): cube = self.cube - cell_method = CellMethod(method='mean', coords=['season']) + cell_method = CellMethod(method="mean", coords=["season"]) cube.add_cell_method(cell_method) with self.assertRaisesRegex( - ValueError, "Expected a cell method with a coordinate " - "name of 'time'"): + ValueError, "Expected a cell method with a coordinate " "name of 'time'" + ): _product_definition_template_8_10_and_11(cube, mock.sentinel.grib) @@ -77,119 +79,115 @@ class TestTimeCoordPrerequisites(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. - self.cube.rename('air_temperature') + self.cube.rename("air_temperature") - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_multiple_points(self, mock_set): # Add time coord with multiple points. - coord = DimCoord([23, 24, 25], 'time', - bounds=[[22, 23], [23, 24], [24, 25]], - units=Unit('days since epoch', calendar='standard')) + coord = DimCoord( + [23, 24, 25], + "time", + bounds=[[22, 23], [23, 24], [24, 25]], + units=Unit("days since epoch", calendar="standard"), + ) self.cube.add_aux_coord(coord, 0) - with self.assertRaisesRegex( - ValueError, 'Expected length one time coordinate'): - _product_definition_template_8_10_and_11(self.cube, - mock.sentinel.grib) + with self.assertRaisesRegex(ValueError, "Expected length one time coordinate"): + _product_definition_template_8_10_and_11(self.cube, mock.sentinel.grib) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_no_bounds(self, mock_set): # Add time coord with no bounds. 
- coord = DimCoord(23, 'time', - units=Unit('days since epoch', calendar='standard')) + coord = DimCoord( + 23, "time", units=Unit("days since epoch", calendar="standard") + ) self.cube.add_aux_coord(coord) with self.assertRaisesRegex( - ValueError, 'Expected time coordinate with two bounds, ' - 'got 0 bounds'): - _product_definition_template_8_10_and_11(self.cube, - mock.sentinel.grib) + ValueError, "Expected time coordinate with two bounds, " "got 0 bounds" + ): + _product_definition_template_8_10_and_11(self.cube, mock.sentinel.grib) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_more_than_two_bounds(self, mock_set): # Add time coord with more than two bounds. - coord = DimCoord(23, 'time', bounds=[21, 22, 23], - units=Unit('days since epoch', calendar='standard')) + coord = DimCoord( + 23, + "time", + bounds=[21, 22, 23], + units=Unit("days since epoch", calendar="standard"), + ) self.cube.add_aux_coord(coord) with self.assertRaisesRegex( - ValueError, 'Expected time coordinate with two bounds, ' - 'got 3 bounds'): - _product_definition_template_8_10_and_11(self.cube, - mock.sentinel.grib) + ValueError, "Expected time coordinate with two bounds, " "got 3 bounds" + ): + _product_definition_template_8_10_and_11(self.cube, mock.sentinel.grib) class TestEndOfOverallTimeInterval(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. - self.cube.rename('air_temperature') - cell_method = CellMethod(method='sum', coords=['time']) + self.cube.rename("air_temperature") + cell_method = CellMethod(method="sum", coords=["time"]) self.cube.add_cell_method(cell_method) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_default_calendar(self, mock_set): cube = self.cube # End bound is 1972-04-26 10:27:07. 
- coord = DimCoord(23.0, 'time', bounds=[0.452, 20314.452], - units=Unit('hours since epoch')) + coord = DimCoord( + 23.0, "time", bounds=[0.452, 20314.452], units=Unit("hours since epoch") + ) cube.add_aux_coord(coord) grib = mock.sentinel.grib _product_definition_template_8_10_and_11(cube, grib) - mock_set.assert_any_call( - grib, "yearOfEndOfOverallTimeInterval", 1972) - mock_set.assert_any_call( - grib, "monthOfEndOfOverallTimeInterval", 4) - mock_set.assert_any_call( - grib, "dayOfEndOfOverallTimeInterval", 26) - mock_set.assert_any_call( - grib, "hourOfEndOfOverallTimeInterval", 10) - mock_set.assert_any_call( - grib, "minuteOfEndOfOverallTimeInterval", 27) - mock_set.assert_any_call( - grib, "secondOfEndOfOverallTimeInterval", 7) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call(grib, "yearOfEndOfOverallTimeInterval", 1972) + mock_set.assert_any_call(grib, "monthOfEndOfOverallTimeInterval", 4) + mock_set.assert_any_call(grib, "dayOfEndOfOverallTimeInterval", 26) + mock_set.assert_any_call(grib, "hourOfEndOfOverallTimeInterval", 10) + mock_set.assert_any_call(grib, "minuteOfEndOfOverallTimeInterval", 27) + mock_set.assert_any_call(grib, "secondOfEndOfOverallTimeInterval", 7) + + @mock.patch.object(eccodes, "codes_set") def test_360_day_calendar(self, mock_set): cube = self.cube # End bound is 1972-05-07 10:27:07 - coord = DimCoord(23.0, 'time', bounds=[0.452, 20314.452], - units=Unit('hours since epoch', calendar='360_day')) + coord = DimCoord( + 23.0, + "time", + bounds=[0.452, 20314.452], + units=Unit("hours since epoch", calendar="360_day"), + ) cube.add_aux_coord(coord) grib = mock.sentinel.grib _product_definition_template_8_10_and_11(cube, grib) - mock_set.assert_any_call( - grib, "yearOfEndOfOverallTimeInterval", 1972) - mock_set.assert_any_call( - grib, "monthOfEndOfOverallTimeInterval", 5) - mock_set.assert_any_call( - grib, "dayOfEndOfOverallTimeInterval", 7) - mock_set.assert_any_call( - grib, "hourOfEndOfOverallTimeInterval", 
10) - mock_set.assert_any_call( - grib, "minuteOfEndOfOverallTimeInterval", 27) - mock_set.assert_any_call( - grib, "secondOfEndOfOverallTimeInterval", 7) + mock_set.assert_any_call(grib, "yearOfEndOfOverallTimeInterval", 1972) + mock_set.assert_any_call(grib, "monthOfEndOfOverallTimeInterval", 5) + mock_set.assert_any_call(grib, "dayOfEndOfOverallTimeInterval", 7) + mock_set.assert_any_call(grib, "hourOfEndOfOverallTimeInterval", 10) + mock_set.assert_any_call(grib, "minuteOfEndOfOverallTimeInterval", 27) + mock_set.assert_any_call(grib, "secondOfEndOfOverallTimeInterval", 7) class TestNumberOfTimeRange(tests.IrisGribTest): - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_other_cell_methods(self, mock_set): cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. - cube.rename('air_temperature') - coord = DimCoord(23, 'time', bounds=[0, 24], - units=Unit('hours since epoch')) + cube.rename("air_temperature") + coord = DimCoord(23, "time", bounds=[0, 24], units=Unit("hours since epoch")) cube.add_aux_coord(coord) # Add one time cell method and another unrelated one. 
- cell_method = CellMethod(method='mean', coords=['elephants']) + cell_method = CellMethod(method="mean", coords=["elephants"]) cube.add_cell_method(cell_method) - cell_method = CellMethod(method='sum', coords=['time']) + cell_method = CellMethod(method="sum", coords=["time"]) cube.add_cell_method(cell_method) _product_definition_template_8_10_and_11(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, 'numberOfTimeRange', 1) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfTimeRange", 1) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_data_section.py b/iris_grib/tests/unit/save_rules/test_data_section.py index 22add2e6d..81f073e5b 100644 --- a/iris_grib/tests/unit/save_rules/test_data_section.py +++ b/iris_grib/tests/unit/save_rules/test_data_section.py @@ -20,7 +20,7 @@ from iris_grib._save_rules import data_section -GRIB_API = 'iris_grib._save_rules.eccodes' +GRIB_API = "iris_grib._save_rules.eccodes" GRIB_MESSAGE = mock.sentinel.GRIB_MESSAGE @@ -28,42 +28,42 @@ class TestMDI(tests.IrisGribTest): def assertBitmapOff(self, grib_api): # Check the use of a mask has been turned off via: # eccodes.codes_set(grib_message, 'bitmapPresent', 0) - grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, - 'bitmapPresent', 0) + grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, "bitmapPresent", 0) def assertBitmapOn(self, grib_api, fill_value): # Check the use of a mask has been turned on via: # eccodes.codes_set(grib_message, 'bitmapPresent', 1) # eccodes.codes_set_double(grib_message, 'missingValue', fill_value) - grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, - 'bitmapPresent', 1) - grib_api.codes_set_double.assert_called_once_with(GRIB_MESSAGE, - 'missingValue', - fill_value) + grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, "bitmapPresent", 1) + grib_api.codes_set_double.assert_called_once_with( + GRIB_MESSAGE, "missingValue", fill_value + ) def assertBitmapRange(self, grib_api, min_data, 
max_data): # Check the use of a mask has been turned on via: # eccodes.codes_set(grib_message, 'bitmapPresent', 1) # eccodes.codes_set_double(grib_message, 'missingValue', ...) # and that a suitable fill value has been chosen. - grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, - 'bitmapPresent', 1) - args, = grib_api.codes_set_double.call_args_list + grib_api.codes_set.assert_called_once_with(GRIB_MESSAGE, "bitmapPresent", 1) + (args,) = grib_api.codes_set_double.call_args_list (message, key, fill_value), kwargs = args self.assertIs(message, GRIB_MESSAGE) - self.assertEqual(key, 'missingValue') - self.assertTrue(fill_value < min_data or fill_value > max_data, - 'Fill value {} is not outside data range ' - '{} to {}.'.format(fill_value, min_data, max_data)) + self.assertEqual(key, "missingValue") + self.assertTrue( + fill_value < min_data or fill_value > max_data, + "Fill value {} is not outside data range " "{} to {}.".format( + fill_value, min_data, max_data + ), + ) return fill_value def assertValues(self, grib_api, values): # Check the correct data values have been set via: # eccodes.codes_set_double_array(grib_message, 'values', ...) 
- args, = grib_api.codes_set_double_array.call_args_list + (args,) = grib_api.codes_set_double_array.call_args_list (message, key, values), kwargs = args self.assertIs(message, GRIB_MESSAGE) - self.assertEqual(key, 'values') + self.assertEqual(key, "values") self.assertArrayEqual(values, values) self.assertEqual(kwargs, {}) @@ -79,9 +79,11 @@ def test_simple(self): self.assertValues(grib_api, np.arange(5)) def test_masked_with_finite_fill_value(self): - cube = iris.cube.Cube(np.ma.MaskedArray([1.0, 2.0, 3.0, 1.0, 2.0, 3.0], - mask=[0, 0, 0, 1, 1, 1], - fill_value=2000)) + cube = iris.cube.Cube( + np.ma.MaskedArray( + [1.0, 2.0, 3.0, 1.0, 2.0, 3.0], mask=[0, 0, 0, 1, 1, 1], fill_value=2000 + ) + ) grib_message = mock.sentinel.GRIB_MESSAGE with mock.patch(GRIB_API) as grib_api: data_section(cube, grib_message) @@ -92,9 +94,13 @@ def test_masked_with_finite_fill_value(self): self.assertValues(grib_api, [1, 2, 3, FILL, FILL, FILL]) def test_masked_with_nan_fill_value(self): - cube = iris.cube.Cube(np.ma.MaskedArray([1.0, 2.0, 3.0, 1.0, 2.0, 3.0], - mask=[0, 0, 0, 1, 1, 1], - fill_value=np.nan)) + cube = iris.cube.Cube( + np.ma.MaskedArray( + [1.0, 2.0, 3.0, 1.0, 2.0, 3.0], + mask=[0, 0, 0, 1, 1, 1], + fill_value=np.nan, + ) + ) grib_message = mock.sentinel.GRIB_MESSAGE with mock.patch(GRIB_API) as grib_api: data_section(cube, grib_message) @@ -107,8 +113,9 @@ def test_masked_with_nan_fill_value(self): def test_scaled(self): # If the Cube's units don't match the units required by GRIB # ensure the data values are scaled correctly. 
- cube = iris.cube.Cube(np.arange(5), - standard_name='geopotential_height', units='km') + cube = iris.cube.Cube( + np.arange(5), standard_name="geopotential_height", units="km" + ) grib_message = mock.sentinel.GRIB_MESSAGE with mock.patch(GRIB_API) as grib_api: data_section(cube, grib_message) @@ -120,10 +127,13 @@ def test_scaled(self): def test_scaled_with_finite_fill_value(self): # When re-scaling masked data with a finite fill value, ensure # the fill value and any filled values are also re-scaled. - cube = iris.cube.Cube(np.ma.MaskedArray([1.0, 2.0, 3.0, 1.0, 2.0, 3.0], - mask=[0, 0, 0, 1, 1, 1], - fill_value=2000), - standard_name='geopotential_height', units='km') + cube = iris.cube.Cube( + np.ma.MaskedArray( + [1.0, 2.0, 3.0, 1.0, 2.0, 3.0], mask=[0, 0, 0, 1, 1, 1], fill_value=2000 + ), + standard_name="geopotential_height", + units="km", + ) grib_message = mock.sentinel.GRIB_MESSAGE with mock.patch(GRIB_API) as grib_api: data_section(cube, grib_message) @@ -137,10 +147,13 @@ def test_scaled_with_nan_fill_value(self): # When re-scaling masked data with a NaN fill value, ensure # a fill value is chosen which allows for the scaling, and any # filled values match the chosen fill value. - cube = iris.cube.Cube(np.ma.MaskedArray([-1.0, 2.0, -1.0, 2.0], - mask=[0, 0, 1, 1], - fill_value=np.nan), - standard_name='geopotential_height', units='km') + cube = iris.cube.Cube( + np.ma.MaskedArray( + [-1.0, 2.0, -1.0, 2.0], mask=[0, 0, 1, 1], fill_value=np.nan + ), + standard_name="geopotential_height", + units="km", + ) grib_message = mock.sentinel.GRIB_MESSAGE with mock.patch(GRIB_API) as grib_api: data_section(cube, grib_message) @@ -157,8 +170,7 @@ class TestNonDoubleData(tests.IrisGribTest): # fault. 
def check(self, dtype): data = np.random.random(1920 * 2560).astype(dtype) - cube = iris.cube.Cube(data, - standard_name='geopotential_height', units='km') + cube = iris.cube.Cube(data, standard_name="geopotential_height", units="km") grib_message = eccodes.codes_grib_new_from_samples("GRIB2") data_section(cube, grib_message) eccodes.codes_release(grib_message) diff --git a/iris_grib/tests/unit/save_rules/test_fixup_float32_as_int32.py b/iris_grib/tests/unit/save_rules/test_fixup_float32_as_int32.py index 3354a6b5d..5e576e04c 100644 --- a/iris_grib/tests/unit/save_rules/test_fixup_float32_as_int32.py +++ b/iris_grib/tests/unit/save_rules/test_fixup_float32_as_int32.py @@ -44,5 +44,5 @@ def test_high_bit_set_2(self): self.assertEqual(result, -2) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/save_rules/test_fixup_int32_as_uint32.py b/iris_grib/tests/unit/save_rules/test_fixup_int32_as_uint32.py index a9928eee8..cc8e87c40 100644 --- a/iris_grib/tests/unit/save_rules/test_fixup_int32_as_uint32.py +++ b/iris_grib/tests/unit/save_rules/test_fixup_int32_as_uint32.py @@ -36,5 +36,5 @@ def test_very_positive(self): fixup_int32_as_uint32(0x80000000) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_0.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_0.py index 83075d874..1c2381837 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_0.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_0.py @@ -25,37 +25,34 @@ def setUp(self): def test__template_number(self): grid_definition_template_0(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 0) + self._check_key("gridDefinitionTemplateNumber", 0) def test__shape_of_earth_spherical(self): cs = GeogCS(semi_major_axis=1.23) test_cube = self._make_test_cube(cs=cs) 
grid_definition_template_0(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 1) - self._check_key('scaleFactorOfRadiusOfSphericalEarth', 0) - self._check_key('scaledValueOfRadiusOfSphericalEarth', 1.23) + self._check_key("shapeOfTheEarth", 1) + self._check_key("scaleFactorOfRadiusOfSphericalEarth", 0) + self._check_key("scaledValueOfRadiusOfSphericalEarth", 1.23) def test__shape_of_earth_flattened(self): - cs = GeogCS(semi_major_axis=1.456, - semi_minor_axis=1.123) + cs = GeogCS(semi_major_axis=1.456, semi_minor_axis=1.123) test_cube = self._make_test_cube(cs=cs) grid_definition_template_0(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 1.456) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 1.123) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 1.456) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 1.123) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_0(test_cube, self.mock_grib) - self._check_key('Ni', 13) - self._check_key('Nj', 6) + self._check_key("Ni", 13) + self._check_key("Nj", 6) def test__grid_points(self): - test_cube = self._make_test_cube( - x_points=[1, 3, 5, 7], y_points=[4, 9]) + test_cube = self._make_test_cube(x_points=[1, 3, 5, 7], y_points=[4, 9]) grid_definition_template_0(test_cube, self.mock_grib) self._check_key("longitudeOfFirstGridPoint", 1000000) self._check_key("longitudeOfLastGridPoint", 7000000) @@ -66,14 +63,14 @@ def test__grid_points(self): def test__scanmode(self): grid_definition_template_0(self.test_cube, self.mock_grib) - 
self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7, 0, -1)) grid_definition_template_0(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_1.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_1.py index 53d1ce894..ee0bb6f59 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_1.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_1.py @@ -28,49 +28,53 @@ def setUp(self): def _default_coord_system(self): # Define an alternate, rotated coordinate system to test. - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=self.default_ellipsoid, + ) return cs def test__template_number(self): grid_definition_template_1(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 1) + self._check_key("gridDefinitionTemplateNumber", 1) def test__shape_of_earth_spherical(self): ellipsoid = GeogCS(1.23) - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_1(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 1) - self._check_key('scaleFactorOfRadiusOfSphericalEarth', 0) - self._check_key('scaledValueOfRadiusOfSphericalEarth', 1.23) + 
self._check_key("shapeOfTheEarth", 1) + self._check_key("scaleFactorOfRadiusOfSphericalEarth", 0) + self._check_key("scaledValueOfRadiusOfSphericalEarth", 1.23) def test__shape_of_earth_flattened(self): ellipsoid = GeogCS(semi_major_axis=1.456, semi_minor_axis=1.123) - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_1(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 1.456) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 1.123) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 1.456) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 1.123) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_1(test_cube, self.mock_grib) - self._check_key('Ni', 13) - self._check_key('Nj', 6) + self._check_key("Ni", 13) + self._check_key("Nj", 6) def test__grid_points(self): - test_cube = self._make_test_cube(x_points=[1, 3, 5, 7], - y_points=[4, 9]) + test_cube = self._make_test_cube(x_points=[1, 3, 5, 7], y_points=[4, 9]) grid_definition_template_1(test_cube, self.mock_grib) self._check_key("longitudeOfFirstGridPoint", 1000000) self._check_key("longitudeOfLastGridPoint", 7000000) @@ -81,19 +85,21 @@ def test__grid_points(self): def test__scanmode(self): grid_definition_template_1(self.test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - 
self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7, 0, -1)) grid_definition_template_1(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) def test__rotated_pole(self): - cs = RotatedGeogCS(grid_north_pole_latitude=75.3, - grid_north_pole_longitude=54.321, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=75.3, + grid_north_pole_longitude=54.321, + ellipsoid=self.default_ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_1(test_cube, self.mock_grib) self._check_key("latitudeOfSouthernPole", -75300000) @@ -101,14 +107,16 @@ def test__rotated_pole(self): self._check_key("angleOfRotation", 0) def test__fail_rotated_pole_nonstandard_meridian(self): - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - north_pole_grid_longitude=22.5, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + north_pole_grid_longitude=22.5, + ellipsoid=self.default_ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) with self.assertRaisesRegex( - TranslationError, - 'not yet support .* rotated prime meridian.'): + TranslationError, "not yet support .* rotated prime meridian." 
+ ): grid_definition_template_1(test_cube, self.mock_grib) diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_10.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_10.py index d83e420df..521f6001d 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_10.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_10.py @@ -22,41 +22,42 @@ class Test(tests.IrisGribTest, GdtTestMixin): def setUp(self): self.default_ellipsoid = GeogCS(semi_major_axis=6371200.0) - self.mercator_test_cube = self._make_test_cube(coord_units='m') + self.mercator_test_cube = self._make_test_cube(coord_units="m") GdtTestMixin.setUp(self) def _default_coord_system(self): - return Mercator(standard_parallel=14., - ellipsoid=self.default_ellipsoid) + return Mercator(standard_parallel=14.0, ellipsoid=self.default_ellipsoid) def test__template_number(self): grid_definition_template_10(self.mercator_test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 10) + self._check_key("gridDefinitionTemplateNumber", 10) def test__shape_of_earth(self): grid_definition_template_10(self.mercator_test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 1) - self._check_key('scaleFactorOfRadiusOfSphericalEarth', 0) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 0) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 0) + self._check_key("shapeOfTheEarth", 1) + self._check_key("scaleFactorOfRadiusOfSphericalEarth", 0) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 0) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 0) def test__grid_shape(self): n_x_points = 13 n_y_points = 6 - test_cube = self._make_test_cube(x_points=np.arange(n_x_points), - y_points=np.arange(n_y_points), - coord_units='m') + test_cube = 
self._make_test_cube( + x_points=np.arange(n_x_points), + y_points=np.arange(n_y_points), + coord_units="m", + ) grid_definition_template_10(test_cube, self.mock_grib) - self._check_key('Ni', n_x_points) - self._check_key('Nj', n_y_points) + self._check_key("Ni", n_x_points) + self._check_key("Nj", n_y_points) def test__grid_points(self): - test_cube = self._make_test_cube(x_points=[1e6, 3e6, 5e6, 7e6], - y_points=[4e6, 9e6], - coord_units='m') + test_cube = self._make_test_cube( + x_points=[1e6, 3e6, 5e6, 7e6], y_points=[4e6, 9e6], coord_units="m" + ) grid_definition_template_10(test_cube, self.mock_grib) self._check_key("latitudeOfFirstGridPoint", 34727738) self._check_key("longitudeOfFirstGridPoint", 9268240) @@ -71,15 +72,16 @@ def test__template_specifics(self): def test__scanmode(self): grid_definition_template_10(self.mercator_test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): - test_cube = self._make_test_cube(x_points=np.arange(7e6, 0, -1e6), - coord_units='m') + test_cube = self._make_test_cube( + x_points=np.arange(7e6, 0, -1e6), coord_units="m" + ) grid_definition_template_10(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_12.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_12.py index caad0a99f..ad42bff60 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_12.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_12.py @@ -26,8 +26,9 @@ class FakeGribError(Exception): class Test(tests.IrisGribTest, GdtTestMixin): def setUp(self): - self.default_ellipsoid = 
GeogCS(semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + self.default_ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) self.test_cube = self._make_test_cube() GdtTestMixin.setUp(self) @@ -41,10 +42,12 @@ def _make_test_cube(self, cs=None, x_points=None, y_points=None): if y_points is None: y_points = self._default_y_points() - x_coord = iris.coords.DimCoord(x_points, 'projection_x_coordinate', - units='m', coord_system=cs) - y_coord = iris.coords.DimCoord(y_points, 'projection_y_coordinate', - units='m', coord_system=cs) + x_coord = iris.coords.DimCoord( + x_points, "projection_x_coordinate", units="m", coord_system=cs + ) + y_coord = iris.coords.DimCoord( + y_points, "projection_y_coordinate", units="m", coord_system=cs + ) test_cube = iris.cube.Cube(np.zeros((len(y_points), len(x_points)))) test_cube.add_dim_coord(y_coord, 0) test_cube.add_dim_coord(x_coord, 1) @@ -52,36 +55,36 @@ def _make_test_cube(self, cs=None, x_points=None, y_points=None): def _default_coord_system(self): # This defines an OSGB coord system. 
- cs = TransverseMercator(latitude_of_projection_origin=49.0, - longitude_of_central_meridian=-2.0, - false_easting=400000.0, - false_northing=-100000.0, - scale_factor_at_central_meridian=0.9996012717, - ellipsoid=self.default_ellipsoid) + cs = TransverseMercator( + latitude_of_projection_origin=49.0, + longitude_of_central_meridian=-2.0, + false_easting=400000.0, + false_northing=-100000.0, + scale_factor_at_central_meridian=0.9996012717, + ellipsoid=self.default_ellipsoid, + ) return cs def test__template_number(self): grid_definition_template_12(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 12) + self._check_key("gridDefinitionTemplateNumber", 12) def test__shape_of_earth(self): grid_definition_template_12(self.test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 6377563.396) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 6356256.909) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 6377563.396) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 6356256.909) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_12(test_cube, self.mock_grib) - self._check_key('Ni', 13) - self._check_key('Nj', 6) + self._check_key("Ni", 13) + self._check_key("Nj", 6) def test__grid_points_exact(self): - test_cube = self._make_test_cube(x_points=[1, 3, 5, 7], - y_points=[4, 9]) + test_cube = self._make_test_cube(x_points=[1, 3, 5, 7], y_points=[4, 9]) grid_definition_template_12(test_cube, self.mock_grib) self._check_key("X1", 100) self._check_key("X2", 700) @@ -91,8 +94,9 @@ 
def test__grid_points_exact(self): self._check_key("Dj", 500) def test__grid_points_approx(self): - test_cube = self._make_test_cube(x_points=[1.001, 3.003, 5.005, 7.007], - y_points=[4, 9]) + test_cube = self._make_test_cube( + x_points=[1.001, 3.003, 5.005, 7.007], y_points=[4, 9] + ) grid_definition_template_12(test_cube, self.mock_grib) self._check_key("X1", 100) self._check_key("X2", 701) @@ -109,10 +113,10 @@ def set(grib, key, value): if key in ["X1", "X2", "Y1", "Y2"] and value < 0: raise self.mock_eccodes.CodesInternalError() grib.keys[key] = value + self.mock_eccodes.codes_set = set - test_cube = self._make_test_cube(x_points=[-1, 1, 3, 5, 7], - y_points=[-4, 9]) + test_cube = self._make_test_cube(x_points=[-1, 1, 3, 5, 7], y_points=[-4, 9]) grid_definition_template_12(test_cube, self.mock_grib) self._check_key("X1", 0x80000064) self._check_key("X2", 700) @@ -120,8 +124,7 @@ def set(grib, key, value): self._check_key("Y2", 900) def test__negative_grid_points_eccodes_fixed(self): - test_cube = self._make_test_cube(x_points=[-1, 1, 3, 5, 7], - y_points=[-4, 9]) + test_cube = self._make_test_cube(x_points=[-1, 1, 3, 5, 7], y_points=[-4, 9]) grid_definition_template_12(test_cube, self.mock_grib) self._check_key("X1", -100) self._check_key("X2", 700) @@ -143,29 +146,30 @@ def test__scale_factor_eccodes_broken(self): def get_native_type(grib, key): assert key == "scaleFactorAtReferencePoint" return int + self.mock_eccodes.codes_get_native_type = get_native_type grid_definition_template_12(self.test_cube, self.mock_grib) self._check_key("scaleFactorAtReferencePoint", 1065346526) def test__scale_factor_eccodes_fixed(self): - def get_native_type(grib, key): assert key == "scaleFactorAtReferencePoint" return float + self.mock_eccodes.codes_get_native_type = get_native_type grid_definition_template_12(self.test_cube, self.mock_grib) self._check_key("scaleFactorAtReferencePoint", 0.9996012717) def test__scanmode(self): grid_definition_template_12(self.test_cube, 
self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7, 0, -1)) grid_definition_template_12(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_140.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_140.py index 7f888b5cc..dcc7bb41d 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_140.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_140.py @@ -29,8 +29,9 @@ class FakeGribError(Exception): class Test(tests.IrisGribTest, GdtTestMixin): def setUp(self): - self.default_ellipsoid = GeogCS(semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + self.default_ellipsoid = GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) self.test_cube = self._make_test_cube() GdtTestMixin.setUp(self) @@ -44,44 +45,48 @@ def _make_test_cube(self, cs=None, x_points=None, y_points=None): if y_points is None: y_points = self._default_y_points() - x_coord = iris.coords.DimCoord(x_points, 'projection_x_coordinate', - units='m', coord_system=cs) - y_coord = iris.coords.DimCoord(y_points, 'projection_y_coordinate', - units='m', coord_system=cs) + x_coord = iris.coords.DimCoord( + x_points, "projection_x_coordinate", units="m", coord_system=cs + ) + y_coord = iris.coords.DimCoord( + y_points, "projection_y_coordinate", units="m", coord_system=cs + ) test_cube = iris.cube.Cube(np.zeros((len(y_points), len(x_points)))) test_cube.add_dim_coord(y_coord, 0) test_cube.add_dim_coord(x_coord, 1) return test_cube def _default_coord_system(self, false_easting=0, 
false_northing=0): - return LambertAzimuthalEqualArea(latitude_of_projection_origin=54.9, - longitude_of_projection_origin=-2.5, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=self.default_ellipsoid) + return LambertAzimuthalEqualArea( + latitude_of_projection_origin=54.9, + longitude_of_projection_origin=-2.5, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=self.default_ellipsoid, + ) def test__template_number(self): grid_definition_template(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 140) + self._check_key("gridDefinitionTemplateNumber", 140) def test__shape_of_earth(self): grid_definition_template(self.test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 6377563.396) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 6356256.909) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 6377563.396) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 6356256.909) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template(test_cube, self.mock_grib) - self._check_key('Nx', 13) - self._check_key('Ny', 6) + self._check_key("Nx", 13) + self._check_key("Ny", 6) def test__grid_points(self): - test_cube = self._make_test_cube(x_points=[1e6, 3e6, 5e6, 7e6], - y_points=[4e6, 9e6]) + test_cube = self._make_test_cube( + x_points=[1e6, 3e6, 5e6, 7e6], y_points=[4e6, 9e6] + ) grid_definition_template(test_cube, self.mock_grib) self._check_key("latitudeOfFirstGridPoint", 81330008) self._check_key("longitudeOfFirstGridPoint", 98799008) 
@@ -96,26 +101,27 @@ def test__template_specifics(self): def test__scanmode(self): grid_definition_template(self.test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7e6, 0, -1e6)) grid_definition_template(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) def __fail_false_easting_northing(self, false_easting, false_northing): - cs = self._default_coord_system(false_easting=false_easting, - false_northing=false_northing) + cs = self._default_coord_system( + false_easting=false_easting, false_northing=false_northing + ) test_cube = self._make_test_cube(cs=cs) - msg = (r'non zero false easting \(\d*\.\d{2}\) or ' - r'non zero false northing \(\d*\.\d{2}\)' - r'; unsupported by GRIB Template 3\.140' - r'') - with self.assertRaisesRegex( - TranslationError, - msg): + msg = ( + r"non zero false easting \(\d*\.\d{2}\) or " + r"non zero false northing \(\d*\.\d{2}\)" + r"; unsupported by GRIB Template 3\.140" + r"" + ) + with self.assertRaisesRegex(TranslationError, msg): grid_definition_template(test_cube, self.mock_grib) def test__fail_false_easting(self): diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_30.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_30.py index 49a627cfc..38a5cad9c 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_30.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_30.py @@ -26,8 +26,9 @@ class FakeGribError(Exception): class Test(tests.IrisGribTest, GdtTestMixin): def setUp(self): - self.default_ellipsoid = GeogCS(semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + self.default_ellipsoid = 
GeogCS( + semi_major_axis=6377563.396, semi_minor_axis=6356256.909 + ) self.test_cube = self._make_test_cube() GdtTestMixin.setUp(self) @@ -41,43 +42,49 @@ def _make_test_cube(self, cs=None, x_points=None, y_points=None): if y_points is None: y_points = self._default_y_points() - x_coord = iris.coords.DimCoord(x_points, 'projection_x_coordinate', - units='m', coord_system=cs) - y_coord = iris.coords.DimCoord(y_points, 'projection_y_coordinate', - units='m', coord_system=cs) + x_coord = iris.coords.DimCoord( + x_points, "projection_x_coordinate", units="m", coord_system=cs + ) + y_coord = iris.coords.DimCoord( + y_points, "projection_y_coordinate", units="m", coord_system=cs + ) test_cube = iris.cube.Cube(np.zeros((len(y_points), len(x_points)))) test_cube.add_dim_coord(y_coord, 0) test_cube.add_dim_coord(x_coord, 1) return test_cube def _default_coord_system(self): - return LambertConformal(central_lat=39.0, central_lon=-96.0, - false_easting=0.0, false_northing=0.0, - secant_latitudes=(33, 45), - ellipsoid=self.default_ellipsoid) + return LambertConformal( + central_lat=39.0, + central_lon=-96.0, + false_easting=0.0, + false_northing=0.0, + secant_latitudes=(33, 45), + ellipsoid=self.default_ellipsoid, + ) def test__template_number(self): grid_definition_template_30(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 30) + self._check_key("gridDefinitionTemplateNumber", 30) def test__shape_of_earth(self): grid_definition_template_30(self.test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 6377563.396) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 6356256.909) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 6377563.396) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + 
self._check_key("scaledValueOfEarthMinorAxis", 6356256.909) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_30(test_cube, self.mock_grib) - self._check_key('Nx', 13) - self._check_key('Ny', 6) + self._check_key("Nx", 13) + self._check_key("Ny", 6) def test__grid_points(self): - test_cube = self._make_test_cube(x_points=[1e6, 3e6, 5e6, 7e6], - y_points=[4e6, 9e6]) + test_cube = self._make_test_cube( + x_points=[1e6, 3e6, 5e6, 7e6], y_points=[4e6, 9e6] + ) grid_definition_template_30(test_cube, self.mock_grib) self._check_key("latitudeOfFirstGridPoint", 71676530) self._check_key("longitudeOfFirstGridPoint", 287218188) @@ -95,24 +102,28 @@ def test__template_specifics(self): def test__scanmode(self): grid_definition_template_30(self.test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7e6, 0, -1e6)) grid_definition_template_30(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) def test_projection_centre(self): grid_definition_template_30(self.test_cube, self.mock_grib) self._check_key("projectionCentreFlag", 0) def test_projection_centre_south_pole(self): - cs = LambertConformal(central_lat=39.0, central_lon=-96.0, - false_easting=0.0, false_northing=0.0, - secant_latitudes=(-33, -45), - ellipsoid=self.default_ellipsoid) + cs = LambertConformal( + central_lat=39.0, + central_lon=-96.0, + false_easting=0.0, + false_northing=0.0, + secant_latitudes=(-33, -45), + ellipsoid=self.default_ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) 
grid_definition_template_30(test_cube, self.mock_grib) self._check_key("projectionCentreFlag", 1) diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_4.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_4.py index 3d5c4f4c4..8e4e1b807 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_4.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_4.py @@ -25,33 +25,31 @@ def setUp(self): def test__template_number(self): grid_definition_template_4(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 4) + self._check_key("gridDefinitionTemplateNumber", 4) def test__shape_of_earth_spherical(self): cs = GeogCS(semi_major_axis=1.23) test_cube = self._make_test_cube(cs=cs) grid_definition_template_4(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 1) - self._check_key('scaleFactorOfRadiusOfSphericalEarth', 0) - self._check_key('scaledValueOfRadiusOfSphericalEarth', 1.23) + self._check_key("shapeOfTheEarth", 1) + self._check_key("scaleFactorOfRadiusOfSphericalEarth", 0) + self._check_key("scaledValueOfRadiusOfSphericalEarth", 1.23) def test__shape_of_earth_flattened(self): - cs = GeogCS(semi_major_axis=1.456, - semi_minor_axis=1.123) + cs = GeogCS(semi_major_axis=1.456, semi_minor_axis=1.123) test_cube = self._make_test_cube(cs=cs) grid_definition_template_4(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 1.456) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 1.123) + self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 1.456) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 1.123) def test__grid_shape(self): - test_cube = 
self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_4(test_cube, self.mock_grib) - self._check_key('Ni', 13) - self._check_key('Nj', 6) + self._check_key("Ni", 13) + self._check_key("Nj", 6) def test__grid_points(self): x_floats = np.array([11.0, 12.0, 167.0]) @@ -66,14 +64,14 @@ def test__grid_points(self): def test__scanmode(self): grid_definition_template_4(self.test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(x_points=np.arange(7, 0, -1)) grid_definition_template_4(test_cube, self.mock_grib) - self._check_key('iScansPositively', 0) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 0) + self._check_key("jScansPositively", 1) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_grid_definition_template_5.py b/iris_grib/tests/unit/save_rules/test_grid_definition_template_5.py index 9d74f8c00..be789823a 100644 --- a/iris_grib/tests/unit/save_rules/test_grid_definition_template_5.py +++ b/iris_grib/tests/unit/save_rules/test_grid_definition_template_5.py @@ -28,9 +28,11 @@ def setUp(self): def _default_coord_system(self): # Define an alternate, rotated coordinate system to test." 
self.default_ellipsoid = GeogCS(PP_DEFAULT_EARTH_RADIUS) - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=self.default_ellipsoid, + ) return cs def _default_x_points(self): @@ -39,53 +41,58 @@ def _default_x_points(self): def test__template_number(self): grid_definition_template_5(self.test_cube, self.mock_grib) - self._check_key('gridDefinitionTemplateNumber', 5) + self._check_key("gridDefinitionTemplateNumber", 5) def test__shape_of_earth_spherical(self): - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=GeogCS(52431.0)) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=GeogCS(52431.0), + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_5(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 1) - self._check_key('scaleFactorOfRadiusOfSphericalEarth', 0) - self._check_key('scaledValueOfRadiusOfSphericalEarth', 52431.0) + self._check_key("shapeOfTheEarth", 1) + self._check_key("scaleFactorOfRadiusOfSphericalEarth", 0) + self._check_key("scaledValueOfRadiusOfSphericalEarth", 52431.0) def test__shape_of_earth_flattened(self): ellipsoid = GeogCS(semi_major_axis=1456.0, semi_minor_axis=1123.0) - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - ellipsoid=ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + ellipsoid=ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_5(test_cube, self.mock_grib) - self._check_key('shapeOfTheEarth', 7) - self._check_key('scaleFactorOfEarthMajorAxis', 0) - self._check_key('scaledValueOfEarthMajorAxis', 1456.0) - self._check_key('scaleFactorOfEarthMinorAxis', 0) - self._check_key('scaledValueOfEarthMinorAxis', 1123.0) + 
self._check_key("shapeOfTheEarth", 7) + self._check_key("scaleFactorOfEarthMajorAxis", 0) + self._check_key("scaledValueOfEarthMajorAxis", 1456.0) + self._check_key("scaleFactorOfEarthMinorAxis", 0) + self._check_key("scaledValueOfEarthMinorAxis", 1123.0) def test__grid_shape(self): - test_cube = self._make_test_cube(x_points=np.arange(13), - y_points=np.arange(6)) + test_cube = self._make_test_cube(x_points=np.arange(13), y_points=np.arange(6)) grid_definition_template_5(test_cube, self.mock_grib) - self._check_key('Ni', 13) - self._check_key('Nj', 6) + self._check_key("Ni", 13) + self._check_key("Nj", 6) def test__scanmode(self): grid_definition_template_5(self.test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 1) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 1) def test__scanmode_reverse(self): test_cube = self._make_test_cube(y_points=[5.0, 2.0]) grid_definition_template_5(test_cube, self.mock_grib) - self._check_key('iScansPositively', 1) - self._check_key('jScansPositively', 0) + self._check_key("iScansPositively", 1) + self._check_key("jScansPositively", 0) def test__rotated_pole(self): - cs = RotatedGeogCS(grid_north_pole_latitude=75.3, - grid_north_pole_longitude=54.321, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=75.3, + grid_north_pole_longitude=54.321, + ellipsoid=self.default_ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) grid_definition_template_5(test_cube, self.mock_grib) self._check_key("latitudeOfSouthernPole", -75300000) @@ -93,14 +100,16 @@ def test__rotated_pole(self): self._check_key("angleOfRotation", 0) def test__fail_rotated_pole_nonstandard_meridian(self): - cs = RotatedGeogCS(grid_north_pole_latitude=90.0, - grid_north_pole_longitude=0.0, - north_pole_grid_longitude=22.5, - ellipsoid=self.default_ellipsoid) + cs = RotatedGeogCS( + grid_north_pole_latitude=90.0, + grid_north_pole_longitude=0.0, + 
north_pole_grid_longitude=22.5, + ellipsoid=self.default_ellipsoid, + ) test_cube = self._make_test_cube(cs=cs) with self.assertRaisesRegex( - TranslationError, - 'not yet support .* rotated prime meridian.'): + TranslationError, "not yet support .* rotated prime meridian." + ): grid_definition_template_5(test_cube, self.mock_grib) def test__grid_points(self): @@ -115,16 +124,16 @@ def test__grid_points(self): self._check_key("latitude", y_longs) def test__true_winds_orientation(self): - self.test_cube.rename('eastward_wind') + self.test_cube.rename("eastward_wind") grid_definition_template_5(self.test_cube, self.mock_grib) - flags = self.mock_grib.keys['resolutionAndComponentFlags'] & 255 + flags = self.mock_grib.keys["resolutionAndComponentFlags"] & 255 flags_expected = 0b00000000 self.assertEqual(flags, flags_expected) def test__grid_winds_orientation(self): - self.test_cube.rename('x_wind') + self.test_cube.rename("x_wind") grid_definition_template_5(self.test_cube, self.mock_grib) - flags = self.mock_grib.keys['resolutionAndComponentFlags'] & 255 + flags = self.mock_grib.keys["resolutionAndComponentFlags"] & 255 flags_expected = 0b00001000 self.assertEqual(flags, flags_expected) diff --git a/iris_grib/tests/unit/save_rules/test_identification.py b/iris_grib/tests/unit/save_rules/test_identification.py index 63ef01eff..d09dc52bc 100644 --- a/iris_grib/tests/unit/save_rules/test_identification.py +++ b/iris_grib/tests/unit/save_rules/test_identification.py @@ -19,7 +19,7 @@ from iris_grib.tests.unit import TestGribSimple -GRIB_API = 'iris_grib._save_rules.eccodes' +GRIB_API = "iris_grib._save_rules.eccodes" class Test(TestGribSimple): @@ -32,13 +32,13 @@ def test_no_realization(self): identification(cube, grib) mock_eccodes.assert_has_calls( - [mock.call.codes_set_long(grib, "typeOfProcessedData", 2)]) + [mock.call.codes_set_long(grib, "typeOfProcessedData", 2)] + ) @tests.skip_data def test_realization_0(self): cube = stock.simple_pp() - realisation = 
iris.coords.AuxCoord((0,), standard_name='realization', - units='1') + realisation = iris.coords.AuxCoord((0,), standard_name="realization", units="1") cube.add_aux_coord(realisation) grib = mock.Mock() @@ -47,13 +47,13 @@ def test_realization_0(self): identification(cube, grib) mock_eccodes.assert_has_calls( - [mock.call.codes_set_long(grib, "typeOfProcessedData", 3)]) + [mock.call.codes_set_long(grib, "typeOfProcessedData", 3)] + ) @tests.skip_data def test_realization_n(self): cube = stock.simple_pp() - realisation = iris.coords.AuxCoord((2,), standard_name='realization', - units='1') + realisation = iris.coords.AuxCoord((2,), standard_name="realization", units="1") cube.add_aux_coord(realisation) grib = mock.Mock() @@ -62,7 +62,8 @@ def test_realization_n(self): identification(cube, grib) mock_eccodes.assert_has_calls( - [mock.call.codes_set_long(grib, "typeOfProcessedData", 4)]) + [mock.call.codes_set_long(grib, "typeOfProcessedData", 4)] + ) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_1.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_1.py index fad9f5cdf..c6904c3da 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_1.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_1.py @@ -26,31 +26,30 @@ class TestRealizationIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. 
- self.cube.rename('air_temperature') - coord = DimCoord([45], 'time', - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("air_temperature") + coord = DimCoord( + [45], "time", units=Unit("days since epoch", calendar="standard") + ) self.cube.add_aux_coord(coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_realization(self, mock_set): cube = self.cube - coord = DimCoord(10, 'realization', units='1') + coord = DimCoord(10, "realization", units="1") cube.add_aux_coord(coord) product_definition_template_1(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 1) - mock_set.assert_any_call(mock.sentinel.grib, - "perturbationNumber", 10) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfForecastsInEnsemble", 255) - mock_set.assert_any_call(mock.sentinel.grib, - "typeOfEnsembleForecast", 255) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 1 + ) + mock_set.assert_any_call(mock.sentinel.grib, "perturbationNumber", 10) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfForecastsInEnsemble", 255) + mock_set.assert_any_call(mock.sentinel.grib, "typeOfEnsembleForecast", 255) + + @mock.patch.object(eccodes, "codes_set") def test_multiple_realization_values(self, mock_set): cube = self.cube - coord = DimCoord([8, 9, 10], 'realization', units='1') + coord = DimCoord([8, 9, 10], "realization", units="1") cube.add_aux_coord(coord, 0) msg = "'realization' coordinate with one point is required" diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_10.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_10.py index 93355da89..64cc01a99 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_10.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_10.py @@ -26,32 +26,35 @@ class 
TestPercentileValueIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. - self.cube.rename('y_wind') + self.cube.rename("y_wind") time_coord = DimCoord( - 20, 'time', bounds=[0, 40], - units=Unit('days since epoch', calendar='julian')) + 20, + "time", + bounds=[0, 40], + units=Unit("days since epoch", calendar="julian"), + ) self.cube.add_aux_coord(time_coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_percentile_value(self, mock_set): cube = self.cube - percentile_coord = DimCoord(95, long_name='percentile_over_time') + percentile_coord = DimCoord(95, long_name="percentile_over_time") cube.add_aux_coord(percentile_coord) product_definition_template_10(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 10) - mock_set.assert_any_call(mock.sentinel.grib, - "percentileValue", 95) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 10 + ) + mock_set.assert_any_call(mock.sentinel.grib, "percentileValue", 95) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_multiple_percentile_value(self, mock_set): cube = self.cube - percentile_coord = DimCoord([5, 10, 15], - long_name='percentile_over_time') + percentile_coord = DimCoord([5, 10, 15], long_name="percentile_over_time") cube.add_aux_coord(percentile_coord, 0) - err_msg = "A cube 'percentile_over_time' coordinate with one point "\ - "is required" + err_msg = ( + "A cube 'percentile_over_time' coordinate with one point " "is required" + ) with self.assertRaisesRegex(ValueError, err_msg): product_definition_template_10(cube, mock.sentinel.grib) diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_11.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_11.py index 6c0c2dc06..64e27a064 100644 --- 
a/iris_grib/tests/unit/save_rules/test_product_definition_template_11.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_11.py @@ -26,28 +26,30 @@ class TestRealizationIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. - self.cube.rename('air_temperature') - coord = DimCoord(23, 'time', bounds=[0, 100], - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("air_temperature") + coord = DimCoord( + 23, + "time", + bounds=[0, 100], + units=Unit("days since epoch", calendar="standard"), + ) self.cube.add_aux_coord(coord) - coord = DimCoord(4, 'realization', units='1') + coord = DimCoord(4, "realization", units="1") self.cube.add_aux_coord(coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_realization(self, mock_set): cube = self.cube - cell_method = CellMethod(method='sum', coords=['time']) + cell_method = CellMethod(method="sum", coords=["time"]) cube.add_cell_method(cell_method) product_definition_template_11(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 11) - mock_set.assert_any_call(mock.sentinel.grib, - "perturbationNumber", 4) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfForecastsInEnsemble", 255) - mock_set.assert_any_call(mock.sentinel.grib, - "typeOfEnsembleForecast", 255) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 11 + ) + mock_set.assert_any_call(mock.sentinel.grib, "perturbationNumber", 4) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfForecastsInEnsemble", 255) + mock_set.assert_any_call(mock.sentinel.grib, "typeOfEnsembleForecast", 255) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_15.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_15.py index 
8da31e0a9..458d03bfd 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_15.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_15.py @@ -27,126 +27,119 @@ def setUp(self): self.cube = stock.lat_lon_cube() # Add scalar time coord so that product_definition_template_common # doesn't get upset. - t_coord = DimCoord([424854.], standard_name='time', - units=Unit('hours since 1970-01-01 00:00:00', - calendar='gregorian')) + t_coord = DimCoord( + [424854.0], + standard_name="time", + units=Unit("hours since 1970-01-01 00:00:00", calendar="gregorian"), + ) self.cube.add_aux_coord(t_coord) # Rename cube to avoid warning about unknown discipline/parameter. - self.cube.rename('air_temperature') + self.cube.rename("air_temperature") - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_cell_method(self, mock_set): cube_0 = self.cube - cube_0.attributes = dict(spatial_processing_type='No interpolation') - cell_method = CellMethod(method='mean', coords=['area']) + cube_0.attributes = dict(spatial_processing_type="No interpolation") + cell_method = CellMethod(method="mean", coords=["area"]) cube_0.add_cell_method(cell_method) # If the cube has a cell method attached then it should not have any # interpolation on the data, so spatial processing code should be 0 and # number of points used should be 0. 
product_definition_template_15(cube_0, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 0) - mock_set.assert_any_call(mock.sentinel.grib, - "statisticalProcess", 0) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 0) + mock_set.assert_any_call(mock.sentinel.grib, "statisticalProcess", 0) + + @mock.patch.object(eccodes, "codes_set") def test_bilinear_interpolation(self, mock_set): cube_1 = self.cube - cube_1.attributes = dict(spatial_processing_type='Bilinear ' - 'interpolation') + cube_1.attributes = dict(spatial_processing_type="Bilinear " "interpolation") # If the cube has a bilinear interpolation attribute, spatial # processing code should be 1 and number of points used should be 4. product_definition_template_15(cube_1, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 1) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 4) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 1) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 4) + + @mock.patch.object(eccodes, "codes_set") def test_bicubic_interpolation(self, mock_set): cube_2 = self.cube - cube_2.attributes = dict(spatial_processing_type='Bicubic ' - 'interpolation') + cube_2.attributes = dict(spatial_processing_type="Bicubic " "interpolation") # If the cube has a bicubic interpolation attribute, spatial # processing code should be 2 and number of points used should be 4. 
product_definition_template_15(cube_2, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 2) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 4) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 2) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 4) + + @mock.patch.object(eccodes, "codes_set") def test_nearest_neighbour_interpolation(self, mock_set): cube_3 = self.cube - cube_3.attributes = dict(spatial_processing_type='Nearest neighbour ' - 'interpolation') + cube_3.attributes = dict( + spatial_processing_type="Nearest neighbour " "interpolation" + ) # If the cube has a nearest neighbour interpolation attribute, spatial # processing code should be 3 and number of points used should be 1. product_definition_template_15(cube_3, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 3) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 1) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 3) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 1) + + @mock.patch.object(eccodes, "codes_set") def test_budget_interpolation(self, mock_set): cube_4 = self.cube - cube_4.attributes = dict(spatial_processing_type='Budget ' - 'interpolation') + cube_4.attributes = dict(spatial_processing_type="Budget " "interpolation") # If the cube has a budget interpolation attribute, spatial # processing code should be 4 and number of points used should be 4. 
product_definition_template_15(cube_4, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 4) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 4) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 4) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 4) + + @mock.patch.object(eccodes, "codes_set") def test_spectral_interpolation(self, mock_set): cube_5 = self.cube - cube_5.attributes = dict(spatial_processing_type='Spectral ' - 'interpolation') + cube_5.attributes = dict(spatial_processing_type="Spectral " "interpolation") # If the cube has a spectral interpolation attribute, spatial # processing code should be 5 and number of points used should be 4. product_definition_template_15(cube_5, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 5) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 4) - - @mock.patch.object(eccodes, 'codes_set') + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 5) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 4) + + @mock.patch.object(eccodes, "codes_set") def test_neighbour_budget_interpolation(self, mock_set): cube_6 = self.cube - cube_6.attributes = dict(spatial_processing_type='Neighbour-budget ' - 'interpolation') + cube_6.attributes = dict( + spatial_processing_type="Neighbour-budget " "interpolation" + ) # If the cube has a neighbour-budget interpolation attribute, spatial # processing code should be 6 and number of points used should be 4. 
product_definition_template_15(cube_6, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 15) - mock_set.assert_any_call(mock.sentinel.grib, - "spatialProcessing", 6) - mock_set.assert_any_call(mock.sentinel.grib, - "numberOfPointsUsed", 4) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 15 + ) + mock_set.assert_any_call(mock.sentinel.grib, "spatialProcessing", 6) + mock_set.assert_any_call(mock.sentinel.grib, "numberOfPointsUsed", 4) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_40.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_40.py index f247d32bd..d0df74cbc 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_40.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_40.py @@ -26,22 +26,23 @@ class TestChemicalConstituentIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. 
- self.cube.rename('atmosphere_mole_content_of_ozone') - coord = DimCoord(24, 'time', - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("atmosphere_mole_content_of_ozone") + coord = DimCoord( + 24, "time", units=Unit("days since epoch", calendar="standard") + ) self.cube.add_aux_coord(coord) - self.cube.attributes['WMO_constituent_type'] = 0 + self.cube.attributes["WMO_constituent_type"] = 0 - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_constituent_type(self, mock_set): cube = self.cube product_definition_template_40(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'productDefinitionTemplateNumber', 40) - mock_set.assert_any_call(mock.sentinel.grib, - 'constituentType', 0) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 40 + ) + mock_set.assert_any_call(mock.sentinel.grib, "constituentType", 0) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_6.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_6.py index 0f39e1e07..ae9859fca 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_6.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_6.py @@ -26,27 +26,28 @@ class TestRealizationIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. 
- self.cube.rename('air_temperature') - coord = DimCoord([45], 'time', - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("air_temperature") + coord = DimCoord( + [45], "time", units=Unit("days since epoch", calendar="standard") + ) self.cube.add_aux_coord(coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_percentile(self, mock_set): cube = self.cube - coord = DimCoord(10, long_name='percentile', units='%') + coord = DimCoord(10, long_name="percentile", units="%") cube.add_aux_coord(coord) product_definition_template_6(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 6) - mock_set.assert_any_call(mock.sentinel.grib, - "percentileValue", 10) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 6 + ) + mock_set.assert_any_call(mock.sentinel.grib, "percentileValue", 10) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_multiple_percentile_values(self, mock_set): cube = self.cube - coord = DimCoord([8, 9, 10], long_name='percentile', units='%') + coord = DimCoord([8, 9, 10], long_name="percentile", units="%") cube.add_aux_coord(coord, 0) msg = "'percentile' coordinate with one point is required" diff --git a/iris_grib/tests/unit/save_rules/test_product_definition_template_8.py b/iris_grib/tests/unit/save_rules/test_product_definition_template_8.py index 2c5ba3ef8..bcb461149 100644 --- a/iris_grib/tests/unit/save_rules/test_product_definition_template_8.py +++ b/iris_grib/tests/unit/save_rules/test_product_definition_template_8.py @@ -28,20 +28,25 @@ class TestProductDefinitionIdentifier(tests.IrisGribTest): def setUp(self): self.cube = stock.lat_lon_cube() # Rename cube to avoid warning about unknown discipline/parameter. 
- self.cube.rename('air_temperature') - coord = DimCoord(23, 'time', bounds=[0, 100], - units=Unit('days since epoch', calendar='standard')) + self.cube.rename("air_temperature") + coord = DimCoord( + 23, + "time", + bounds=[0, 100], + units=Unit("days since epoch", calendar="standard"), + ) self.cube.add_aux_coord(coord) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_product_definition(self, mock_set): cube = self.cube - cell_method = CellMethod(method='sum', coords=['time']) + cell_method = CellMethod(method="sum", coords=["time"]) cube.add_cell_method(cell_method) product_definition_template_8(cube, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - "productDefinitionTemplateNumber", 8) + mock_set.assert_any_call( + mock.sentinel.grib, "productDefinitionTemplateNumber", 8 + ) class Test_type_of_statistical_processing(tests.IrisTest): diff --git a/iris_grib/tests/unit/save_rules/test_reference_time.py b/iris_grib/tests/unit/save_rules/test_reference_time.py index 3c31f11d9..02e593477 100644 --- a/iris_grib/tests/unit/save_rules/test_reference_time.py +++ b/iris_grib/tests/unit/save_rules/test_reference_time.py @@ -20,25 +20,28 @@ class Test(tests.IrisGribTest): def _test(self, cube): grib = mock.Mock() mock_eccodes = mock.Mock(spec=eccodes) - with mock.patch('iris_grib._save_rules.eccodes', mock_eccodes): + with mock.patch("iris_grib._save_rules.eccodes", mock_eccodes): reference_time(cube, grib) mock_eccodes.assert_has_calls( - [mock.call.codes_set_long(grib, "significanceOfReferenceTime", 1), - mock.call.codes_set_long(grib, "dataDate", '19980306'), - mock.call.codes_set_long(grib, "dataTime", '0300')]) + [ + mock.call.codes_set_long(grib, "significanceOfReferenceTime", 1), + mock.call.codes_set_long(grib, "dataDate", "19980306"), + mock.call.codes_set_long(grib, "dataTime", "0300"), + ] + ) @tests.skip_data def test_forecast_period(self): # The stock cube has a non-compliant forecast_period. 
- fname = tests.get_data_path(('GRIB', 'global_t', 'global.grib2')) + fname = tests.get_data_path(("GRIB", "global_t", "global.grib2")) [cube] = load_cubes(fname) self._test(cube) @tests.skip_data def test_no_forecast_period(self): # The stock cube has a non-compliant forecast_period. - fname = tests.get_data_path(('GRIB', 'global_t', 'global.grib2')) + fname = tests.get_data_path(("GRIB", "global_t", "global.grib2")) [cube] = load_cubes(fname) cube.remove_coord("forecast_period") self._test(cube) diff --git a/iris_grib/tests/unit/save_rules/test_set_discipline_and_parameter.py b/iris_grib/tests/unit/save_rules/test_set_discipline_and_parameter.py index 9daa8bf3b..1aa899f51 100644 --- a/iris_grib/tests/unit/save_rules/test_set_discipline_and_parameter.py +++ b/iris_grib/tests/unit/save_rules/test_set_discipline_and_parameter.py @@ -3,6 +3,7 @@ # This file is part of iris-grib and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for `iris_grib.grib_save_rules.set_discipline_and_parameter`.""" + # Import iris_grib.tests first so that some things can be initialised before # importing anything else. import iris_grib.tests as tests @@ -19,15 +20,12 @@ class TestPhenomenonCoding(tests.IrisGribTest): def setUp(self): # A mock cube with empty phenomenon-specifying metadata. self.mock_cube = mock.Mock( - spec=Cube, - standard_name=None, - long_name=None, - attributes={}) + spec=Cube, standard_name=None, long_name=None, attributes={} + ) def _check_coding(self, cube, discipline, paramCategory, paramNumber): # Check that encoding 'cube' writes the expected phenomenon keys. 
- codes_set_patch = self.patch( - 'iris_grib._save_rules.eccodes.codes_set') + codes_set_patch = self.patch("iris_grib._save_rules.eccodes.codes_set") mock_message = mock.sentinel.grib2_message set_discipline_and_parameter(cube, mock_message) @@ -35,7 +33,8 @@ def _check_coding(self, cube, discipline, paramCategory, paramNumber): expected_calls = [ mock.call(mock_message, "discipline", discipline), mock.call(mock_message, "parameterCategory", paramCategory), - mock.call(mock_message, "parameterNumber", paramNumber)] + mock.call(mock_message, "parameterNumber", paramNumber), + ] self.assertEqual(codes_set_patch.call_args_list, expected_calls) @@ -45,38 +44,38 @@ def test_unknown_phenomenon(self): def test_known_standard_name(self): cube = self.mock_cube - cube.standard_name = 'sea_water_y_velocity' + cube.standard_name = "sea_water_y_velocity" self._check_coding(cube, 10, 1, 3) # as seen in _grib_cf_map.py def test_known_long_name(self): cube = self.mock_cube - cube.long_name = 'cloud_mixing_ratio' + cube.long_name = "cloud_mixing_ratio" self._check_coding(cube, 0, 1, 22) def test_gribcode_attribute_object(self): cube = self.mock_cube - cube.attributes = {'GRIB_PARAM': GRIBCode(2, 7, 12, 99)} + cube.attributes = {"GRIB_PARAM": GRIBCode(2, 7, 12, 99)} self._check_coding(cube, 7, 12, 99) def test_gribcode_attribute_string(self): cube = self.mock_cube - cube.attributes = {'GRIB_PARAM': '2, 9, 33, 177'} + cube.attributes = {"GRIB_PARAM": "2, 9, 33, 177"} self._check_coding(cube, 9, 33, 177) def test_gribcode_attribute_tuple(self): cube = self.mock_cube - cube.attributes = {'GRIB_PARAM': (2, 33, 4, 12)} + cube.attributes = {"GRIB_PARAM": (2, 33, 4, 12)} self._check_coding(cube, 33, 4, 12) def test_gribcode_attribute_not_edition_2(self): cube = self.mock_cube - cube.attributes = {'GRIB_PARAM': GRIBCode(1, 7, 12, 99)} + cube.attributes = {"GRIB_PARAM": GRIBCode(1, 7, 12, 99)} self._check_coding(cube, 255, 255, 255) def test_gribcode_attribute_overrides_phenomenon(self): 
cube = self.mock_cube - cube.standard_name = 'sea_water_y_velocity' - cube.attributes = {'GRIB_PARAM': '2, 9, 33, 177'} + cube.standard_name = "sea_water_y_velocity" + cube.attributes = {"GRIB_PARAM": "2, 9, 33, 177"} self._check_coding(cube, 9, 33, 177) diff --git a/iris_grib/tests/unit/save_rules/test_set_fixed_surfaces.py b/iris_grib/tests/unit/save_rules/test_set_fixed_surfaces.py index 262da42fb..7b9791305 100644 --- a/iris_grib/tests/unit/save_rules/test_set_fixed_surfaces.py +++ b/iris_grib/tests/unit/save_rules/test_set_fixed_surfaces.py @@ -27,84 +27,87 @@ class Test(tests.IrisGribTest): def test_bounded_altitude_feet(self): cube = iris.cube.Cube([0]) - cube.add_aux_coord(iris.coords.AuxCoord( - 1500.0, long_name='altitude', units='ft', - bounds=np.array([1000.0, 2000.0]))) + cube.add_aux_coord( + iris.coords.AuxCoord( + 1500.0, + long_name="altitude", + units="ft", + bounds=np.array([1000.0, 2000.0]), + ) + ) grib = eccodes.codes_grib_new_from_samples("GRIB2") set_fixed_surfaces(cube, grib) self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), - 305.0) # precise ~304.8 - self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), - 610.0) # precise ~609.6 - self.assertEqual( - eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), - 102) + eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), 305.0 + ) # precise ~304.8 self.assertEqual( - eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), - 102) + eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), 610.0 + ) # precise ~609.6 + self.assertEqual(eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 102) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 102) def test_theta_level(self): cube = iris.cube.Cube([0]) - cube.add_aux_coord(iris.coords.AuxCoord( - 230.0, standard_name='air_potential_temperature', - units='K', attributes={'positive': 'up'}, - bounds=np.array([220.0, 240.0]))) + 
cube.add_aux_coord( + iris.coords.AuxCoord( + 230.0, + standard_name="air_potential_temperature", + units="K", + attributes={"positive": "up"}, + bounds=np.array([220.0, 240.0]), + ) + ) grib = eccodes.codes_grib_new_from_samples("GRIB2") set_fixed_surfaces(cube, grib) self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), - 220.0) - self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), - 240.0) - self.assertEqual( - eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), - 107) + eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), 220.0 + ) self.assertEqual( - eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), - 107) + eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), 240.0 + ) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 107) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 107) def test_depth(self): cube = iris.cube.Cube([0]) - cube.add_aux_coord(iris.coords.AuxCoord( - 1, long_name='depth', units='m', - bounds=np.array([0., 2]), attributes={'positive': 'down'})) + cube.add_aux_coord( + iris.coords.AuxCoord( + 1, + long_name="depth", + units="m", + bounds=np.array([0.0, 2]), + attributes={"positive": "down"}, + ) + ) grib = eccodes.codes_grib_new_from_samples("GRIB2") set_fixed_surfaces(cube, grib) self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), - 0.) 
- self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), - 2) - self.assertEqual( - eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), - 106) + eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), 0.0 + ) self.assertEqual( - eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), - 106) + eccodes.codes_get_double(grib, "scaledValueOfSecondFixedSurface"), 2 + ) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 106) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 106) @mock.patch.object(eccodes, "codes_set") def test_altitude_point(self, mock_set): grib = None cube = iris.cube.Cube([1, 2, 3, 4, 5]) - cube.add_aux_coord( - iris.coords.AuxCoord([12345], "altitude", units="m") - ) + cube.add_aux_coord(iris.coords.AuxCoord([12345], "altitude", units="m")) set_fixed_surfaces(cube, grib) mock_set.assert_any_call(grib, "typeOfFirstFixedSurface", 102) mock_set.assert_any_call(grib, "scaleFactorOfFirstFixedSurface", 0) - mock_set.assert_any_call(grib, "scaledValueOfFirstFixedSurface", - 12345) + mock_set.assert_any_call(grib, "scaledValueOfFirstFixedSurface", 12345) mock_set.assert_any_call(grib, "typeOfSecondFixedSurface", 255) - mock_set.assert_any_call(grib, "scaleFactorOfSecondFixedSurface", - GRIB_MISSING_LONG) - mock_set.assert_any_call(grib, "scaledValueOfSecondFixedSurface", - GRIB_MISSING_LONG) + mock_set.assert_any_call( + grib, "scaleFactorOfSecondFixedSurface", GRIB_MISSING_LONG + ) + mock_set.assert_any_call( + grib, "scaledValueOfSecondFixedSurface", GRIB_MISSING_LONG + ) @mock.patch.object(eccodes, "codes_set") def test_height_point(self, mock_set): @@ -118,66 +121,78 @@ def test_height_point(self, mock_set): mock_set.assert_any_call(grib, "scaleFactorOfFirstFixedSurface", 0) mock_set.assert_any_call(grib, "scaledValueOfFirstFixedSurface", 12345) mock_set.assert_any_call(grib, "typeOfSecondFixedSurface", 255) - mock_set.assert_any_call(grib, 
"scaleFactorOfSecondFixedSurface", - GRIB_MISSING_LONG) - mock_set.assert_any_call(grib, "scaledValueOfSecondFixedSurface", - GRIB_MISSING_LONG) + mock_set.assert_any_call( + grib, "scaleFactorOfSecondFixedSurface", GRIB_MISSING_LONG + ) + mock_set.assert_any_call( + grib, "scaledValueOfSecondFixedSurface", GRIB_MISSING_LONG + ) def test_unknown_vertical_unbounded(self): cube = iris.cube.Cube([0]) cube.add_aux_coord( - iris.coords.AuxCoord([1], - attributes={'GRIB_fixed_surface_type': 151})) + iris.coords.AuxCoord([1], attributes={"GRIB_fixed_surface_type": 151}) + ) grib = eccodes.codes_grib_new_from_samples("GRIB2") set_fixed_surfaces(cube, grib) - self.assertEqual(eccodes.codes_get_long( - grib, "typeOfFirstFixedSurface"), 151) - self.assertEqual(eccodes.codes_get_double( - grib, "scaledValueOfFirstFixedSurface"), 1) - self.assertEqual(eccodes.codes_get_double( - grib, "scaleFactorOfFirstFixedSurface"), 0) - self.assertEqual(eccodes.codes_get_long( - grib, "typeOfSecondFixedSurface"), 255) - self.assertEqual(eccodes.codes_get_long( - grib, "scaledValueOfSecondFixedSurface"), GRIB_MISSING_LONG) - self.assertEqual(eccodes.codes_get_long( - grib, "scaleFactorOfSecondFixedSurface"), GRIB_MISSING_LONG) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 151) + self.assertEqual( + eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), 1 + ) + self.assertEqual( + eccodes.codes_get_double(grib, "scaleFactorOfFirstFixedSurface"), 0 + ) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 255) + self.assertEqual( + eccodes.codes_get_long(grib, "scaledValueOfSecondFixedSurface"), + GRIB_MISSING_LONG, + ) + self.assertEqual( + eccodes.codes_get_long(grib, "scaleFactorOfSecondFixedSurface"), + GRIB_MISSING_LONG, + ) def test_unknown_vertical_bounded(self): cube = iris.cube.Cube([0]) cube.add_aux_coord( - iris.coords.AuxCoord([700], bounds=np.array([900.0, 500.0]), - attributes={'GRIB_fixed_surface_type': 108})) + 
iris.coords.AuxCoord( + [700], + bounds=np.array([900.0, 500.0]), + attributes={"GRIB_fixed_surface_type": 108}, + ) + ) grib = eccodes.codes_grib_new_from_samples("GRIB2") set_fixed_surfaces(cube, grib) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 108) self.assertEqual( - eccodes.codes_get_long(grib, "typeOfFirstFixedSurface"), 108) - self.assertEqual( - eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), - 900) - self.assertEqual( - eccodes.codes_get_double(grib, "scaleFactorOfFirstFixedSurface"), - 0) + eccodes.codes_get_double(grib, "scaledValueOfFirstFixedSurface"), 900 + ) self.assertEqual( - eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 108) + eccodes.codes_get_double(grib, "scaleFactorOfFirstFixedSurface"), 0 + ) + self.assertEqual(eccodes.codes_get_long(grib, "typeOfSecondFixedSurface"), 108) self.assertEqual( - eccodes.codes_get_long(grib, "scaledValueOfSecondFixedSurface"), - 500) + eccodes.codes_get_long(grib, "scaledValueOfSecondFixedSurface"), 500 + ) self.assertEqual( - eccodes.codes_get_long(grib, "scaleFactorOfSecondFixedSurface"), - 0) + eccodes.codes_get_long(grib, "scaleFactorOfSecondFixedSurface"), 0 + ) def test_multiple_unknown_vertical_coords(self): grib = None cube = iris.cube.Cube([0]) cube.add_aux_coord( - iris.coords.AuxCoord([1], - attributes={'GRIB_fixed_surface_type': 151})) + iris.coords.AuxCoord([1], attributes={"GRIB_fixed_surface_type": 151}) + ) cube.add_aux_coord( - iris.coords.AuxCoord([450], bounds=np.array([900.0, 0.0]), - attributes={'GRIB_fixed_surface_type': 108})) + iris.coords.AuxCoord( + [450], + bounds=np.array([900.0, 0.0]), + attributes={"GRIB_fixed_surface_type": 108}, + ) + ) msg = r"coordinates were found of fixed surface type: \[151, 108\]" with self.assertRaisesRegex(ValueError, msg): set_fixed_surfaces(cube, grib) @@ -185,10 +200,11 @@ def test_multiple_unknown_vertical_coords(self): def test_unhandled_vertical_axis(self): grib = None cube = 
iris.cube.Cube([0]) - cube.add_aux_coord( - iris.coords.AuxCoord([450], attributes={'positive': 'up'})) - msg = r"vertical-axis coordinate\(s\) \('unknown'\) are not " \ - "recognised or handled." + cube.add_aux_coord(iris.coords.AuxCoord([450], attributes={"positive": "up"})) + msg = ( + r"vertical-axis coordinate\(s\) \('unknown'\) are not " + "recognised or handled." + ) with self.assertRaisesRegex(TranslationError, msg): set_fixed_surfaces(cube, grib) @@ -201,10 +217,12 @@ def test_no_vertical(self, mock_set): mock_set.assert_any_call(grib, "scaleFactorOfFirstFixedSurface", 0) mock_set.assert_any_call(grib, "scaledValueOfFirstFixedSurface", 0) mock_set.assert_any_call(grib, "typeOfSecondFixedSurface", 255) - mock_set.assert_any_call(grib, "scaleFactorOfSecondFixedSurface", - GRIB_MISSING_LONG) - mock_set.assert_any_call(grib, "scaledValueOfSecondFixedSurface", - GRIB_MISSING_LONG) + mock_set.assert_any_call( + grib, "scaleFactorOfSecondFixedSurface", GRIB_MISSING_LONG + ) + mock_set.assert_any_call( + grib, "scaledValueOfSecondFixedSurface", GRIB_MISSING_LONG + ) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/save_rules/test_set_time_increment.py b/iris_grib/tests/unit/save_rules/test_set_time_increment.py index 4ff6f14e7..6f44d88d6 100644 --- a/iris_grib/tests/unit/save_rules/test_set_time_increment.py +++ b/iris_grib/tests/unit/save_rules/test_set_time_increment.py @@ -21,64 +21,72 @@ class Test(tests.IrisGribTest): - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_no_intervals(self, mock_set): - cell_method = CellMethod('sum', 'time') + cell_method = CellMethod("sum", "time") set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 255) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 0) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 255 + ) + 
mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 0) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_area(self, mock_set): - cell_method = CellMethod('sum', 'area', '25 km') + cell_method = CellMethod("sum", "area", "25 km") set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 255) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 0) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 255 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 0) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_multiple_intervals(self, mock_set): - cell_method = CellMethod('sum', 'time', ('1 hour', '24 hour')) + cell_method = CellMethod("sum", "time", ("1 hour", "24 hour")) set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 255) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 0) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 255 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 0) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_hr(self, mock_set): - cell_method = CellMethod('sum', 'time', '23 hr') + cell_method = CellMethod("sum", "time", "23 hr") set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 1) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 23) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 1 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 23) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_hour(self, mock_set): - cell_method = 
CellMethod('sum', 'time', '24 hour') + cell_method = CellMethod("sum", "time", "24 hour") set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 1) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 24) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 1 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 24) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_hours(self, mock_set): - cell_method = CellMethod('sum', 'time', '25 hours') + cell_method = CellMethod("sum", "time", "25 hours") set_time_increment(cell_method, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 1) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 25) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 1 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 25) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_fractional_hours(self, mock_set): - cell_method = CellMethod('sum', 'time', '25.9 hours') - with mock.patch('warnings.warn') as warn: + cell_method = CellMethod("sum", "time", "25.9 hours") + with mock.patch("warnings.warn") as warn: set_time_increment(cell_method, mock.sentinel.grib) - warn.assert_called_once_with('Truncating floating point timeIncrement ' - '25.9 to integer value 25') - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeIncrement', 1) - mock_set.assert_any_call(mock.sentinel.grib, 'timeIncrement', 25) + warn.assert_called_once_with( + "Truncating floating point timeIncrement " "25.9 to integer value 25" + ) + mock_set.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 1 + ) + mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 25) if __name__ == "__main__": diff --git 
a/iris_grib/tests/unit/save_rules/test_set_time_range.py b/iris_grib/tests/unit/save_rules/test_set_time_range.py index a7e0baced..d2f1dacb6 100644 --- a/iris_grib/tests/unit/save_rules/test_set_time_range.py +++ b/iris_grib/tests/unit/save_rules/test_set_time_range.py @@ -24,53 +24,57 @@ class Test(tests.IrisGribTest): def setUp(self): - self.coord = DimCoord(0, 'time', - units=Unit('hours since epoch', - calendar='standard')) + self.coord = DimCoord( + 0, "time", units=Unit("hours since epoch", calendar="standard") + ) def test_no_bounds(self): - with self.assertRaisesRegex(ValueError, 'Expected time coordinate ' - 'with two bounds, got 0 bounds'): + with self.assertRaisesRegex( + ValueError, "Expected time coordinate " "with two bounds, got 0 bounds" + ): set_time_range(self.coord, mock.sentinel.grib) def test_three_bounds(self): self.coord.bounds = [0, 1, 2] - with self.assertRaisesRegex(ValueError, 'Expected time coordinate ' - 'with two bounds, got 3 bounds'): + with self.assertRaisesRegex( + ValueError, "Expected time coordinate " "with two bounds, got 3 bounds" + ): set_time_range(self.coord, mock.sentinel.grib) def test_non_scalar(self): - coord = DimCoord([0, 1], 'time', bounds=[[0, 1], [1, 2]], - units=Unit('hours since epoch', calendar='standard')) - with self.assertRaisesRegex(ValueError, 'Expected length one time ' - 'coordinate, got 2 points'): + coord = DimCoord( + [0, 1], + "time", + bounds=[[0, 1], [1, 2]], + units=Unit("hours since epoch", calendar="standard"), + ) + with self.assertRaisesRegex( + ValueError, "Expected length one time " "coordinate, got 2 points" + ): set_time_range(coord, mock.sentinel.grib) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_hours(self, mock_set): lower = 10 upper = 20 self.coord.bounds = [lower, upper] set_time_range(self.coord, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeRange', 1) - 
mock_set.assert_any_call(mock.sentinel.grib, - 'lengthOfTimeRange', upper - lower) + mock_set.assert_any_call(mock.sentinel.grib, "indicatorOfUnitForTimeRange", 1) + mock_set.assert_any_call(mock.sentinel.grib, "lengthOfTimeRange", upper - lower) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_days(self, mock_set): lower = 4 upper = 6 self.coord.bounds = [lower, upper] - self.coord.units = Unit('days since epoch', calendar='standard') + self.coord.units = Unit("days since epoch", calendar="standard") set_time_range(self.coord, mock.sentinel.grib) - mock_set.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeRange', 1) - mock_set.assert_any_call(mock.sentinel.grib, - 'lengthOfTimeRange', - (upper - lower) * 24) + mock_set.assert_any_call(mock.sentinel.grib, "indicatorOfUnitForTimeRange", 1) + mock_set.assert_any_call( + mock.sentinel.grib, "lengthOfTimeRange", (upper - lower) * 24 + ) - @mock.patch.object(eccodes, 'codes_set') + @mock.patch.object(eccodes, "codes_set") def test_fractional_hours(self, mock_set_long): lower = 10.0 upper = 20.9 @@ -79,13 +83,17 @@ def test_fractional_hours(self, mock_set_long): warnings.simplefilter("always") set_time_range(self.coord, mock.sentinel.grib) self.assertEqual(len(warn), 1) - msg = r'Truncating floating point lengthOfTimeRange 10\.8?9+ ' \ - 'to integer value 10' + msg = ( + r"Truncating floating point lengthOfTimeRange 10\.8?9+ " + "to integer value 10" + ) self.assertRegex(str(warn[0].message), msg) - mock_set_long.assert_any_call(mock.sentinel.grib, - 'indicatorOfUnitForTimeRange', 1) - mock_set_long.assert_any_call(mock.sentinel.grib, - 'lengthOfTimeRange', int(upper - lower)) + mock_set_long.assert_any_call( + mock.sentinel.grib, "indicatorOfUnitForTimeRange", 1 + ) + mock_set_long.assert_any_call( + mock.sentinel.grib, "lengthOfTimeRange", int(upper - lower) + ) if __name__ == "__main__": diff --git a/iris_grib/tests/unit/test_GribWrapper.py 
b/iris_grib/tests/unit/test_GribWrapper.py index 1b775298b..2f1484f35 100644 --- a/iris_grib/tests/unit/test_GribWrapper.py +++ b/iris_grib/tests/unit/test_GribWrapper.py @@ -25,16 +25,18 @@ def _mock_codes_get_long(grib_message, key): - lookup = dict(totalLength=_message_length, - numberOfValues=200, - jPointsAreConsecutive=0, - Ni=20, - Nj=10, - edition=1) + lookup = dict( + totalLength=_message_length, + numberOfValues=200, + jPointsAreConsecutive=0, + Ni=20, + Nj=10, + edition=1, + ) try: result = lookup[key] except KeyError: - msg = 'Mock codes_get_long unknown key: {!r}'.format(key) + msg = "Mock codes_get_long unknown key: {!r}".format(key) raise AttributeError(msg) return result @@ -45,7 +47,7 @@ def _mock_codes_get_string(grib_message, key): def _mock_codes_get_native_type(grib_message, key): result = int - if key == 'gridType': + if key == "gridType": result = str return result @@ -56,26 +58,24 @@ def _mock_codes_get_message_offset(grib_message): class Test_edition(tests.IrisGribTest): def setUp(self): - self.patch('iris_grib.GribWrapper._confirm_in_scope') - self.patch('iris_grib.GribWrapper._compute_extra_keys') - self.patch('eccodes.codes_get_long', _mock_codes_get_long) - self.patch('eccodes.codes_get_string', _mock_codes_get_string) - self.patch('eccodes.codes_get_native_type', - _mock_codes_get_native_type) - self.patch('eccodes.codes_get_message_offset', - _mock_codes_get_message_offset) + self.patch("iris_grib.GribWrapper._confirm_in_scope") + self.patch("iris_grib.GribWrapper._compute_extra_keys") + self.patch("eccodes.codes_get_long", _mock_codes_get_long) + self.patch("eccodes.codes_get_string", _mock_codes_get_string) + self.patch("eccodes.codes_get_native_type", _mock_codes_get_native_type) + self.patch("eccodes.codes_get_message_offset", _mock_codes_get_message_offset) def test_not_edition_1(self): def func(grib_message, key): return 2 emsg = "GRIB edition 2 is not supported by 'GribWrapper'" - with mock.patch('eccodes.codes_get_long', 
func): + with mock.patch("eccodes.codes_get_long", func): with self.assertRaisesRegex(TranslationError, emsg): GribWrapper(None) def test_edition_1(self): - grib_message = 'regular_ll' + grib_message = "regular_ll" grib_fh = mock.Mock() wrapper = GribWrapper(grib_message, grib_fh) self.assertEqual(wrapper.grib_message, grib_message) @@ -84,28 +84,24 @@ def test_edition_1(self): @tests.skip_data class Test_deferred_data(tests.IrisTest): def test_regular_data(self): - filename = tests.get_data_path(('GRIB', 'gaussian', - 'regular_gg.grib1')) + filename = tests.get_data_path(("GRIB", "gaussian", "regular_gg.grib1")) messages = list(_load_generate(filename)) self.assertTrue(is_lazy_data(messages[0]._data)) def test_reduced_data(self): - filename = tests.get_data_path(('GRIB', 'reduced', - 'reduced_ll.grib1')) + filename = tests.get_data_path(("GRIB", "reduced", "reduced_ll.grib1")) messages = list(_load_generate(filename)) self.assertTrue(is_lazy_data(messages[0]._data)) class Test_deferred_proxy_args(tests.IrisTest): def setUp(self): - self.patch('iris_grib.GribWrapper._confirm_in_scope') - self.patch('iris_grib.GribWrapper._compute_extra_keys') - self.patch('eccodes.codes_get_long', _mock_codes_get_long) - self.patch('eccodes.codes_get_string', _mock_codes_get_string) - self.patch('eccodes.codes_get_native_type', - _mock_codes_get_native_type) - self.patch('eccodes.codes_get_message_offset', - _mock_codes_get_message_offset) + self.patch("iris_grib.GribWrapper._confirm_in_scope") + self.patch("iris_grib.GribWrapper._compute_extra_keys") + self.patch("eccodes.codes_get_long", _mock_codes_get_long) + self.patch("eccodes.codes_get_string", _mock_codes_get_string) + self.patch("eccodes.codes_get_native_type", _mock_codes_get_native_type) + self.patch("eccodes.codes_get_message_offset", _mock_codes_get_message_offset) self.expected = np.atleast_1d(_offset) self.grib_fh = mock.Mock() self.dtype = np.float64 @@ -113,24 +109,21 @@ def setUp(self): self.lookup = 
_mock_codes_get_long def test_regular_proxy_args(self): - grib_message = 'regular_ll' - shape = (self.lookup(grib_message, 'Nj'), - self.lookup(grib_message, 'Ni')) + grib_message = "regular_ll" + shape = (self.lookup(grib_message, "Nj"), self.lookup(grib_message, "Ni")) for offset in self.expected: - with mock.patch('iris_grib.GribDataProxy') as mock_gdp: + with mock.patch("iris_grib.GribDataProxy") as mock_gdp: _ = GribWrapper(grib_message, self.grib_fh) - mock_gdp.assert_called_once_with(shape, self.dtype, - self.path, offset) + mock_gdp.assert_called_once_with(shape, self.dtype, self.path, offset) def test_reduced_proxy_args(self): - grib_message = 'reduced_gg' - shape = (self.lookup(grib_message, 'numberOfValues')) + grib_message = "reduced_gg" + shape = self.lookup(grib_message, "numberOfValues") for offset in self.expected: - with mock.patch('iris_grib.GribDataProxy') as mock_gdp: + with mock.patch("iris_grib.GribDataProxy") as mock_gdp: _ = GribWrapper(grib_message, self.grib_fh) - mock_gdp.assert_called_once_with((shape,), self.dtype, - self.path, offset) + mock_gdp.assert_called_once_with((shape,), self.dtype, self.path, offset) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/test__load_generate.py b/iris_grib/tests/unit/test__load_generate.py index 882d5b02f..7a10db44c 100644 --- a/iris_grib/tests/unit/test__load_generate.py +++ b/iris_grib/tests/unit/test__load_generate.py @@ -27,36 +27,37 @@ def _make_test_message(self, sections): return GribMessage(raw_message, None, file_ref=file_ref) def test_grib1(self): - sections = [{'editionNumber': 1}] + sections = [{"editionNumber": 1}] message = self._make_test_message(sections) - mfunc = 'iris_grib.GribMessage.messages_from_filename' - mclass = 'iris_grib.GribWrapper' + mfunc = "iris_grib.GribMessage.messages_from_filename" + mclass = "iris_grib.GribWrapper" with mock.patch(mfunc, return_value=[message]) as mock_func: with mock.patch(mclass, 
spec=GribWrapper) as mock_wrapper: field = next(_load_generate(self.fname)) mock_func.assert_called_once_with(self.fname) self.assertIsInstance(field, GribWrapper) - mock_wrapper.assert_called_once_with(self.message_id, - grib_fh=self.grib_fh) + mock_wrapper.assert_called_once_with( + self.message_id, grib_fh=self.grib_fh + ) def test_grib2(self): - sections = [{'editionNumber': 2}] + sections = [{"editionNumber": 2}] message = self._make_test_message(sections) - mfunc = 'iris_grib.GribMessage.messages_from_filename' + mfunc = "iris_grib.GribMessage.messages_from_filename" with mock.patch(mfunc, return_value=[message]) as mock_func: field = next(_load_generate(self.fname)) mock_func.assert_called_once_with(self.fname) self.assertEqual(field, message) def test_grib_unknown(self): - sections = [{'editionNumber': 0}] + sections = [{"editionNumber": 0}] message = self._make_test_message(sections) - mfunc = 'iris_grib.GribMessage.messages_from_filename' - emsg = 'GRIB edition 0 is not supported' + mfunc = "iris_grib.GribMessage.messages_from_filename" + emsg = "GRIB edition 0 is not supported" with mock.patch(mfunc, return_value=[message]): with self.assertRaisesRegex(TranslationError, emsg): next(_load_generate(self.fname)) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/iris_grib/tests/unit/test_load_cubes.py b/iris_grib/tests/unit/test_load_cubes.py index 32f6efba7..d4f3016de 100644 --- a/iris_grib/tests/unit/test_load_cubes.py +++ b/iris_grib/tests/unit/test_load_cubes.py @@ -21,7 +21,7 @@ def test(self): files = mock.sentinel.FILES callback = mock.sentinel.CALLBACK expected_result = mock.sentinel.RESULT - with mock.patch('iris.fileformats.rules.load_cubes') as rules_load: + with mock.patch("iris.fileformats.rules.load_cubes") as rules_load: rules_load.return_value = expected_result result = load_cubes(files, callback) kwargs = {} @@ -32,12 +32,10 @@ def test(self): @tests.skip_data class Test_load_cubes(tests.IrisGribTest): - def 
test_reduced_raw(self): # Loading a GRIB message defined on a reduced grid without # interpolating to a regular grid. - gribfile = tests.get_data_path( - ("GRIB", "reduced", "reduced_gg.grib2")) + gribfile = tests.get_data_path(("GRIB", "reduced", "reduced_gg.grib2")) grib_generator = load_cubes(gribfile) self.assertCML(next(grib_generator)) diff --git a/iris_grib/tests/unit/test_save_grib2.py b/iris_grib/tests/unit/test_save_grib2.py index 9b8f6423e..44603b30e 100644 --- a/iris_grib/tests/unit/test_save_grib2.py +++ b/iris_grib/tests/unit/test_save_grib2.py @@ -17,12 +17,12 @@ class TestSaveGrib2(tests.IrisGribTest): def setUp(self): self.cube = mock.sentinel.cube self.target = mock.sentinel.target - func = 'iris_grib.save_pairs_from_cube' + func = "iris_grib.save_pairs_from_cube" self.messages = list(range(10)) slices = self.messages side_effect = [zip(slices, self.messages)] self.save_pairs_from_cube = self.patch(func, side_effect=side_effect) - func = 'iris_grib.save_messages' + func = "iris_grib.save_messages" self.save_messages = self.patch(func) def _check(self, append=False): diff --git a/iris_grib/tests/unit/test_save_messages.py b/iris_grib/tests/unit/test_save_messages.py index 37c3f71e8..c8a1263ce 100644 --- a/iris_grib/tests/unit/test_save_messages.py +++ b/iris_grib/tests/unit/test_save_messages.py @@ -21,16 +21,15 @@ def setUp(self): def test_save(self): m = mock.mock_open() - with mock.patch('builtins.open', m, create=True): - iris_grib.save_messages([self.grib_message], 'foo.grib2') - self.assertTrue(mock.call('foo.grib2', 'wb') in m.mock_calls) + with mock.patch("builtins.open", m, create=True): + iris_grib.save_messages([self.grib_message], "foo.grib2") + self.assertTrue(mock.call("foo.grib2", "wb") in m.mock_calls) def test_save_append(self): m = mock.mock_open() - with mock.patch('builtins.open', m, create=True): - iris_grib.save_messages([self.grib_message], 'foo.grib2', - append=True) - self.assertTrue(mock.call('foo.grib2', 'ab') in 
m.mock_calls) + with mock.patch("builtins.open", m, create=True): + iris_grib.save_messages([self.grib_message], "foo.grib2", append=True) + self.assertTrue(mock.call("foo.grib2", "ab") in m.mock_calls) if __name__ == "__main__": diff --git a/noxfile.py b/noxfile.py index ff2711e25..a8166b711 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,7 +25,7 @@ #: Cirrus-CI environment variable hook. PY_VER = os.environ.get("PY_VER", ["3.9", "3.10", "3.11"]) -IRIS_SOURCE = os.environ.get("IRIS_SOURCE", ['source', 'conda-forge']) +IRIS_SOURCE = os.environ.get("IRIS_SOURCE", ["source", "conda-forge"]) #: Default cartopy cache directory. CARTOPY_CACHE_DIR = os.environ.get("HOME") / Path(".local/share/cartopy") @@ -64,20 +64,18 @@ def _write_iris_config(session: nox.sessions.Session) -> None: """ try: - test_data_dir = session.posargs[ - session.posargs.index('--test-data-dir')+1 - ] + test_data_dir = session.posargs[session.posargs.index("--test-data-dir") + 1] except Exception: test_data_dir = "" iris_config_file = os.path.join( session.virtualenv.location, - 'lib', - f'python{session.python}', - 'site-packages', - 'iris', - 'etc', - 'site.cfg', + "lib", + f"python{session.python}", + "site-packages", + "iris", + "etc", + "site.cfg", ) iris_config = f""" [Resources] @@ -91,7 +89,7 @@ def _write_iris_config(session: nox.sessions.Session) -> None: print("Iris config\n-----------") print(iris_config) - with open(iris_config_file, 'w') as f: + with open(iris_config_file, "w") as f: f.write(iris_config) @@ -163,7 +161,7 @@ def _install_and_cache_venv(session: nox.sessions.Session) -> None: @contextmanager def prepare_venv( - session: nox.sessions.Session, iris_source: str = 'conda-forge' + session: nox.sessions.Session, iris_source: str = "conda-forge" ) -> None: """ Create and cache the nox session conda environment, and additionally @@ -206,7 +204,7 @@ def prepare_venv( logger.debug(f"Environment up to date: {venv_dir}") - if iris_source == 'source': + if iris_source == "source": 
# get latest iris iris_dir = f"{session.create_tmp()}/iris" @@ -219,7 +217,7 @@ def prepare_venv( "pull", "origin", "main", - external=True # use git from host environment + external=True, # use git from host environment ) else: session.run( @@ -227,9 +225,9 @@ def prepare_venv( "clone", "https://github.com/scitools/iris.git", iris_dir, - external=True + external=True, ) - session.install(iris_dir, '--no-deps') + session.install(iris_dir, "--no-deps") _cache_cartopy(session) _write_iris_config(session) @@ -252,46 +250,8 @@ def prepare_venv( ) -@nox.session -def flake8(session: nox.sessions.Session): - """ - Perform flake8 linting of iris-grib. - - Parameters - ---------- - session: object - A `nox.sessions.Session` object. - - """ - # Pip install the session requirements. - session.install("flake8") - # Execute the flake8 linter on the package. - session.run("flake8", PACKAGE) - # Execute the flake8 linter on this file. - session.run("flake8", __file__) - - -@nox.session -def black(session: nox.sessions.Session): - """ - Perform black format checking of iris-grib. - - Parameters - ---------- - session: object - A `nox.sessions.Session` object. - - """ - # Pip install the session requirements. - session.install("black==20.8b1") - # Execute the black format checker on the package. - session.run("black", "--check", PACKAGE) - # Execute the black format checker on this file. - session.run("black", "--check", __file__) - - @nox.session(python=PY_VER, venv_backend="conda") -@nox.parametrize('iris_source', IRIS_SOURCE) +@nox.parametrize("iris_source", IRIS_SOURCE) def tests(session: nox.sessions.Session, iris_source: str): """ Perform iris-grib tests against release and development versions of iris. 
diff --git a/pyproject.toml b/pyproject.toml index 5a054c517..51a40161c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,5 @@ +# See https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html + [build-system] # Defined by PEP 518 requires = [ @@ -65,7 +67,10 @@ optional-dependencies.test = {file = "requirements/test.txt"} [tool.setuptools.packages.find] include = ["iris_grib*"] +#------------------------------------------------------------------------------ + [tool.coverage.run] +# See https://coverage.readthedocs.io/en/latest/config.html branch = true source = [ "iris_grib", @@ -77,14 +82,17 @@ omit = [ [tool.coverage.report] exclude_lines = [ "pragma: no cover", + "def __repr__", "if __name__ == .__main__.:" ] [tool.codespell] +# See https://github.com/codespell-project/codespell/tree/master?tab=readme-ov-file#using-a-config-file ignore-words-list = "alpha-numeric,degreee,discontiguities,lazyness,meaned,nin" skip = "_build,*.css,*.ipynb,*.js,*.html,*.svg,*.xml,.git,generated" [tool.mypy] +# See https://mypy.readthedocs.io/en/stable/config_file.html ignore_missing_imports = true warn_unused_configs = true enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] @@ -92,8 +100,10 @@ exclude = [ 'noxfile\.py', 'docs/conf\.py' ] +strict = false # Default value, make true when introducing type hinting. [tool.pytest.ini_options] +# See https://docs.pytest.org/en/stable/reference/customize.html addopts = [ "--doctest-continue-on-failure", "--doctest-modules", @@ -103,10 +113,52 @@ addopts = [ "-v", ] doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS NUMBER" -# configure logging as recommended by repo-review +# configure settings as recommended by repo-review: log_cli = "True" log_cli_level = "INFO" minversion = "6.0" testpaths = "iris_grib" xfail_strict = "True" +[tool.ruff] +# Exclude the following, in addition to the standard set of exclusions. 
+# https://docs.astral.sh/ruff/settings/#exclude +line-length = 88 +src = [ + "iris_grib", + "docs", +] + +[tool.ruff.format] +docstring-code-format = true +preview = false + +[tool.ruff.lint] +ignore = [ + # NOTE: Non-permanent exclusions should be added to the ".ruff.toml" file. + + # flake8-commas (COM) + # https://docs.astral.sh/ruff/rules/#flake8-commas-com + "COM812", # Trailing comma missing. + "COM819", # Trailing comma prohibited. + + # flake8-implicit-str-concat (ISC) + # https://docs.astral.sh/ruff/rules/single-line-implicit-string-concatenation/ + # NOTE: This rule may cause conflicts when used with "ruff format". + "ISC001", # Implicitly concatenate string literals on one line. + ] +preview = false +select = [ + "ALL", + + # pydocstyle (D) + # https://docs.astral.sh/ruff/rules/multi-line-summary-first-line/ + "D212", # Multi-line docstring summary should start at the first line +] + +[tool.ruff.lint.isort] +force-sort-within-sections = true +known-first-party = ["iris_grib"] + +[tool.ruff.lint.pydocstyle] +convention = "numpy"