Add simple type annotations
timj committed Apr 7, 2022
1 parent e8dc1e1 commit 6d6da5f
Showing 22 changed files with 637 additions and 301 deletions.
32 changes: 32 additions & 0 deletions mypy.ini
@@ -0,0 +1,32 @@
[mypy]
warn_unused_configs = True
warn_redundant_casts = True

[mypy-astropy.*]
ignore_missing_imports = True

[mypy-lsst.*]
ignore_missing_imports = True
ignore_errors = True

[mypy-lsst.afw.fits]
ignore_missing_imports = True
ignore_errors = True

[mypy-lsst.daf.base]
ignore_missing_imports = True
ignore_errors = True

[mypy-astro_metadata_translator.*]
ignore_missing_imports = False
ignore_errors = False
disallow_untyped_defs = True
disallow_incomplete_defs = True
strict_equality = True
warn_unreachable = True
warn_unused_ignores = True

# version.py is added by scons and may not exist when we run mypy.

[mypy-astro_metadata_translator.version]
ignore_missing_imports = True
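Note: with disallow_untyped_defs and disallow_incomplete_defs enabled for astro_metadata_translator, mypy rejects any function in the package that lacks full parameter and return annotations, which is what drives the signature changes in the files below. A minimal sketch of the distinction (hypothetical functions, not part of the package):

def elevation(alt, scale=1.0):  # flagged: untyped definition
    return alt * scale

def elevation_typed(alt: float, scale: float = 1.0) -> float:  # accepted
    return alt * scale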
33 changes: 21 additions & 12 deletions python/astro_metadata_translator/bin/translateheader.py
@@ -14,13 +14,16 @@
Read file metadata from the specified files and report the translated content.
"""

from __future__ import annotations

__all__ = ("main", "process_files")

import argparse
import importlib
import logging
import sys
import traceback
from typing import IO, List, Sequence, Tuple

import yaml

@@ -58,7 +61,7 @@
)


def build_argparser():
def build_argparser() -> argparse.ArgumentParser:
"""Construct an argument parser for the ``translate_header.py`` script.
Returns
@@ -139,14 +142,14 @@ def build_argparser():


def read_file(
file,
hdrnum,
print_trace,
outstream=sys.stdout,
errstream=sys.stderr,
output_mode="verbose",
write_heading=False,
):
file: str,
hdrnum: int,
print_trace: bool,
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
output_mode: str = "verbose",
write_heading: bool = False,
) -> bool:
"""Read the specified file and process it.
Parameters
@@ -266,8 +269,14 @@ def read_file(


def process_files(
files, regex, hdrnum, print_trace, outstream=sys.stdout, errstream=sys.stderr, output_mode="auto"
):
files: Sequence[str],
regex: str,
hdrnum: int,
print_trace: bool,
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
output_mode: str = "auto",
) -> Tuple[List[str], List[str]]:
"""Read and translate metadata from the specified files.
Parameters
@@ -323,7 +332,7 @@ def process_files(
return okay, failed


def main():
def main() -> int:
"""Read metadata from the supplied files and translate the content to
standard form.
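Note: throughout this commit the outstream and errstream parameters are annotated as typing.IO, which accepts the sys.stdout and sys.stderr defaults while still allowing any other text stream to be supplied. A small usage sketch under that assumption (the report function here is illustrative, not from the package):

import io
import sys
from typing import IO

def report(message: str, outstream: IO = sys.stdout) -> bool:
    # Works with the default stdout as well as an in-memory buffer.
    print(message, file=outstream)
    return True

buffer = io.StringIO()
report("translated 3 headers", outstream=buffer)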
23 changes: 13 additions & 10 deletions python/astro_metadata_translator/bin/writeindex.py
@@ -9,12 +9,15 @@
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

from __future__ import annotations

__all__ = "write_index_files"

import json
import logging
import os
import sys
from typing import IO, List, MutableMapping, Optional, Sequence, Tuple

from ..file_helpers import find_files
from ..indexing import index_files
@@ -23,15 +26,15 @@


def write_index_files(
files,
regex,
hdrnum,
print_trace,
content_mode="translated",
outpath=None,
outstream=sys.stdout,
errstream=sys.stderr,
):
files: Sequence[str],
regex: str,
hdrnum: int,
print_trace: bool,
content_mode: str = "translated",
outpath: Optional[str] = None,
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
) -> Tuple[List[str], List[str]]:
"""Process each file and create JSON index file.
The index file will have common information in the toplevel.
@@ -87,7 +90,7 @@ def write_index_files(

failed = []
okay = []
files_per_directory = {}
files_per_directory: MutableMapping[str, List[str]] = {}

# Group each file by directory if no explicit output path
if outpath is None:
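Note: the files_per_directory annotation above illustrates a pattern repeated in this commit. An empty literal gives mypy nothing to infer element types from (it typically reports "Need type annotation"), so the container is annotated where it is created. A minimal sketch with illustrative names:

from typing import List, MutableMapping

# The annotation pins the key and value types for later appends.
files_per_directory: MutableMapping[str, List[str]] = {}
files_per_directory.setdefault("/data/raw", []).append("exposure_0001.fits")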
24 changes: 20 additions & 4 deletions python/astro_metadata_translator/bin/writesidecar.py
@@ -9,16 +9,19 @@
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

from __future__ import annotations

__all__ = ("write_sidecar_files", "write_sidecar_file")

import os
import sys
import traceback
from typing import IO, List, Sequence, Tuple

from ..file_helpers import find_files, read_file_info


def _split_ext(file):
def _split_ext(file: str) -> Tuple[str, str]:
"""Split the extension from the file name and return it and the root.
Special case handling of .gz and other compression extensions.
@@ -34,7 +37,14 @@ def _split_ext(file):
return root, ext


def write_sidecar_file(file, hdrnum, content_mode, print_trace, outstream=sys.stdout, errstream=sys.stderr):
def write_sidecar_file(
file: str,
hdrnum: int,
content_mode: str,
print_trace: bool,
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
) -> bool:
"""Write JSON summary to sidecar file.
Parameters
@@ -103,8 +113,14 @@ def write_sidecar_file(file, hdrnum, content_mode, print_trace, outstream=sys.st


def write_sidecar_files(
files, regex, hdrnum, content_mode, print_trace, outstream=sys.stdout, errstream=sys.stderr
):
files: Sequence[str],
regex: str,
hdrnum: int,
content_mode: str,
print_trace: bool,
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
) -> Tuple[List[str], List[str]]:
"""Process each file and create sidecar file.
Parameters
23 changes: 15 additions & 8 deletions python/astro_metadata_translator/cli/astrometadata.py
@@ -9,11 +9,14 @@
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

from __future__ import annotations

__all__ = ("main",)

import importlib
import logging
import os
from typing import Sequence

import click

@@ -73,21 +76,21 @@
" python module names).",
)
@click.pass_context
def main(ctx, log_level, traceback, packages):
def main(ctx: click.Context, log_level: int, traceback: bool, packages: Sequence[str]) -> None:
ctx.ensure_object(dict)

logging.basicConfig(level=log_level)

# Traceback needs to be known to subcommands
ctx.obj["TRACEBACK"] = traceback

packages = set(packages)
packages_set = set(packages)
if PACKAGES_VAR in os.environ:
new_packages = os.environ[PACKAGES_VAR].split(":")
packages.update(new_packages)
packages_set.update(new_packages)

# Process import requests
for m in packages:
for m in packages_set:
try:
importlib.import_module(m)
except (ImportError, ModuleNotFoundError):
@@ -114,7 +117,9 @@ def main(ctx, log_level, traceback, packages):
)
@regex_option
@click.pass_context
def translate(ctx, files, quiet, hdrnum, mode, regex):
def translate(
ctx: click.Context, files: Sequence[str], quiet: bool, hdrnum: int, mode: str, regex: str
) -> None:

# For quiet mode we want to translate everything but report nothing.
if quiet:
@@ -147,7 +152,7 @@
)
@regex_option
@click.pass_context
def dump(ctx, files, hdrnum, mode, regex):
def dump(ctx: click.Context, files: Sequence[str], hdrnum: int, mode: str, regex: str) -> None:

okay, failed = translate_header(files, regex, hdrnum, ctx.obj["TRACEBACK"], output_mode=mode)

@@ -167,7 +172,7 @@ def dump(ctx, files, hdrnum, mode, regex):
@regex_option
@content_option
@click.pass_context
def write_sidecar(ctx, files, hdrnum, regex, content):
def write_sidecar(ctx: click.Context, files: Sequence[str], hdrnum: int, regex: str, content: str) -> None:
okay, failed = write_sidecar_files(files, regex, hdrnum, content, ctx.obj["TRACEBACK"])

if failed:
@@ -194,7 +199,9 @@ def write_sidecar(ctx, files, hdrnum, regex, content):
" Default is to write one index per directory where files are located.",
)
@click.pass_context
def write_index(ctx, files, hdrnum, regex, content, outpath):
def write_index(
ctx: click.Context, files: Sequence[str], hdrnum: int, regex: str, content: str, outpath: str
) -> None:
okay, failed = write_index_files(
files, regex, hdrnum, ctx.obj["TRACEBACK"], content_mode=content, outpath=outpath
)
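Note: renaming packages to packages_set in main above is a common adjustment when annotating click commands. The multi-value option arrives as a sequence (click passes a tuple), so rebinding the same name to a set would change its type mid-function and trip mypy. A simplified sketch of the pattern, outside the click context and with illustrative names:

from typing import Sequence, Set

def gather_packages(packages: Sequence[str], extra: str = "") -> Set[str]:
    # Binding the set to a new name keeps `packages` as Sequence[str]
    # for the type checker instead of silently changing its type.
    packages_set = set(packages)
    if extra:
        packages_set.update(extra.split(":"))
    return packages_set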
42 changes: 28 additions & 14 deletions python/astro_metadata_translator/file_helpers.py
@@ -11,13 +11,16 @@

"""Support functions for script implementations."""

from __future__ import annotations

__all__ = ("find_files", "read_basic_metadata_from_file", "read_file_info")

import json
import os
import re
import sys
import traceback
from typing import IO, Any, Iterable, List, MutableMapping, Optional, Union

from .headers import merge_headers
from .observationInfo import ObservationInfo
@@ -26,9 +29,11 @@
# Prefer afw over Astropy
try:
import lsst.daf.base # noqa: F401 need PropertyBase for readMetadata
from lsst.afw.fits import readMetadata
from lsst.afw.fits import FitsError, readMetadata

def _read_fits_metadata(file, hdu, can_raise=False):
def _read_fits_metadata(
file: str, hdu: int, can_raise: bool = False
) -> Optional[MutableMapping[str, Any]]:
"""Read a FITS header using afw.
Parameters
@@ -54,7 +59,7 @@ def _read_fits_metadata(file, hdu, can_raise=False):
"""
try:
return readMetadata(file, hdu=hdu)
except lsst.afw.fits.FitsError as e:
except FitsError as e:
if can_raise:
# Try to convert a basic fits error code
if "(104)" in str(e):
@@ -65,7 +70,9 @@
except ImportError:
from astropy.io import fits

def _read_fits_metadata(file, hdu, can_raise=False):
def _read_fits_metadata(
file: str, hdu: int, can_raise: bool = False
) -> Optional[MutableMapping[str, Any]]:
"""Read a FITS header using astropy."""

# For detailed docstrings see the afw implementation above
Expand All @@ -83,7 +90,7 @@ def _read_fits_metadata(file, hdu, can_raise=False):
return header


def find_files(files, regex):
def find_files(files: Iterable[str], regex: str) -> List[str]:
"""Find files for processing.
Parameters
@@ -93,6 +100,11 @@ def find_files(files, regex):
regex : `str`
Regular expression string used to filter files when a directory is
scanned.
Returns
-------
found_files : `list` of `str`
The files that were found.
"""
file_regex = re.compile(regex)
found_files = []
@@ -111,7 +123,9 @@ def find_files(files, regex):
return found_files


def read_basic_metadata_from_file(file, hdrnum, errstream=sys.stderr, can_raise=True):
def read_basic_metadata_from_file(
file: str, hdrnum: int, errstream: IO = sys.stderr, can_raise: bool = True
) -> Optional[MutableMapping[str, Any]]:
"""Read a raw header from a file, merging if necessary
Parameters
@@ -174,14 +188,14 @@ def read_basic_metadata_from_file(file, hdrnum, errstream=sys.stderr, can_raise=


def read_file_info(
file,
hdrnum,
print_trace=None,
content_mode="translated",
content_type="simple",
outstream=sys.stdout,
errstream=sys.stderr,
):
file: str,
hdrnum: int,
print_trace: Optional[bool] = None,
content_mode: str = "translated",
content_type: str = "simple",
outstream: IO = sys.stdout,
errstream: IO = sys.stderr,
) -> Optional[Union[str, MutableMapping[str, Any], ObservationInfo]]:
"""Read information from file
Parameters
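Note: in file_helpers.py the afw-based and astropy-based fallbacks for _read_fits_metadata are given the same annotated signature. That matters because mypy checks conditionally defined functions, including try/except ImportError fallbacks, against each other, so keeping the two variants identical avoids a redefinition complaint. A rough sketch of the shape, with simplified bodies and an illustrative helper name:

from typing import Any, MutableMapping, Optional

try:
    from lsst.afw.fits import readMetadata

    def _read(file: str, hdu: int) -> Optional[MutableMapping[str, Any]]:
        # Preferred backend when the LSST stack is available.
        return readMetadata(file, hdu=hdu)

except ImportError:
    from astropy.io import fits

    def _read(file: str, hdu: int) -> Optional[MutableMapping[str, Any]]:
        # Fallback with the same signature so mypy accepts the redefinition.
        with fits.open(file) as fits_file:
            return dict(fits_file[hdu].header)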
