diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 050cd1f8..a5bb7531 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -7,59 +7,61 @@ on:
 jobs:
   build_wheels:
-    name: Python ${{ matrix.python-version }} wheel on ${{ matrix.os }}
+    name: build wheel
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ ubuntu-latest, windows-latest ]
+        os: [ ubuntu-latest ]
         python-version: [ '3.x' ]
     steps:
-      - name: Checkout repository
+      - name: checkout repository
         uses: actions/checkout@v2
-      - name: Install Python
+      - name: install Python
         uses: actions/setup-python@v2
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Restore cached dependencies
+      - name: load cached `~/.local`
         uses: actions/cache@v2
         with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.py') }}
-          restore-keys: ${{ runner.os }}-pip-${{ matrix.python-version }}-
-      - name: Install dependencies
-        run: pip install --user --upgrade pip setuptools wheel
-      - name: Build wheel
-        run: python setup.py bdist_wheel
-      - name: Save wheel
+          path: ~/.local
+          key: ${{ runner.os }}-python${{ matrix.python-version }}-${{ hashFiles('pyproject.toml', 'setup.*') }}
+          restore-keys: ${{ runner.os }}-python${{ matrix.python-version }}-
+      - name: build wheel
+        run: pip wheel . -w dist --no-deps
+      - name: save wheel
         uses: actions/upload-artifact@v2
         with:
+          name: build
           path: ./dist/*.whl
   build_sdist:
     name: package source
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repository
+      - name: checkout repository
         uses: actions/checkout@v2
-      - name: Install Python
+      - name: install Python
        uses: actions/setup-python@v2
-      - name: Package source
+      - name: install dependencies
+        run: pip install dunamai
+      - name: package source
         run: python setup.py sdist
-      - name: Save source package
+      - name: save source package
         uses: actions/upload-artifact@v2
         with:
+          name: build
           path: ./dist/*.tar.gz
   upload_pypi:
     name: publish to PyPI
     needs: [ build_wheels, build_sdist ]
     runs-on: ubuntu-latest
     steps:
-      - name: Retrieve wheel(s) and source
+      - name: retrieve wheel(s) and source
         uses: actions/download-artifact@v2
         with:
-          name: artifact
+          name: build
           path: dist
-      - name: Upload wheel(s) and source
-        uses: pypa/gh-action-pypi-publish@master
+      - name: upload wheel(s) and source
+        uses: pypa/gh-action-pypi-publish@v1.5.0
         with:
           user: __token__
           password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml
deleted file mode 100644
index d51ada43..00000000
--- a/.github/workflows/formatting.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: formatting
-
-on: pull_request
-
-jobs:
-  run-linters:
-    name: Run formatter
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v1
-      - name: Install dependencies
-        run: |
-          pip install wheel
-          pip install -e .[development]
-      - name: Run linters / formatters
-        uses: wearerequired/lint-action@master
-        with:
-          oitnb: true
-          auto_fix: true
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 41a11526..e2e7736f 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -3,44 +3,58 @@ name: tests
 on: [ push ]
 
 jobs:
-  tests:
-    name: Python ${{ matrix.python-version }} on ${{ matrix.os }}
-    runs-on: ${{ matrix.os }}
+  lint:
+    name: lint
+    runs-on: ubuntu-latest
     strategy:
-      fail-fast: false
       matrix:
-        os: [ ubuntu-latest ]
-        python-version: [ '3.6', '3.x' ]
+        python-version: [ '3.x' ]
     steps:
-      - name: Checkout repository
+      - name: clone repository
         uses: actions/checkout@v2
-      - name: Install Python
+      - name: install Python
         uses: actions/setup-python@v2
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Restore cached dependencies
+      - name: load cached `~/.local`
         uses: actions/cache@v2
         with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.py') }}
-          restore-keys: ${{ runner.os }}-pip-${{ matrix.python-version }}-
-      - name: Install netCDF4 library on Linux
-        if: contains(matrix.os, 'ubuntu')
-        run: sudo apt-get update -y && sudo apt-get install -y libnetcdf-dev libnetcdff-dev
-      - name: Install dependencies
-        run: pip install ".[testing,development]"
-      - name: Lint with flake8
+          path: ~/.local
+          key: ${{ runner.os }}-python${{ matrix.python-version }}-${{ hashFiles('pyproject.toml', 'setup.*') }}
+          restore-keys: ${{ runner.os }}-python${{ matrix.python-version }}-
+      - name: install dependencies
+        run: pip install ".[development]"
+      - name: lint with flake8
         run: |
           # stop the build if there are Python syntax errors or undefined names
           flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
           # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
           flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-      - name: Run tests with coverage
-        run: pytest --numprocesses auto --cov ./ --cov-report xml:coverage.xml
-      - name: Upload coverage to Codecov
-        if: matrix.python-version == '3.x'
-        uses: codecov/codecov-action@v2.1.0
+  test:
+    needs: lint
+    name: test
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ ubuntu-latest ]
+        python-version: [ '3.6', '3.x' ]
+    steps:
+      - name: clone repository
+        uses: actions/checkout@v2
+      - name: install Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: load cached `~/.local`
+        uses: actions/cache@v2
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          files: coverage.xml
-          fail_ci_if_error: true
+          path: ~/.local
+          key: ${{ runner.os }}-python${{ matrix.python-version }}-${{ hashFiles('pyproject.toml', 'setup.*') }}
+          restore-keys: ${{ runner.os }}-python${{ matrix.python-version }}-
+      - name: install `pipwin` on Windows
+        if: contains(matrix.os, 'windows')
+        run: pip install pipwin
+      - name: install dependencies
+        run: pip install ".[testing]"
+      - name: run tests
+        run: pytest --numprocesses auto
diff --git a/adcircpy/utilities.py b/adcircpy/utilities.py
index e8e5c52a..aef4f31d 100644
--- a/adcircpy/utilities.py
+++ b/adcircpy/utilities.py
@@ -5,10 +5,12 @@
 import sys
 import tarfile
 
-import wget
+import pooch
 
 
-def download_mesh(url: str, directory: PathLike, overwrite: bool = False):
+def download_mesh(
+    url: str, directory: PathLike, known_hash: str = None, overwrite: bool = False
+):
     if not isinstance(directory, Path):
         directory = Path(directory)
     if not directory.exists():
@@ -16,13 +18,19 @@ def download_mesh(url: str, directory: PathLike, overwrite: bool = False):
 
     if not (directory / 'fort.14').exists() or overwrite:
         logging.info(f'downloading mesh files to {directory}')
-        extract_download(url, directory, ['fort.13', 'fort.14'])
+        extract_download(
+            url, directory, ['fort.13', 'fort.14'], known_hash=known_hash, overwrite=overwrite
+        )
 
     return directory
 
 
 def extract_download(
-    url: str, directory: PathLike, filenames: [str] = None, overwrite: bool = False
+    url: str,
+    directory: PathLike,
+    filenames: [str] = None,
+    known_hash: str = None,
+    overwrite: bool = False,
 ):
     if not isinstance(directory, Path):
         directory = Path(directory)
@@ -35,7 +43,7 @@ def extract_download(
     temporary_filename = directory / 'temp.tar.gz'
 
     logging.debug(f'downloading {url} -> {temporary_filename}')
-    wget.download(url, f'{temporary_filename}')
+    temporary_filename = pooch.retrieve(url, known_hash=known_hash, fname=temporary_filename)
     logging.debug(f'extracting {temporary_filename} -> {directory}')
     with tarfile.open(temporary_filename) as local_file:
         if len(filenames) > 0:
@@ -49,8 +57,6 @@ def extract_download(
         else:
             local_file.extractall(directory)
 
-    os.remove(temporary_filename)
-
 
 def get_logger(
     name: str,
diff --git a/examples/example_1.py b/examples/example_1.py
index 931a6cec..41aff478 100755
--- a/examples/example_1.py
+++ b/examples/example_1.py
@@ -11,11 +11,12 @@
 INPUT_DIRECTORY = DATA_DIRECTORY / 'input' / 'shinnecock'
 OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_1'
 
-MESH_URL = 'https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1'
 MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock'
 
 download_mesh(
-    url=MESH_URL, directory=MESH_DIRECTORY,
+    url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
+    directory=MESH_DIRECTORY,
+    known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
 )
 
 # open mesh file
diff --git a/examples/example_2.py b/examples/example_2.py
index 79e63460..f0776b27 100755
--- a/examples/example_2.py
+++ b/examples/example_2.py
@@ -13,11 +13,12 @@
 INPUT_DIRECTORY = DATA_DIRECTORY / 'input'
 OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_2'
 
-MESH_URL = 'https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1'
 MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock'
 
 download_mesh(
-    url=MESH_URL, directory=MESH_DIRECTORY,
+    url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
+    directory=MESH_DIRECTORY,
+    known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
 )
 
 # open mesh file
diff --git a/examples/example_3.py b/examples/example_3.py
index 6c85a444..885a6afd 100755
--- a/examples/example_3.py
+++ b/examples/example_3.py
@@ -10,11 +10,12 @@
 INPUT_DIRECTORY = DATA_DIRECTORY / 'input'
 OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_3'
 
-MESH_URL = 'https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1'
 MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock'
 
 download_mesh(
-    url=MESH_URL, directory=MESH_DIRECTORY,
+    url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
+    directory=MESH_DIRECTORY,
+    known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
 )
 
 # open mesh file
diff --git a/examples/example_4.py b/examples/example_4.py
index 1a7742e8..b427060e 100644
--- a/examples/example_4.py
+++ b/examples/example_4.py
@@ -11,11 +11,12 @@
 INPUT_DIRECTORY = DATA_DIRECTORY / 'input'
 OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_4'
 
-MESH_URL = 'https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1'
 MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock'
 
 download_mesh(
-    url=MESH_URL, directory=MESH_DIRECTORY,
+    url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
+    directory=MESH_DIRECTORY,
+    known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
 )
 
 # open mesh file
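
Note: the `known_hash` values added to the examples above are SHA-256 digests; `pooch.retrieve` recomputes the digest of the downloaded archive and rejects the file if it does not match, instead of silently proceeding with a corrupted or truncated download as `wget.download` did. A minimal sketch of how such a digest can be produced from a local copy of the archive (the file name here is hypothetical):

```python
import hashlib


def sha256_digest(path: str, chunk_size: int = 1 << 20) -> str:
    # stream the file in chunks so large archives need not fit in memory
    digest = hashlib.sha256()
    with open(path, 'rb') as archive:
        for chunk in iter(lambda: archive.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()


print(sha256_digest('NetCDF_shinnecock_inlet.tar.bz2'))
```

`pooch.file_hash()` computes the same value, so either route should reproduce the `99d7…98f4` digest used in the examples.
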
diff --git a/examples/make_best_track_ensemble.py b/examples/make_best_track_ensemble.py
deleted file mode 100755
index 87402cd4..00000000
--- a/examples/make_best_track_ensemble.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#! /usr/bin/env python3
-"""
-Script to extract an ATCF best track dataset and modify it
-e.g., Make it more intense or larger in size
-
-Example for Sandy2012 and prescribed start and end dates
-that are valid.
-- central_pressure is decreased by 10%,
-- max_sustained_wind_speed is increased by 10%
-- radius_of_maximum_winds is increased by 10%
-
-By William Pringle, Mar 2021
-
-"""
-
-from copy import deepcopy
-from datetime import datetime, timedelta
-
-from adcircpy.forcing.winds.best_track import BestTrackForcing
-
-
-def main():
-    # set storm name
-    storm_name = 'Sandy2012'
-
-    # set simulation dates
-    start_date = datetime(2012, 10, 22)
-    end_date = start_date + timedelta(days=5)
-
-    # getting best track
-    BT = BestTrackForcing(storm_name, start_date=start_date, end_date=end_date,)
-
-    # write out original fort.22
-    BT.write('original.22', overwrite=True)
-
-    # extracting original dataframe
-    df_original = BT.df
-
-    # modifying the neccessary variables and
-    # writing each to a new fort.22
-    variable_list = ['central_pressure', 'max_sustained_wind_speed', 'radius_of_maximum_winds']
-    alpha = [0.9, 1.1, 1.1]  # the multiplier for each variable
-    for idx, var in enumerate(variable_list):
-        print(var)
-        # make a deepcopy to preserve the original dataframe
-        df_modified = deepcopy(df_original)
-        df_modified[var] = df_modified[var] * alpha[idx]
-        # reset the dataframe
-        BT._df = df_modified
-        # write out the modified fort.22
-        BT.write(var + '.22', overwrite=True)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..a0543802
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,6 @@
+[build-system]
+requires = [
+    "dunamai",
+    "setuptools",
+]
+build-backend = "setuptools.build_meta"
diff --git a/setup.cfg b/setup.cfg
index cd610f4c..963d139f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,13 +1,57 @@
 [metadata]
 name = adcircpy
-lead_developer = Zachary Burnett
-author = Jaime R Calzada, Zachary Burnett
-author_email = jreniel@gmail.com, zachary.burnett@noaa.gov
-description = Python package for working with ADCIRC input and output files.
-long_description = file: README.md
+author = Zach Burnett <zachary.burnett@noaa.gov>, Jaime R Calzada <jreniel@gmail.com>
+description = Python package for working with ADCIRC input and output files
+long_description = file:README.md
+long_description_content_type = text/markdown
 license = GPL
-url = https://github.com/jreniel/adcircpy.git
+python_requires = >=3.6
+url = https://github.com/noaa-ocs-modeling/adcircpy.git
 
-[nosetests]
-exe = True
-tests = tests/
+[options]
+install_requires =
+    appdirs
+    geopandas
+    haversine
+    matplotlib
+    netCDF4
+    numpy
+    pandas
+    paramiko
+    pooch
+    psutil
+    pyproj >= 2.6
+    requests
+    scipy
+    shapely
+    stormevents >= 1.2
+    utm
+
+[options.extras_require]
+testing =
+    pytest
+    pytest-cov
+    pytest-mock
+    pytest-socket
+    pytest-xdist
+development =
+    flake8
+    isort
+    oitnb
+documentation =
+    m2r2
+    sphinx
+    sphinx-rtd-theme
+    sphinxcontrib-programoutput
+    sphinxcontrib-bibtex
+
+[options.entry_points]
+console_scripts =
+    tidal_run = adcircpy.cmd.tidal_run:main
+    best_track_run = adcircpy.cmd.best_track_run:main
+    best_track_file = adcircpy.cmd.best_track_file:main
+    plot_mesh = adcircpy.cmd.plot_mesh:main
+    plot_maxele = adcircpy.cmd.plot_maxele:main
+    plot_fort61 = adcircpy.cmd.plot_fort61:main
+    fort63 = adcircpy.cmd.fort63:main
+    tide_gen = adcircpy.cmd.tide_gen:main
diff --git a/setup.py b/setup.py
index b77c2ccb..f3931b6e 100755
--- a/setup.py
+++ b/setup.py
@@ -1,238 +1,12 @@
-from collections.abc import Mapping
-import os
-from pathlib import Path
-import re
-import subprocess
-import sys
-
-try:
-    from importlib import metadata as importlib_metadata
-except ImportError:  # for Python<3.8
-    subprocess.run(
-        f'{sys.executable} -m pip install importlib_metadata',
-        shell=True,
-        stdout=subprocess.DEVNULL,
-        stderr=subprocess.DEVNULL,
-    )
-    import importlib_metadata
-
-from typing import List
-
-from setuptools import config, find_packages, setup
-
-DEPENDENCIES = {
-    'appdirs': [],
-    'geopandas': ['gdal', 'fiona'],
-    'haversine': [],
-    'matplotlib': [],
-    'netCDF4': [],
-    'numpy': [],
-    'pandas': [],
-    'paramiko': [],
-    'psutil': [],
-    'pyproj>=2.6': [],
-    'requests': [],
-    'scipy': [],
-    'shapely': [],
-    'stormevents>=1.2': [],
-    'utm': [],
-    'wget': [],
-}
-
-
-def installed_packages() -> List[str]:
-    installed_distributions = importlib_metadata.distributions()
-    return [
-        distribution.metadata['Name'].lower()
-        for distribution in installed_distributions
-        if distribution.metadata['Name'] is not None
-    ]
-
-
-def missing_packages(required_packages: {str: [str]}) -> {str: [str]}:
-    if isinstance(required_packages, Mapping):
-        missing_dependencies = missing_packages(list(required_packages))
-        output = {}
-        for dependency, subdependencies in required_packages.items():
-            missing_subdependencies = missing_packages(subdependencies)
-            if dependency in missing_dependencies or len(missing_subdependencies) > 0:
-                output[dependency] = missing_subdependencies
-        return output
-    else:
-        return [
-            required_package
-            for required_package in required_packages
-            if re.split('<|<=|==|>=|>', required_package)[0].lower()
-            not in installed_packages()
-        ]
+import warnings
 
+from dunamai import Version
+from setuptools import find_packages, setup
 
 try:
-    if 'dunamai' not in installed_packages():
-        subprocess.run(
-            f'{sys.executable} -m pip install dunamai',
-            shell=True,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
-        )
-
-    from dunamai import Version
-
-    version = Version.from_any_vcs().serialize()
-except (ModuleNotFoundError, RuntimeError) as error:
-    print(error)
-    version = '0.0.0'
-
-print(f'using version {version}')
-
-MISSING_DEPENDENCIES = missing_packages(DEPENDENCIES)
-
-if len(MISSING_DEPENDENCIES) > 0:
-    print(
-        f'found {len(MISSING_DEPENDENCIES)} (out of {len(DEPENDENCIES)}) missing dependencies'
-    )
-
-if (Path(sys.prefix) / 'conda-meta').exists() and len(MISSING_DEPENDENCIES) > 0:
-    print(f'found conda environment at {sys.prefix}')
-
-    conda_packages = []
-    try:
-        subprocess.check_output(
-            f'conda install -y {" ".join(MISSING_DEPENDENCIES)}',
-            shell=True,
-            stderr=subprocess.STDOUT,
-        )
-    except subprocess.CalledProcessError as error:
-        output = error.output.decode()
-        package_not_found_start = 'PackagesNotFoundError: The following packages are not available from current channels:\n\n'
-        package_not_found_stop = '\n\nCurrent channels:'
-        if package_not_found_start in output:
-            non_conda_packages = [
-                package.replace('-', '').strip()
-                for package in output[
-                    output.index(package_not_found_start) : output.index(
-                        package_not_found_stop
-                    )
-                ].splitlines()[2:]
-            ]
-            conda_packages = [
-                package
-                for package in MISSING_DEPENDENCIES
-                if package not in non_conda_packages
-            ]
-
-            print(
-                f'found {len(conda_packages)} conda packages (out of {len(MISSING_DEPENDENCIES)})'
-            )
-
-    try:
-        subprocess.run(
-            f'conda install -y {" ".join(conda_packages)}',
-            shell=True,
-            stderr=subprocess.DEVNULL,
-        )
-    except subprocess.CalledProcessError:
-        for dependency in conda_packages:
-            try:
-                subprocess.run(
-                    f'conda install -y {dependency}', shell=True, stderr=subprocess.DEVNULL,
-                )
-            except subprocess.CalledProcessError:
-                continue
-
-    MISSING_DEPENDENCIES = missing_packages(DEPENDENCIES)
-
-if os.name == 'nt' and len(MISSING_DEPENDENCIES) > 0:
-    print(f'attempting to install {len(MISSING_DEPENDENCIES)} packages with `pipwin`')
-
-    if 'pipwin' not in installed_packages():
-        subprocess.run(
-            f'{sys.executable} -m pip install pipwin',
-            shell=True,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
-        )
-    subprocess.run(f'{sys.executable} -m pipwin refresh', shell=True)
-
-    for dependency, subdependencies in MISSING_DEPENDENCIES.items():
-        failed_pipwin_packages = []
-        for _ in range(1 + len(subdependencies)):
-            for package_name in subdependencies + [dependency]:
-                if dependency in missing_packages(
-                    DEPENDENCIES
-                ) or package_name in missing_packages(subdependencies):
-                    try:
-                        subprocess.run(
-                            f'{sys.executable} -m pip install {package_name.lower()}',
-                            check=True,
-                            shell=True,
-                            stderr=subprocess.DEVNULL,
-                        )
-                        if package_name in failed_pipwin_packages:
-                            failed_pipwin_packages.remove(package_name)
-                    except subprocess.CalledProcessError:
-                        try:
-                            subprocess.run(
-                                f'{sys.executable} -m pipwin install {package_name.lower()}',
-                                check=True,
-                                shell=True,
-                                stderr=subprocess.DEVNULL,
-                            )
-                        except subprocess.CalledProcessError:
-                            failed_pipwin_packages.append(package_name)
-
-        # since we don't know the dependencies here, repeat this process n number of times
-        # (worst case is `O(n)`, where the first package is dependant on all the others)
-        if len(failed_pipwin_packages) == 0:
-            break
-
-    MISSING_DEPENDENCIES = missing_packages(DEPENDENCIES)
-
-metadata = config.read_configuration('setup.cfg')['metadata']
+    __version__ = Version.from_any_vcs().serialize()
+except RuntimeError as error:
+    warnings.warn(f'{error.__class__.__name__} - {error}')
+    __version__ = '0.0.0'
 
-setup(
-    name=metadata['name'],
-    version=version,
-    author=metadata['author'],
-    author_email=metadata['author_email'],
-    description=metadata['description'],
-    long_description=metadata['long_description'],
-    long_description_content_type='text/markdown',
-    url=metadata['url'],
-    packages=find_packages(),
-    python_requires='>=3.6',
-    setup_requires=['dunamai', 'setuptools>=41.2'],
-    install_requires=list(DEPENDENCIES),
-    # test and development dependencies
-    extras_require={
-        'testing': [
-            'FileLock',
-            'pytest',
-            'pytest-cov',
-            'pytest-mock',
-            'pytest-socket',
-            'pytest-xdist',
-        ],
-        'development': ['dunamai', 'flake8', 'isort', 'oitnb'],
-        'documentation': [
-            'dunamai',
-            'm2r2',
-            'sphinx',
-            'sphinx-rtd-theme',
-            'sphinxcontrib-programoutput',
-            'sphinxcontrib-bibtex',
-        ],
-    },
-    entry_points={
-        'console_scripts': [
-            'tidal_run=adcircpy.cmd.tidal_run:main',
-            'best_track_run=adcircpy.cmd.best_track_run:main',
-            'best_track_file=adcircpy.cmd.best_track_file:main',
-            'plot_mesh=adcircpy.cmd.plot_mesh:main',
-            'plot_maxele=adcircpy.cmd.plot_maxele:main',
-            'plot_fort61=adcircpy.cmd.plot_fort61:main',
-            'fort63=adcircpy.cmd.fort63:main',
-            'tide_gen=adcircpy.cmd.tide_gen:main',
-        ]
-    },
-)
+setup(version=__version__, packages=find_packages(exclude=('tests',)), test_suite='tests')
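
Note: with the self-bootstrapping dependency logic removed, version detection is all that remains in `setup.py`: `dunamai` derives the version from the enclosing repository's most recent VCS tag at build time, which is why it now appears in the `build-system` requirements of `pyproject.toml`. A rough sketch of the behavior (the tag and output described are illustrative, not taken from this repository):

```python
from dunamai import Version

# inspect the enclosing repository, whichever VCS it uses
version = Version.from_any_vcs()

# on a commit tagged `1.0.0` this serializes to '1.0.0'; on a later untagged
# commit it appends PEP 440 distance/commit metadata instead; outside any
# repository it raises RuntimeError, which setup.py catches above to fall
# back to '0.0.0'
print(version.serialize())
```
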
diff --git a/tests/__init__.py b/tests/__init__.py
index ff4ef45e..ae3405e1 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -4,7 +4,6 @@ import re
 
 from typing import Dict, List
 
-from filelock import FileLock
 import pytest
 
 from adcircpy.utilities import download_mesh
@@ -18,12 +17,11 @@
 @pytest.fixture
 def shinnecock_mesh_directory(worker_id) -> Path:
     mesh_directory = INPUT_DIRECTORY / 'shinnecock'
-
-    with FileLock(str(mesh_directory) + '.lock'):
-        download_mesh(
-            url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
-            directory=mesh_directory,
-        )
+    download_mesh(
+        url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
+        directory=mesh_directory,
+        known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
+    )
 
     return mesh_directory
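
Note: the fixture above drops the `FileLock` that previously serialized downloads across `pytest-xdist` workers; the hash check means a corrupted or partial archive now fails loudly rather than poisoning the cache, though parallel workers may still each trigger a download when `fort.14` is absent. For reference, a usage sketch of the updated helper outside the test suite (the target directory is hypothetical):

```python
from adcircpy.utilities import download_mesh

# downloads the archive, verifies it against the SHA-256 digest, and
# extracts fort.13 / fort.14 into the given directory; skipped entirely
# when fort.14 is already present, unless overwrite=True
mesh_directory = download_mesh(
    url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1',
    directory='mesh/shinnecock',
    known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4',
)
```
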