diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 0000000..d110d10 --- /dev/null +++ b/.codespellrc @@ -0,0 +1,9 @@ +[codespell] +skip = + *.po, + *.ts, + tests/* +count = +quiet-level = 3 +ignore-words-list = + placeholder diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..c740ef3 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +# EditorConfig: https://EditorConfig.org. Provides sensible defaults for +# non vscode editors. + +# top-most EditorConfig file +root = true + +# Every file. +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true + +indent_style = space +indent_size = 4 + +trim_trailing_whitespace = true + +# Python. (Duplicates used as placeholders) +[*.py] +indent_style = space +indent_size = 4 diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 0000000..1162ce2 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,26 @@ +--- +name: "linting - all" +on: + pull_request: + push: + branches: + - "main" +jobs: + lint: + name: "linting (python)" + runs-on: "ubuntu-latest" + steps: + - name: "Check out repository" + uses: "actions/checkout@v2" + - name: "Set up Python" + uses: "actions/setup-python@v2" + with: + python-version: "3.10" + - name: "install linting tooling" + continue-on-error: true + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements/local.txt ; pylint **/*.py + - name: "run linting via tox" + run: | + tox -e linting diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..6fbebe9 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,25 @@ +name: pyreleaser +on: + push: + tags: + - '*' +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: "checkout" + uses: actions/checkout@v4 + - name: "fetch unshallow" + run: git fetch --prune --unshallow + - name: "list" + run: pwd && ls -la + - name: "deps" + run: python -m pip install -r 
requirements/local.txt + - name: "package" + run: make package-source + - name: "list" + run: ls -la ./dist/ + - name: "release" + uses: ncipollo/release-action@v1.14.0 + with: + artifacts: "dist/*" diff --git a/.github/workflows/unit-tests-all.yml b/.github/workflows/unit-tests-all.yml new file mode 100644 index 0000000..861dcfd --- /dev/null +++ b/.github/workflows/unit-tests-all.yml @@ -0,0 +1,64 @@ +--- +name: "unit tests - all" +on: + pull_request: + push: + branches: + - "main" +jobs: + tox: + name: "Python ${{ matrix.python-version }} -- ${{ matrix.os }} " + runs-on: ${{ matrix.os }} + continue-on-error: ${{ matrix.experimental }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + python-version: ["3.9", "3.10", "3.11"] + experimental: [false] + # Include experimental or bleeding-edge releases. + # Windows is not included as it can be unreliable, e.g. + # psycopg2-binary is only released some time after a Python + # major/minor version is formally released. + # + # Uncomment below (including 'include:') when the next + # reasonable test candidate is made available: + include: + # + # Versions list: https://github.com/actions/python-versions/releases + # Example formatting: 3.11.0-alpha.1, 3.9.0-beta.8, 3.10.0-rc.3 + # + - os: ubuntu-latest + python-version: "3.12.0" + experimental: true + - os: macos-latest + python-version: "3.12.0" + experimental: true + steps: + - name: "check out repository" + uses: "actions/checkout@v2" + with: + submodules: 'true' + - name: "set up python ${{ matrix.python-version }}" + uses: "actions/setup-python@v2" + with: + python-version: "${{ matrix.python-version }}" + - name: "get pip cache dir" + id: "pip-cache" + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: "cache pip packages" + uses: "actions/cache@v2" + with: + path: "${{ steps.pip-cache.outputs.dir }}" + key: "${{ runner.os }}-pip-${{ hashFiles('**/base.txt', '**/local.txt') }}" + restore-keys: | + ${{ runner.os }}-pip- + - name: "install 
tox" + run: | + python -m pip install --upgrade pip + pip install tox + - name: "run tox" + env: + TOXENV: py3 + run: | + tox diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..53b3f7b --- /dev/null +++ b/.gitignore @@ -0,0 +1,134 @@ +# project specific files +__init__.py +log.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +tar-src/ + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000..9632db6 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,5 @@ +# Configurtion file for Markdown lint. Add exceptions here. +default: true + +# Exceptions, example given, MD045 +# MD012: false # no multiple blank-lines. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..15b7db8 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,45 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-yaml + - id: check-json + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict +- repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black +- repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + language_version: python3 +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.270 + hooks: + - id: ruff + args: ["--fix", "--show-fixes"] +- repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.35.0 + hooks: + - id: markdownlint +- repo: https://github.com/codespell-project/codespell + rev: v2.2.4 + hooks: + - id: codespell +- repo: local + hooks: + - id: pylint + name: pylint + entry: pylint + language: python + language_version: python3 + args: + [ + "-rn", # Only display messages. + "-sn", # Don't display the pylint score. 
+ "--rcfile=.pylintrc" + ] diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..949be69 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,37 @@ +# Pylint configuration. +# +# .pylintrc guide: https://github.com/PyCQA/pylint/blob/cfc393a8dff9ec09bd2fcb25857e772ae04a4991/examples/pylintrc +# + +[MAIN] +extension-pkg-whitelist= + pydantic, # binary module validation, Pydantic/Pylint recommendation. + +ignore= + LICENSE, + .pylintrc, + +ignore-patterns= + ^(.+).ini$, + ^(.+).md$, + ^(.+).sh$, + ^(.+).service$, + ^(.+).json, + ^(.+).yml, + ^(.+).yaml, + ^(.+).toml, + ^(.+).env, + ^\., + +ignore-paths= + requirements/., + tests/fixtures/vcrpy/., + Makefile, + +[MESSAGES CONTROL] + +disable = + C0301, # line-length too long, see Black documented recommendations. + C0115, # No docstring for Class. + # Pylint incorrectly picking up the below. + R0401, # Cyclic import. diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000..2dc8835 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,47 @@ +# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. +select = ["E", "F"] +ignore = [ + "E501", # line-length too long, set via black. +] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"] +unfixable = [] + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", +] + +# Same as Black. +line-length = 88 + +# Allow unused variables when underscore-prefixed. 
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +# Assume Python 3.10. +target-version = "py310" + +[mccabe] +# Unlike Flake8, default to a complexity level of 10. +max-complexity = 10 diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..4c358c0 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,48 @@ +{ + "editor.insertSpaces": true, + "editor.tabSize": 4, + "editor.rulers": [ + 79 + ], + "editor.detectIndentation": false, + "files.trimTrailingWhitespace": true, + "files.insertFinalNewline": true, + "python.linting.mypyEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.pylintEnabled": true, + "python.linting.lintOnSave": true, + "git.inputValidationSubjectLength": 50, + "git.inputValidationLength": 72, + "[git-commit]": { + "editor.rulers": [ + 50, + 72 + ] + }, + "[python]": { + "editor.rulers": [ + 72, + 79, + 120 + ], + "editor.formatOnSave": true, + "editor.defaultFormatter": "ms-python.black-formatter" + }, + "[go]": { + "editor.rulers": [ + 72, + 79 + ] + }, + "[markdown]": { + "editor.rulers": [80] + }, + "files.eol": "\n", + "cSpell.words": [ + "ADAUSD", + "gmtime", + "levelname", + "ORCFAX", + "websockets" + ] +} diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..e1c4c5d --- /dev/null +++ b/Makefile @@ -0,0 +1,37 @@ +.DEFAULT_GOAL := help + +.PHONY: clean package package-deps package-source package-upload package-wheel tar-source + +tar-source: package-deps ## Package repository as tar for easy distribution + rm -rf tar-src/ + mkdir tar-src/ + git-archive-all --prefix template/ tar-src/template-v0.0.0.tar.gz + +package-deps: ## Upgrade dependencies for packaging + python3 -m pip install -U twine wheel build git-archive-all + +package-source: package-deps clean ## Package the source code + python -m build . 
+ +package-check: clean package-source ## Check the distribution is valid + twine check dist/* + +package-upload-test: clean package-deps package-check ## Upload package to test.pypi + twine upload dist/* --repository-url https://test.pypi.org/legacy/ --verbose + +package-upload: clean package-deps package-check ## Upload package to pypi + twine upload dist/* --repository-url https://upload.pypi.org/legacy/ --verbose + +package: package-upload + +clean: ## Clean the package directory + rm -rf src/*.egg-info/ + rm -rf build/ + rm -rf dist/ + rm -rf tar-src/ + +upgrade: ## Upgrade project dependencies. + pip-upgrade + +help: ## Print this help message + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/README.md b/README.md new file mode 100644 index 0000000..187263f --- /dev/null +++ b/README.md @@ -0,0 +1,151 @@ +# pubwatch + +First-line monitoring of Orcfax expired publications. + +## Environment + +The script requires a number of environment variables to be set. + +```env +export ORCFAX_VALIDATOR= +export KUPO_URL= +export FSP_POLICY= +export VALIDITY_TOKEN= +``` + +## Connecting + +pubwatch will need to connect using `ssl` in production. If the monitor +is being used locally, a `--local` flag can be used. + +pubwatch needs a list of CER feeds available from [cer-feeds][cer-feeds-1]. + +Other command line arguments can be viewed using `--help`. + +## Running + +The script can be run from the repository, e.g.: + +```sh +python pubwatch.py --help +``` + +or once installed via the package with: + +```sh +pubwatch --help +``` + +## Cron + +Pubwatch should be run via cron at reasonable intervals from within the Orcfax +network. 
+ +## Output + +Logging will be visible to the user as follows: + + + +```log +2024-08-08 14:47:55 INFO :: feed_helper.py:29:read_feed_data() :: cer-feeds version: 2024.08.06.0002 +2024-08-08 14:47:55 INFO :: feed_helper.py:30:read_feed_data() :: number of feeds: 13 +2024-08-08 14:47:55 INFO :: pubwatch.py:286:pubwatch() :: policy: 900d528f3c1864a1376db1afc065c9b293a2235f39b00a67455a6724 +2024-08-08 14:47:59 INFO :: pubwatch.py:289:pubwatch() :: unspent datum: 75 +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '923' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IBTC-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IETH-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/MIN-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SNEK-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SHEN-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-EUR' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/ADA-EUR' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/FACT-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LQ-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/WMT-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/WMT-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/NEWM-ADA' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/NEWM-ADA' not 
being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-DJED' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USDM' delta: '2876' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '2877' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '2877' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '4708' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IBTC-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IETH-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/MIN-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SNEK-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SHEN-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-EUR' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/ADA-EUR' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/FACT-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LQ-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/WMT-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/WMT-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/NEWM-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: 
pubwatch.py:269:compare_intervals() :: feed: 'CER/NEWM-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-DJED' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USDM' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '6476' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IBTC-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/IETH-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SNEK-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/SHEN-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-EUR' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/ADA-EUR' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/FACT-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LQ-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/WMT-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/WMT-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/NEWM-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/NEWM-ADA' not being monitored +2024-08-08 14:47:59 INFO :: 
pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-DJED' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USDM' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '10077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '55238' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '64077' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '65716' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '66618' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-DJED' delta: '85676' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '85676' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USDM' delta: '85676' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '85676' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '85676' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '85802' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/MIN-ADA' delta: '87484' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '88266' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '88266' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '161276' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 
'CER/LENFI-ADA' delta: '162298' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USD' delta: '163020' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '163812' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '163932' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '164053' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/WMT-ADA' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/WMT-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/LENFI-ADA' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/NEWM-ADA' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:269:compare_intervals() :: feed: 'CER/NEWM-ADA' not being monitored +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-DJED' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-IUSD' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/ADA-USDM' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:264:compare_intervals() :: feed 'CER/HUNT-ADA' delta: '172076' +2024-08-08 14:47:59 INFO :: pubwatch.py:292:pubwatch() :: no new pairs needed on-chain... +``` + + + +If new feeds are required on-chain because they have previously expired, i.e. +their age on-chain is higher than their configured interval, then they will be +requested from the validator and published via the `validate_on_demand/` +endpoint of the validator. 
+ +[cer-feeds-1]: https://github.com/orcfax/cer-feeds diff --git a/cer-feeds.json b/cer-feeds.json new file mode 100644 index 0000000..fe825b7 --- /dev/null +++ b/cer-feeds.json @@ -0,0 +1,138 @@ +{ + "meta": { + "description": "active Orcfax CER feeds", + "version": "2024.08.06.0002" + }, + "feeds": [ + { + "pair": "ADA-IUSD", + "label": "ADA-iUSD", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "ADA-USDM", + "label": "ADA-USDM", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "ADA-DJED", + "label": "ADA-DJED", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "SHEN-ADA", + "label": "SHEN-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "MIN-ADA", + "label": "MIN-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "FACT-ADA", + "label": "FACT-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "subsidized", + "type": "CER" + }, + { + "pair": "ADA-USD", + "label": "ADA-USD", + "interval": 3600, + "deviation": 1, + "source": "cex", + "calculation": "median", + "status": "subsidized", + "type": "CER" + }, + { + "pair": "LQ-ADA", + "label": "LQ-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "SNEK-ADA", + "label": "SNEK-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "LENFI-ADA", + "label": 
"LENFI-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "HUNT-ADA", + "label": "HUNT-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "IBTC-ADA", + "label": "iBTC-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + }, + { + "pair": "IETH-ADA", + "label": "iETH-ADA", + "interval": 3600, + "deviation": 2, + "source": "dex", + "calculation": "weighted mean", + "status": "showcase", + "type": "CER" + } + ] +} diff --git a/pubwatch.py b/pubwatch.py new file mode 100644 index 0000000..1ad80ff --- /dev/null +++ b/pubwatch.py @@ -0,0 +1,16 @@ +"""Orcfax Publication Monitoring. + +A lightweight script to help ensure that CER prices are available +within hour-long windows. +""" + +from src.pubwatch import pubwatch + + +def main(): + """Primary entry point for this script.""" + pubwatch.main() + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..6195644 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,31 @@ +[project] + +name = "pubwatch" + +dynamic = ["version", "dependencies"] + +description = "First-line monitoring of expired Orcfax CER feeds" + +readme = "README.md" + +requires-python = ">=3.9" + +authors = [ + {name = "R. 
Spencer", email = "ross@orcfax.io" }, +] + +[tool.setuptools.dynamic] +dependencies = {file = ["requirements/requirements.txt"]} + +[project.urls] +"Homepage" = "https://orcfax.io" +"Source" = "https://github.com/orcfax/pubwatch" + +[project.scripts] +pubwatch = "pubwatch.pubwatch:main" + +[build-system] +requires = ["setuptools>=67.8.0", "wheel", "setuptools_scm[toml]>=7.1.0"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..11d2aaa --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +addopts = -p no:cacheprovider + +[pycodestyle] +ignore = diff --git a/requirements/local.txt b/requirements/local.txt new file mode 100644 index 0000000..a2db188 --- /dev/null +++ b/requirements/local.txt @@ -0,0 +1,8 @@ +# requirements to run locally and for development. +-r requirements.txt + +pip-upgrader==1.4.15 +pre-commit==3.8.0 +pylint==3.2.6 +pytest==8.3.2 +tox==4.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt new file mode 100644 index 0000000..37f6e49 --- /dev/null +++ b/requirements/requirements.txt @@ -0,0 +1,7 @@ +# requirements for the production project. 
+ +cbor2~=5.4.6 +certifi==2024.7.4 +pydantic==2.8.2 +requests==2.32.3 +websockets==12.0 diff --git a/src/pubwatch/__init__.py b/src/pubwatch/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/pubwatch/feed_helper.py b/src/pubwatch/feed_helper.py new file mode 100644 index 0000000..fbc10de --- /dev/null +++ b/src/pubwatch/feed_helper.py @@ -0,0 +1,37 @@ +"""Helpers for processing feed specification data.""" + +# pylint: disable=E0611,R0902 + +import json +import logging + +from pydantic.dataclasses import dataclass +from pydantic.tools import parse_obj_as + +logger = logging.getLogger(__name__) + + +@dataclass +class FeedSpec: + pair: str + label: str + interval: int + deviation: int + source: str + calculation: str + status: str + type: str = "CER" + + +async def read_feed_data(feed_data: str) -> list[FeedSpec]: + """Read feed data into memory for use in the script.""" + feed_dict = None + with open(feed_data, "r", encoding="utf-8") as json_feeds: + feed_dict = json.loads(json_feeds.read()) + logger.info("cer-feeds version: %s", feed_dict["meta"]["version"]) + logger.info("number of feeds: %s", len(feed_dict["feeds"])) + feeds = [] + for item in feed_dict["feeds"]: + feed = parse_obj_as(FeedSpec, item) + feeds.append(feed) + return feeds diff --git a/src/pubwatch/pubwatch.py b/src/pubwatch/pubwatch.py new file mode 100644 index 0000000..5610530 --- /dev/null +++ b/src/pubwatch/pubwatch.py @@ -0,0 +1,322 @@ +"""Inspect Orcfax publications posted within the last hour and +request new prices if they are needed. + +The script is intended to plug publication gaps and raise the overall +reliability of the solution. It is a front-line approach with +monitoring anticipated to pick up where pubwatch leaves off. 
+ +Feeds: https://github.com/orcfax/cer-feeds/main/feeds/cer-feeds.json +""" + +import argparse +import asyncio +import binascii +import json +import logging +import logging.handlers +import os +import ssl +import sys +import tempfile +import time +from typing import Final, Union + +import cbor2 +import certifi +import requests + +# pylint: disable=E0401 +import websockets + +try: + import feed_helper +except ModuleNotFoundError: + try: + from src.pubwatch import feed_helper + except ModuleNotFoundError: + from pubwatch import feed_helper + + +logging.basicConfig( + format="%(asctime)-15s %(levelname)s :: %(filename)s:%(lineno)s:%(funcName)s() :: %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + level="INFO", + handlers=[ + logging.handlers.WatchedFileHandler("monitor.log"), + logging.StreamHandler(), + ], +) + +logger = logging.getLogger(__name__) + + +# Get our environment variables. +VALIDATOR_URI: Final[str] = os.environ.get("ORCFAX_VALIDATOR") +KUPO_URL: Final[str] = os.environ.get("KUPO_URL") +FSP_POLICY: Final[str] = os.environ.get("FSP_POLICY") +VALIDITY_TOKEN: Final[str] = os.environ.get("VALIDITY_TOKEN") + +# Construct validator URI. +VALIDATION_REQUEST_URI: Final[str] = f"{VALIDATOR_URI}validate_on_demand/" + +# Additional vars. +SLOTFILE: Final[str] = "pubwatch_slotfile" +INTERVAL_THRESHOLD: Final[int] = 120 + + +class PubWatchException(Exception): + """Sensible exception to return if there's a problem with this + script. 
+ """ + + +def get_user_agent() -> str: + """Return a user-agent string to connect to the monitor websocket.""" + return "orcfax-pubwatch/0.0.0" + + +async def connect_to_websocket(ws_uri: str, msg_to_send: str, local: bool): + """Connect to the websocket and parse the response.""" + validator_connection = ws_uri + ssl_context = ssl.create_default_context(cafile=certifi.where()) + if local: + ssl_context = None + try: + # pylint: disable=E1101 + async with websockets.connect( + validator_connection, + user_agent_header=get_user_agent(), + ssl=ssl_context, + ) as websocket: + logger.info("connected to websocket") + await websocket.send(msg_to_send) + logger.info(msg_to_send) + msg = await websocket.recv() + try: + return json.loads(msg) + except json.JSONDecodeError: + pass + return msg + except websockets.exceptions.InvalidURI as err: + logger.error( + "ensure 'ORCFAX_VALIDATOR' environment variable is set: %s (`export ORCFAX_VALIDATOR=wss://`)", + err, + ) + sys.exit(1) + except TypeError as err: + logger.error("ensure data is sent as JSON: %s", err) + except ( + websockets.exceptions.ConnectionClosedError, + websockets.exceptions.InvalidStatusCode, + ) as err: + logger.warning( + "closed connection error '%s', attempting exponential retry: %s", + ws_uri, + err, + ) + except json.decoder.JSONDecodeError as err: + logger.error("json error decoding server response '%s': %s", msg, err) + except websockets.exceptions.ConnectionClosedOK as err: + logger.error("connection to: '%s' made: %s", ws_uri, err) + + +async def request_new_prices(pairs_to_request: dict, local: bool): + """Send a validation request to the server to ask for a new price + to be placed on-chain. 
+    """
+    validate_uri = f"{VALIDATION_REQUEST_URI}"
+    await connect_to_websocket(validate_uri, pairs_to_request, local)
+    return
+
+
+async def unwrap_cbor(data: cbor2.CBORTag, unwrapped: list) -> Union[list, dict]:
+    """Unwrap CBOR so that it renders to the API."""
+    if isinstance(data.value, dict):
+        return data.value
+    if not isinstance(data.value, list):
+        return unwrapped
+    for cbor_obj in data.value:
+        if isinstance(cbor_obj, cbor2.CBORTag):
+            nested = []
+            unwrapped.append(nested)
+            await unwrap_cbor(cbor_obj, nested)
+            continue
+        try:
+            unwrapped.append(cbor_obj.decode())
+        except AttributeError:
+            unwrapped.append(cbor_obj)
+        except UnicodeDecodeError:
+            unwrapped.append(binascii.hexlify(cbor_obj).decode())
+    return unwrapped
+
+
+async def process_cbor(data: str) -> dict:
+    """Process metadata CBOR and return a dict/json representation."""
+    dec = binascii.a2b_hex(data)
+    cbor_data = cbor2.loads(dec)
+    return cbor_data
+
+
+async def get_datum(datum_hash: str) -> list:
+    """Get the datum from Kupo."""
+    datums_url = f"{KUPO_URL}/datums/{datum_hash}"
+    datum = requests.get(datums_url, timeout=30)
+    res = datum.json()
+    cbor = await process_cbor(res["datum"])
+    unwrapped = await unwrap_cbor(cbor, [])
+    return unwrapped[0]
+
+
+async def get_latest_feed_data(fs_policy_id: str, created_after: int = 0):
+    """Get the latest feed data for processing."""
+    matches_url = (
+        f"{KUPO_URL}/matches/{fs_policy_id}.*?created_after={created_after}&unspent"
+    )
+    matches = requests.get(matches_url, timeout=30)
+    res = matches.json()
+    datum_hashes = []
+    for item in res:
+        datum_hashes.append(item["datum_hash"])
+    datum = []
+    for datum_hash in datum_hashes:
+        datum.append(await get_datum(datum_hash))
+    return datum
+
+
+async def get_policy_from_fsp(fsp_policy_id: str, validity_token_name: str):
+    """List the current policy ID from the Fact Statement Pointer.
+
+    Requires the fsp policy as input as well as the validity token
+    name.
+
+    The script will return the current fact statement policy ID.
+
+    ```sh
+    curl -s \
+        "http://:/datums/$(curl -s "http://:/matches/*?policy_id=0690081bc113f74e04640ea78a87d88abbd2f18831c44c4064524230&unspent&asset_name=000de140&order=most_recent_first" \
+        | jq -r .[].datum_hash)?unspent" \
+        | jq -r .[] | cbor-diag
+
+    ```
+
+    * Example FSP policy: `0690081bc113f74e04640ea78a87d88abbd2f18831c44c4064524230`.
+    * Example validity token name: `000de140`.
+
+    """
+    matches_url = f"{KUPO_URL}/matches/*?policy_id={fsp_policy_id}&asset_name={validity_token_name}&unspent"
+    matches = requests.get(matches_url, timeout=30)
+    res = matches.json()
+    datum_hash = res[0]["datum_hash"]
+    datums_url = f"{KUPO_URL}/datums/{datum_hash}"
+    datum = requests.get(datums_url, timeout=30)
+    res = datum.json()
+    cbor = await process_cbor(res["datum"])
+    return binascii.hexlify(cbor).decode()
+
+
+async def get_slot() -> str:
+    """Retrieve and store slot somewhere for future reference. Return
+    previous slot as a reference point for UTxO retrieval functions."""
+    health = requests.get(f"{KUPO_URL}/health", timeout=30)
+    slot = health.headers["X-Most-Recent-Checkpoint"]
+    previous_slot = "0"
+    try:
+        with open(
+            os.path.join(tempfile.gettempdir(), SLOTFILE), "r", encoding="utf-8"
+        ) as slot_file:
+            previous_slot = slot_file.read().strip()
+    except FileNotFoundError:
+        pass
+    if int(slot) <= int(previous_slot):
+        raise PubWatchException("slot hasn't changed since last update")
+    with open(
+        os.path.join(tempfile.gettempdir(), SLOTFILE), "w", encoding="utf-8"
+    ) as slot_file:
+        slot_file.write(slot)
+    return previous_slot
+
+
+def create_interval_dict(feeds: list) -> dict:
+    """Create a dict of feeds and intervals minus the interval threshold
+    which should ensure that we always have datum within an anticipated
+    window.
+ """ + intervals = {} + for feed in feeds: + intervals[f"{feed.type}/{feed.pair}"] = feed.interval - INTERVAL_THRESHOLD + return intervals + + +async def compare_intervals(intervals: dict, feed_data: list) -> list: + """Compare feed intervals with what we have on-chain and return a + list of gaps. + """ + curr_time = int(time.time()) + feeds = [] + for item in feed_data: + feed = (item[0].rsplit("/", 1)[0]).upper() + on_chain_time = int(int(item[1]) / 1000) + delta = curr_time - on_chain_time + logger.info("feed '%s' delta: '%s'", feed, delta) + try: + if delta < intervals[feed]: + feeds.append(feed) + except KeyError: + logger.info("feed: '%s' not being monitored", feed) + good_feeds = set(feeds) + all_feeds = set(intervals.keys()) + gaps = list(all_feeds - good_feeds) + to_request = [feed.split("/", 1)[1] for feed in gaps] + return to_request + + +async def pubwatch(feed_data: str, local: bool = False): + """Compare feed data with what should be published and request new + feeds to be put on-chain if they're missing. 
+ """ + _ = await get_slot() + feeds = await feed_helper.read_feed_data(feed_data=feed_data) + fs_policy_id = await get_policy_from_fsp( + fsp_policy_id=FSP_POLICY, validity_token_name=VALIDITY_TOKEN + ) + logger.info("policy: %s", fs_policy_id) + intervals = create_interval_dict(feeds) + feed_data = await get_latest_feed_data(fs_policy_id=fs_policy_id) + logger.info("unspent datum: %s", len(feed_data)) + pairs_to_request = await compare_intervals(intervals, feed_data) + if not pairs_to_request: + logger.info("no new pairs needed on-chain...") + return + logger.info("we need to request the following feeds: %s", pairs_to_request) + req = json.dumps({"feeds": pairs_to_request}) + await request_new_prices(req, local) + + +def main(): + """Primary entry point of this script.""" + + parser = argparse.ArgumentParser( + prog="pubwatch", + description="inspects prices on-chain and looks for anything not posted at the top of the last hour and publishes it", + epilog="for more information visit https://orcfax.io", + ) + + parser.add_argument( + "--local", + help="run code locally without ssl", + required=False, + action="store_true", + ) + + parser.add_argument( + "--feeds", + help="feed data describing feeds being monitored (CER-feeds (JSON))", + required=True, + ) + + args = parser.parse_args() + asyncio.run(pubwatch(feed_data=args.feeds, local=args.local)) + + +if __name__ == "__main__": + main() diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 0000000..713daa8 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,8 @@ +"""Placeholder tests.""" + +from src.pubwatch.pubwatch import main + + +def test_none(): + """Ensure the main function for the repository exists.""" + assert main diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..fb1e537 --- /dev/null +++ b/tox.ini @@ -0,0 +1,57 @@ +[tox] +envlist = py3,linting +skipsdist = true + +[testenv] +deps = -r requirements/local.txt +skip_install = true +whitelist_externals = pytest 
+commands = pytest -c pytest.ini
+
+[testenv:linting]
+basepython = python3
+deps = pre-commit
+commands = pre-commit run --all-files
+
+[testenv:linting-show]
+basepython = python3
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[flake8]
+exclude =
+    .git,
+    .tox,
+    __pycache__,
+    old,
+    build,
+    dist,
+    txt,
+    .ini,
+    .sh,
+    .github,
+    venv/
+
+application-import-names = flake8
+
+# Recommendations from Black formatting library.
+select = B,C,E,F,W,T4,B9
+ignore =
+    # Lines are too long.
+    E501
+    # Line break before binary operator.
+    W503
+    # Whitespace before ':'.
+    E203
+    # Module level import.
+    E402
+
+import-order-style = pep8
+
+[isort]
+multi_line_output = 3
+include_trailing_comma = True
+force_grid_wrap = 0
+use_parentheses = True
+ensure_newline_before_comments = True
+line_length = 88
diff --git a/validator.env b/validator.env
new file mode 100644
index 0000000..d282e06
--- /dev/null
+++ b/validator.env
@@ -0,0 +1,4 @@
+export ORCFAX_VALIDATOR=
+export KUPO_URL=
+export FSP_POLICY=
+export VALIDITY_TOKEN=