From 147cee86eedd5f91bd8f160d6dc3462b7536e36f Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Mon, 23 Aug 2021 22:29:23 -0400 Subject: [PATCH] Adds CI with sorted imports, style guide and tests (#295) --- .../check-vulnerability-whitesource.yml | 93 ----------- .github/workflows/test-bundle-workflow.yml | 35 ++++ DEVELOPER_GUIDE.md | 96 ++++++++++- bundle-workflow/.flake8 | 3 + bundle-workflow/Pipfile | 3 + bundle-workflow/Pipfile.lock | 158 +++++++++++++++++- bundle-workflow/assemble.sh | 2 +- bundle-workflow/build.sh | 2 +- bundle-workflow/pytest.ini | 3 + .../python/build_workflow/build_args.py | 51 ++++-- .../python/manifests/bundle_manifest.py | 79 --------- bundle-workflow/python/system/__init__.py | 1 - bundle-workflow/python/system/arch.py | 10 -- .../test_workflow/local_test_cluster.py | 94 ----------- bundle-workflow/sign.sh | 2 +- bundle-workflow/{python => src}/assemble.py | 31 ++-- .../assemble_workflow/bundle.py | 38 +++-- .../assemble_workflow/bundle_recorder.py | 63 ++++--- bundle-workflow/{python => src}/build.py | 46 +++-- .../src/build_workflow/build_args.py | 55 ++++++ .../build_workflow/build_recorder.py | 65 ++++--- .../{python => src}/build_workflow/builder.py | 26 ++- .../{python => src}/git/git_repository.py | 39 +++-- .../{python => src}/manifests/__init__.py | 0 .../manifests/build_manifest.py | 64 +++---- .../src/manifests/bundle_manifest.py | 82 +++++++++ .../manifests/input_manifest.py | 31 ++-- .../{python => src}/paths/script_finder.py | 51 +++--- bundle-workflow/{python => src}/sign.py | 27 +-- .../signing_workflow/signer.py | 30 +++- .../test_workflow => src/system}/__init__.py | 0 bundle-workflow/src/system/arch.py | 11 ++ .../system/temporary_directory.py | 11 +- bundle-workflow/{python => src}/test.py | 28 +++- bundle-workflow/src/test_workflow/__init__.py | 1 + .../test_workflow/integ_test_suite.py | 9 +- .../src/test_workflow/local_test_cluster.py | 105 ++++++++++++ .../test_workflow/test_cluster.py | 27 +-- 
bundle-workflow/test.sh | 2 +- bundle-workflow/tests/system/__init__.py | 4 + bundle-workflow/tests/system/test_arch.py | 19 +++ 41 files changed, 964 insertions(+), 533 deletions(-) delete mode 100644 .github/workflows/check-vulnerability-whitesource.yml create mode 100644 .github/workflows/test-bundle-workflow.yml create mode 100644 bundle-workflow/.flake8 create mode 100644 bundle-workflow/pytest.ini delete mode 100644 bundle-workflow/python/manifests/bundle_manifest.py delete mode 100644 bundle-workflow/python/system/__init__.py delete mode 100644 bundle-workflow/python/system/arch.py delete mode 100644 bundle-workflow/python/test_workflow/local_test_cluster.py rename bundle-workflow/{python => src}/assemble.py (60%) rename bundle-workflow/{python => src}/assemble_workflow/bundle.py (80%) rename bundle-workflow/{python => src}/assemble_workflow/bundle_recorder.py (59%) rename bundle-workflow/{python => src}/build.py (54%) create mode 100644 bundle-workflow/src/build_workflow/build_args.py rename bundle-workflow/{python => src}/build_workflow/build_recorder.py (50%) rename bundle-workflow/{python => src}/build_workflow/builder.py (73%) rename bundle-workflow/{python => src}/git/git_repository.py (52%) rename bundle-workflow/{python => src}/manifests/__init__.py (100%) rename bundle-workflow/{python => src}/manifests/build_manifest.py (50%) create mode 100644 bundle-workflow/src/manifests/bundle_manifest.py rename bundle-workflow/{python => src}/manifests/input_manifest.py (56%) rename bundle-workflow/{python => src}/paths/script_finder.py (53%) rename bundle-workflow/{python => src}/sign.py (64%) rename bundle-workflow/{python => src}/signing_workflow/signer.py (64%) rename bundle-workflow/{python/test_workflow => src/system}/__init__.py (100%) create mode 100644 bundle-workflow/src/system/arch.py rename bundle-workflow/{python => src}/system/temporary_directory.py (63%) rename bundle-workflow/{python => src}/test.py (68%) create mode 100644 
bundle-workflow/src/test_workflow/__init__.py rename bundle-workflow/{python => src}/test_workflow/integ_test_suite.py (54%) create mode 100644 bundle-workflow/src/test_workflow/local_test_cluster.py rename bundle-workflow/{python => src}/test_workflow/test_cluster.py (88%) create mode 100644 bundle-workflow/tests/system/__init__.py create mode 100644 bundle-workflow/tests/system/test_arch.py diff --git a/.github/workflows/check-vulnerability-whitesource.yml b/.github/workflows/check-vulnerability-whitesource.yml deleted file mode 100644 index b4533c1abe..0000000000 --- a/.github/workflows/check-vulnerability-whitesource.yml +++ /dev/null @@ -1,93 +0,0 @@ -name: WhiteSource Vulnerability Scan - -on: -# schedule: -# - cron: '30 10 * * *' - repository_dispatch: - types: [check-vulnerability-whitesource] - -jobs: - Provision-Runners: - name: Provision-Runners - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v1 - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_WSS_EC2_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_WSS_EC2_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - name: Setting up runner - run: | - RUNNERS="opensearch-wss-scan" - scripts/setup_runners.sh run $RUNNERS ${{ secrets.OPENSEARCH_RELEASE_BOT_PUBLIC_PRIVATE_READ_WRITE_TOKEN }} ami-0bd968fea932935f4 - - whitesource-vulnerability-scan: - needs: [Provision-Runners] - name: WhiteSource Vulnerability Scan - runs-on: [self-hosted, Linux, X64, opensearch-wss-scan] - outputs: - mail_content_output: ${{ steps.vulnerability_scan.outputs.mail_content }} - strategy: - fail-fast: false - matrix: - java: [14] - go-version: [1.14] - steps: - - uses: actions/checkout@v1 - - name: Setup Java - uses: actions/setup-java@v1 - with: - java-version: ${{ matrix.java }} - - name: Setup Go - uses: actions/setup-go@v2 - with: - go-version: ${{ matrix.go-version }} - - name: Vulnerability Scan - id: vulnerability_scan - env: - 
wss_apikey: ${{ secrets.WSS_API_KEY }} - run: | - export PATH=$JAVA_HOME:$PATH - cd tools/vulnerability-scan - sudo yum install -y maven - wget -q https://services.gradle.org/distributions/gradle-6.7-bin.zip - sudo mkdir /opt/gradle - sudo unzip -d /opt/gradle gradle-6.7-bin.zip - wget -qO - https://rpm.nodesource.com/setup_10.x | sudo bash - - sudo yum install -y nodejs-10.24.1 - sudo npm install --global yarn@1.22.10 - export PATH=$PATH:/opt/gradle/gradle-6.7/bin - gradle -v; mvn -v; node -v; npm -v; yarn -v - # This step is needed to avoid build failures in few plugins - # No ETA on when this dependency can be removed - git clone -b 1.0 https://github.com/opensearch-project/OpenSearch.git - cd OpenSearch - gradle -q publishToMavenLocal -Dbuild.snapshot=false - cd .. - # Run Whitesource scan - ./wss-scan.sh - - name: Store logs - uses: actions/upload-artifact@v2 - with: - name: logs - path: "tools/vulnerability-scan/*.log" - - CleanUp-Runners: - needs: [whitesource-vulnerability-scan] - if: always() - name: CleanUp-Runners - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v1 - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_WSS_EC2_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_WSS_EC2_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - name: Removing runner - run: | - RUNNERS="opensearch-wss-scan" - scripts/setup_runners.sh terminate $RUNNERS ${{ secrets.OPENSEARCH_RELEASE_BOT_PUBLIC_PRIVATE_READ_WRITE_TOKEN }} diff --git a/.github/workflows/test-bundle-workflow.yml b/.github/workflows/test-bundle-workflow.yml new file mode 100644 index 0000000000..ba3cc99bf4 --- /dev/null +++ b/.github/workflows/test-bundle-workflow.yml @@ -0,0 +1,35 @@ +name: test-bundle-workflow + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + + env: + PYTHON_VERSION: 3.7 + + defaults: + run: + working-directory: ./bundle-workflow + + steps: + - uses: actions/checkout@v2 
+ - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install Pipenv and Dependencies + run: | + python -m pip install --upgrade pipenv wheel + pipenv install --deploy --dev + - name: Check for Sorted Imports + run: | + pipenv run isort --check . + - name: Enforce Style Guide + run: | + pipenv run flake8 . + - name: Run Tests + run: | + pipenv run pytest diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index fbddf266a5..c639fc5af7 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -1,15 +1,99 @@ + + - [Developer Guide](#developer-guide) - - [Forking and Cloning](#forking-and-cloning) - - [Submitting Changes](#submitting-changes) + - [Forking and Cloning](#forking-and-cloning) + - [Install Prerequisites](#install-prerequisites) + - [Python 3.7](#python-37) + - [Pip](#pip) + - [Pipenv](#pipenv) + - [Run bundle-workflow](#run-bundle-workflow) + - [Code Linting](#code-linting) + - [Unit Tests](#unit-tests) -## Developer Guide + -So you want to contribute code to this project? Excellent! We're glad you're here. Here's what you need to do. +## Developer Guide ### Forking and Cloning Fork this repository on GitHub, and clone locally with `git clone`. -### Submitting Changes +### Install Prerequisites + +#### Python 3.7 + +Python projects in this repository, including the [bundle-workflow](./bundle-workflow) project, use Python 3.7. See the [Python Beginners Guide](https://wiki.python.org/moin/BeginnersGuide) if you have never worked with the language. + +``` +$ python3 --version +Python 3.7.11 +``` + +#### Pip + +[Pip](https://docs.python.org/3/installing/index.html) in the preferred installer program for Python3 modules. See [Pip Installation](https://pip.pypa.io/en/stable/installation/) for more details. 
+ +``` +$ pip --version +pip 20.0.2 from /usr/lib/python3/dist-packages/pip (python 3.7) +``` + +#### Pipenv + +This project uses [pipenv](https://pipenv.pypa.io/en/latest/), which is typically installed with `pip install --user pipenv`. Pipenv automatically creates and manages a virtualenv for your projects, as well as adds/removes packages from your `Pipfile` as you install/uninstall packages. It also generates the ever-important `Pipfile.lock`, which is used to produce deterministic builds. + +``` +$ pipenv --version +pipenv, version 11.9.0 +``` + +### Run bundle-workflow + +Try running `./build.sh` from [bundle-workflow](./bundle-workflow). It should complete and show usage. + +``` +$ ./build.sh +Installing dependencies in . ... +Installing dependencies from Pipfile.lock (41aca1)… + 🐍 ▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉ 14/14 — 00:00:01 +To activate this project's virtualenv, run the following: + $ pipenv shell +Running ./src/build.py ... +usage: build.py [-h] [-s] [-c COMPONENT] [--keep] manifest +build.py: error: the following arguments are required: manifest +``` + +### Code Linting + +This project uses [isort](https://github.com/PyCQA/isort) to ensure that imports are sorted, and [flake8](https://flake8.pycqa.org/en/latest/) to enforce code style. + +``` +$ pipenv run flake8 +./src/assemble_workflow/bundle_recorder.py:30:13: W503 line break before binary operator +``` + +Use `isort .` to fix any sorting order. + +``` +$ pipenv run isort . +Fixing bundle-workflow/tests/system/test_arch.py +``` + +Use [black](https://black.readthedocs.io/en/stable/) to auto-format your code. + +``` +$ pipenv run black . +All done! ✨ 🍰 ✨ +23 files left unchanged. +``` + +If your code isn't properly formatted, don't worry, [a CI workflow](./github/workflows/test-bundle-workflow.yml) will make sure to remind you. + +### Unit Tests + +This project uses [pytest](https://docs.pytest.org/en/6.2.x/) to ensure code quality. See [bundle-workflow/tests](bundle-workflow). 
-See [CONTRIBUTING](CONTRIBUTING.md). \ No newline at end of file +``` +$ pipenv run pytest +2 passed in 0.02s +``` \ No newline at end of file diff --git a/bundle-workflow/.flake8 b/bundle-workflow/.flake8 new file mode 100644 index 0000000000..9025e02751 --- /dev/null +++ b/bundle-workflow/.flake8 @@ -0,0 +1,3 @@ +[flake8] +ignore = E722 +max-line-length = 160 diff --git a/bundle-workflow/Pipfile b/bundle-workflow/Pipfile index e1e31e4e7e..e75c55d8ae 100644 --- a/bundle-workflow/Pipfile +++ b/bundle-workflow/Pipfile @@ -6,6 +6,9 @@ name = "pypi" [packages] pyyaml = "~=5.4" requests = "~=2.26" +isort = "~=5.9" +flake8 = "~=3.9" +pytest = "*" [dev-packages] diff --git a/bundle-workflow/Pipfile.lock b/bundle-workflow/Pipfile.lock index a60db12f9d..dabb6eda9c 100644 --- a/bundle-workflow/Pipfile.lock +++ b/bundle-workflow/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "aa8ff3f731dfae0464f2e052dccd660375d0492a74ada70832581568ae4462d7" + "sha256": "a140a02681b83b5cca369269df6f5dbfc8ba0b43d24e79e17d0b619a261f4869" }, "pipfile-spec": 6, "requires": { @@ -16,6 +16,124 @@ ] }, "default": { + "attrs": { + "hashes": [ + "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", + "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + ], + "version": "==21.2.0" + }, + "certifi": { + "hashes": [ + "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", + "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" + ], + "version": "==2021.5.30" + }, + "charset-normalizer": { + "hashes": [ + "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b", + "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3" + ], + "markers": "python_version >= '3'", + "version": "==2.0.4" + }, + "flake8": { + "hashes": [ + "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", + "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907" + ], 
+ "index": "pypi", + "version": "==3.9.2" + }, + "idna": { + "hashes": [ + "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a", + "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3" + ], + "markers": "python_version >= '3'", + "version": "==3.2" + }, + "importlib-metadata": { + "hashes": [ + "sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f", + "sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5" + ], + "markers": "python_version < '3.8'", + "version": "==4.6.4" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "isort": { + "hashes": [ + "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899", + "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2" + ], + "index": "pypi", + "version": "==5.9.3" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "packaging": { + "hashes": [ + "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7", + "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14" + ], + "version": "==21.0" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + ], + "version": "==1.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", + 
"sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" + ], + "version": "==2.7.0" + }, + "pyflakes": { + "hashes": [ + "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", + "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db" + ], + "version": "==2.3.1" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "version": "==2.4.7" + }, + "pytest": { + "hashes": [ + "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b", + "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890" + ], + "index": "pypi", + "version": "==6.2.4" + }, "pyyaml": { "hashes": [ "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", @@ -50,6 +168,44 @@ ], "index": "pypi", "version": "==5.4.1" + }, + "requests": { + "hashes": [ + "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24", + "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7" + ], + "index": "pypi", + "version": "==2.26.0" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "version": "==0.10.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "markers": "python_version < '3.8'", + "version": "==3.10.0.0" + }, + "urllib3": { + "hashes": [ + "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4", + "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f" + ], + "version": "==1.26.6" + }, + "zipp": { + "hashes": [ + 
"sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3", + "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4" + ], + "version": "==3.5.0" } }, "develop": {} diff --git a/bundle-workflow/assemble.sh b/bundle-workflow/assemble.sh index 26b2a5492c..db3788a821 100755 --- a/bundle-workflow/assemble.sh +++ b/bundle-workflow/assemble.sh @@ -6,4 +6,4 @@ set -e DIR="$(dirname "$0")" -"$DIR/run.sh" "$DIR/python/assemble.py" $@ +"$DIR/run.sh" "$DIR/src/assemble.py" $@ diff --git a/bundle-workflow/build.sh b/bundle-workflow/build.sh index 2a2356a92d..e9a31e24fb 100755 --- a/bundle-workflow/build.sh +++ b/bundle-workflow/build.sh @@ -6,4 +6,4 @@ set -e DIR="$(dirname "$0")" -"$DIR/run.sh" "$DIR/python/build.py" $@ +"$DIR/run.sh" "$DIR/src/build.py" $@ diff --git a/bundle-workflow/pytest.ini b/bundle-workflow/pytest.ini new file mode 100644 index 0000000000..2ab163cd57 --- /dev/null +++ b/bundle-workflow/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +testpaths = tests +addopts = -ra -q \ No newline at end of file diff --git a/bundle-workflow/python/build_workflow/build_args.py b/bundle-workflow/python/build_workflow/build_args.py index 8b22e66ce5..bb38b9aea3 100644 --- a/bundle-workflow/python/build_workflow/build_args.py +++ b/bundle-workflow/python/build_workflow/build_args.py @@ -1,21 +1,37 @@ # Copyright OpenSearch Contributors. 
# SPDX-License-Identifier: Apache-2.0 -import sys import argparse +import sys -class BuildArgs(): + +class BuildArgs: manifest: str snapshot: bool component: str keep: bool - + def __init__(self): - parser = argparse.ArgumentParser(description = "Build an OpenSearch Bundle") - parser.add_argument('manifest', type = argparse.FileType('r'), help = "Manifest file.") - parser.add_argument('-s', '--snapshot', action = 'store_true', default = False, help = "Build snapshot.") - parser.add_argument('-c', '--component', type = str, help = "Rebuild a single component.") - parser.add_argument('--keep', dest = 'keep', action='store_true', help = "Do not delete the working temporary directory.") + parser = argparse.ArgumentParser(description="Build an OpenSearch Bundle") + parser.add_argument( + "manifest", type=argparse.FileType("r"), help="Manifest file." + ) + parser.add_argument( + "-s", + "--snapshot", + action="store_true", + default=False, + help="Build snapshot.", + ) + parser.add_argument( + "-c", "--component", type=str, help="Rebuild a single component." 
+ ) + parser.add_argument( + "--keep", + dest="keep", + action="store_true", + help="Do not delete the working temporary directory.", + ) args = parser.parse_args() self.manifest = args.manifest self.snapshot = args.snapshot @@ -23,12 +39,17 @@ def __init__(self): self.keep = args.keep def script_path(self): - return sys.argv[0].replace('/python/build.py', '/build.sh') + return sys.argv[0].replace("/src/build.py", "/build.sh") def component_command(self, name): - return ' '.join(filter(None, [ - self.script_path(), - self.manifest.name, - f'--component {name}', - f'--snapshot' if self.snapshot else None - ])) + return " ".join( + filter( + None, + [ + self.script_path(), + self.manifest.name, + f"--component {name}", + "--snapshot" if self.snapshot else None, + ], + ) + ) diff --git a/bundle-workflow/python/manifests/bundle_manifest.py b/bundle-workflow/python/manifests/bundle_manifest.py deleted file mode 100644 index 5bc863a2f7..0000000000 --- a/bundle-workflow/python/manifests/bundle_manifest.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright OpenSearch Contributors. -# SPDX-License-Identifier: Apache-2.0 - -import yaml - -class BundleManifest: - ''' - A BundleManifest is an immutable view of the outputs from a assemble step - The manifest contains information about the bundle that was built (in the `assemble` section), - and the components that made up the bundle in the `components` section. - - The format for schema version 1.0 is: - schema-version: 1.0 - build: - name: string - version: string - architecture: x64 or arm64 - location: /relative/path/to/tarball - components: - - name: string - repository: URL of git repository - ref: git ref that was built (sha, branch, or tag) - commit_id: The actual git commit ID that was built (i.e. 
the resolved "ref") - location: /relative/path/to/artifact - ''' - - @staticmethod - def from_file(file): - return BundleManifest(yaml.safe_load(file)) - - def __init__(self, data): - self.version = str(data['schema-version']) - if self.version != '1.0': - raise ValueError(f'Unsupported schema version: {self.version}') - self.build = self.Build(data['build']) - self.components = list(map(lambda entry: self.Component(entry), - data['components'])) - - def to_dict(self): - return { - 'schema-version': '1.0', - 'build': self.build.to_dict(), - 'components': list(map(lambda component: component.to_dict(), - self.components)) - } - - class Build: - def __init__(self, data): - self.name = data['name'] - self.version = data['version'] - self.architecture = data['architecture'] - self.location = data['location'] - self.id = data['id'] - - def to_dict(self): - return { - 'name': self.name, - 'version': self.version, - 'architecture': self.architecture, - 'location': self.location, - 'id': self.id - } - - class Component: - def __init__(self, data): - self.name = data['name'] - self.repository = data['repository'] - self.ref = data['ref'] - self.commit_id = data['commit_id'] - self.location = data['location'] - - def to_dict(self): - return { - 'name': self.name, - 'repository': self.repository, - 'ref': self.ref, - 'commit_id': self.commit_id, - 'location': self.location - } diff --git a/bundle-workflow/python/system/__init__.py b/bundle-workflow/python/system/__init__.py deleted file mode 100644 index e1e37100e9..0000000000 --- a/bundle-workflow/python/system/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This page intentionally left blank. 
\ No newline at end of file diff --git a/bundle-workflow/python/system/arch.py b/bundle-workflow/python/system/arch.py deleted file mode 100644 index 93c3961345..0000000000 --- a/bundle-workflow/python/system/arch.py +++ /dev/null @@ -1,10 +0,0 @@ -import subprocess - -def current_arch(): - arch = subprocess.check_output(['uname', '-m']).decode().strip() - if arch == 'x86_64': - return 'x64' - elif arch == 'aarch64' or arch == 'arm64': - return 'arm64' - else: - raise ValueError(f'Unsupported architecture: {arch}') diff --git a/bundle-workflow/python/test_workflow/local_test_cluster.py b/bundle-workflow/python/test_workflow/local_test_cluster.py deleted file mode 100644 index 1a3be42084..0000000000 --- a/bundle-workflow/python/test_workflow/local_test_cluster.py +++ /dev/null @@ -1,94 +0,0 @@ -import os -import urllib.request -import subprocess -import time -import ssl -import requests -from test_workflow.test_cluster import TestCluster, ClusterCreationException - -class LocalTestCluster(TestCluster): - ''' - Represents an on-box test cluster. This class downloads a bundle (from a BundleManifest) and runs it as a background process. 
- ''' - - def __init__(self, work_dir, bundle_manifest, security_enabled): - self.manifest = bundle_manifest - self.work_dir = os.path.join(work_dir, 'local-test-cluster') - os.makedirs(self.work_dir, exist_ok = True) - self.security_enabled = security_enabled - self.process = None - - def create(self): - self.download() - self.stdout = open('stdout.txt', 'w') - self.stderr = open('stderr.txt', 'w') - dir = f'opensearch-{self.manifest.build.version}' - if not self.security_enabled: - self.disable_security(dir) - self.process = subprocess.Popen('./opensearch-tar-install.sh', cwd = dir, shell = True, stdout = self.stdout, stderr = self.stderr) - print(f'Started OpenSearch with PID {self.process.pid}') - self.wait_for_service() - - def endpoint(self): - return 'localhost' - - def port(self): - return 9200 - - def destroy(self): - if self.process is None: - print('Local test cluster is not started') - return - print(f'Sending SIGTERM to PID {self.process.pid}') - self.process.terminate() - try: - print('Waiting for process to terminate') - self.process.wait(10) - except TimeoutExpired: - print('Process did not terminate after 10 seconds. 
Sending SIGKILL') - self.process.kill() - try: - print('Waiting for process to terminate') - self.process.wait(10) - except TimeoutExpired: - print('Process failed to terminate even after SIGKILL') - raise - finally: - print(f'Process terminated with exit code {self.process.returncode}') - self.stdout.close() - self.stderr.close() - self.process = None - - def url(self, path=''): - return f'{"https" if self.security_enabled else "http"}://{self.endpoint()}:{self.port()}{path}' - - def download(self): - print(f'Creating local test cluster in {self.work_dir}') - os.chdir(self.work_dir) - print(f'Downloading bundle from {self.manifest.build.location}') - urllib.request.urlretrieve(self.manifest.build.location, 'bundle.tgz') - print(f'Downloaded bundle to {os.path.realpath("bundle.tgz")}') - - print('Unpacking') - subprocess.check_call('tar -xzf bundle.tgz', shell = True) - print('Unpacked') - - def disable_security(self, dir): - subprocess.check_call(f'echo "plugins.security.disabled: true" >> {os.path.join(dir, "config", "opensearch.yml")}', shell = True) - - def wait_for_service(self): - print('Waiting for service to become available') - url = self.url('/_cluster/health') - - for attempt in range(10): - try: - print(f'Pinging {url} attempt {attempt}') - response = requests.get(url, verify = False, auth = ('admin', 'admin')) - print(f'{response.status_code}: {response.text}') - if response.status_code == 200 and '"status":"green"' in response.text: - print('Cluster is green') - return - except requests.exceptions.ConnectionError: - print(f'Service not available yet') - time.sleep(10) - raise ClusterCreationException('Cluster is not green after 10 attempts') diff --git a/bundle-workflow/sign.sh b/bundle-workflow/sign.sh index cdbaf7a50a..bf597a713f 100755 --- a/bundle-workflow/sign.sh +++ b/bundle-workflow/sign.sh @@ -3,4 +3,4 @@ set -e DIR="$(dirname "$0")" -"$DIR/run.sh" "$DIR/python/sign.py" $@ +"$DIR/run.sh" "$DIR/src/sign.py" $@ diff --git 
a/bundle-workflow/python/assemble.py b/bundle-workflow/src/assemble.py similarity index 60% rename from bundle-workflow/python/assemble.py rename to bundle-workflow/src/assemble.py index 2a13fd5469..d929c601de 100755 --- a/bundle-workflow/python/assemble.py +++ b/bundle-workflow/src/assemble.py @@ -3,33 +3,37 @@ # Copyright OpenSearch Contributors. # SPDX-License-Identifier: Apache-2.0 -import os -import tempfile import argparse +import os import shutil +import tempfile + from assemble_workflow.bundle import Bundle from assemble_workflow.bundle_recorder import BundleRecorder from manifests.build_manifest import BuildManifest from paths.script_finder import ScriptFinder -parser = argparse.ArgumentParser(description = "Assemble an OpenSearch Bundle") -parser.add_argument('manifest', type = argparse.FileType('r'), help="Manifest file.") +parser = argparse.ArgumentParser(description="Assemble an OpenSearch Bundle") +parser.add_argument("manifest", type=argparse.FileType("r"), help="Manifest file.") args = parser.parse_args() -tarball_installation_script = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../release/tar/linux/opensearch-tar-install.sh') +tarball_installation_script = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "../../release/tar/linux/opensearch-tar-install.sh", +) if not os.path.isfile(tarball_installation_script): - print(f'No installation script found at path: {tarball_installation_script}') + print(f"No installation script found at path: {tarball_installation_script}") exit(1) script_finder = ScriptFinder() build_manifest = BuildManifest.from_file(args.manifest) build = build_manifest.build artifacts_dir = os.path.dirname(os.path.realpath(args.manifest.name)) -output_dir = os.path.join(os.getcwd(), 'bundle') +output_dir = os.path.join(os.getcwd(), "bundle") os.makedirs(output_dir, exist_ok=True) with tempfile.TemporaryDirectory() as work_dir: - print(f'Bundling {build.name} ({build.architecture}) into {output_dir} ...') 
+ print(f"Bundling {build.name} ({build.architecture}) into {output_dir} ...") os.chdir(work_dir) @@ -37,10 +41,15 @@ bundle = Bundle(build_manifest, artifacts_dir, bundle_recorder, script_finder) bundle.install_plugins() - print(f'Installed plugins: {bundle.installed_plugins}') + print(f"Installed plugins: {bundle.installed_plugins}") # Copy the tar installation script into the bundle - shutil.copyfile(tarball_installation_script, os.path.join(bundle.archive_path, os.path.basename(tarball_installation_script))) + shutil.copyfile( + tarball_installation_script, + os.path.join( + bundle.archive_path, os.path.basename(tarball_installation_script) + ), + ) # Save a copy of the manifest inside of the tar bundle_recorder.write_manifest(bundle.archive_path) @@ -48,4 +57,4 @@ bundle_recorder.write_manifest(output_dir) -print(f'Done.') +print("Done.") diff --git a/bundle-workflow/python/assemble_workflow/bundle.py b/bundle-workflow/src/assemble_workflow/bundle.py similarity index 80% rename from bundle-workflow/python/assemble_workflow/bundle.py rename to bundle-workflow/src/assemble_workflow/bundle.py index 4b6f48bbc9..eb69af9c6f 100644 --- a/bundle-workflow/python/assemble_workflow/bundle.py +++ b/bundle-workflow/src/assemble_workflow/bundle.py @@ -2,20 +2,19 @@ # SPDX-License-Identifier: Apache-2.0 import os -import tarfile -import tempfile import shutil import subprocess +import tarfile +import tempfile -''' +""" This class is responsible for executing the build of the full bundle and passing results to a bundle recorder. -It requires a min tarball distribution where plugins will be installed and the path to an artifacts directory where +It requires a min tarball distribution where plugins will be installed and the path to an artifacts directory where plugins can be found. -''' +""" class Bundle: - def __init__(self, build_manifest, artifacts_dir, bundle_recorder, script_finder): """ Construct a new Bundle instance. 
@@ -33,20 +32,24 @@ def __init__(self, build_manifest, artifacts_dir, bundle_recorder, script_finder tmp_path = self.add_component(self.min_tarball, "bundle") self.unpack(tmp_path, self.tmp_dir.name) # OpenSearch & Dashboard tars will include only a single folder at the top level of the tar - self.archive_path = next(iter([file.path for file in os.scandir(self.tmp_dir.name) if file.is_dir()])) + self.archive_path = next( + iter([file.path for file in os.scandir(self.tmp_dir.name) if file.is_dir()]) + ) def install_plugins(self): for plugin in self.plugins: - print(f'Installing {plugin.name}') + print(f"Installing {plugin.name}") self.install_plugin(plugin) - self.installed_plugins = os.listdir(os.path.join(self.archive_path, 'plugins')) + self.installed_plugins = os.listdir(os.path.join(self.archive_path, "plugins")) def install_plugin(self, plugin): tmp_path = self.add_component(plugin, "plugins") - cli_path = os.path.join(self.archive_path, 'bin/opensearch-plugin') - self.execute(f'{cli_path} install --batch file:{tmp_path}') + cli_path = os.path.join(self.archive_path, "bin/opensearch-plugin") + self.execute(f"{cli_path} install --batch file:{tmp_path}") post_install_script = self.script_finder.find_install_script(plugin.name) - self.execute(f'{post_install_script} -a "{self.artifacts_dir}" -o "{self.archive_path}"') + self.execute( + f'{post_install_script} -a "{self.artifacts_dir}" -o "{self.archive_path}"' + ) def add_component(self, component, component_type): rel_path = self.get_rel_path(component, component_type) @@ -79,14 +82,15 @@ def copy_component(self, rel_path, dest): shutil.copyfile(local_path, dest_path) return os.path.join(dest, os.path.basename(local_path)) else: - raise ValueError(f'No file found at path: {local_path}') + raise ValueError(f"No file found at path: {local_path}") def get_plugins(self, build_components): return [c for c in build_components if "plugins" in c.artifacts] def get_min_bundle(self, build_components): - min_bundle = 
next(iter([c for c in build_components if "bundle" in c.artifacts]), None) - if min_bundle == None: - raise ValueError(f'Missing min "bundle" in input artifacts.') + min_bundle = next( + iter([c for c in build_components if "bundle" in c.artifacts]), None + ) + if min_bundle is None: + raise ValueError('Missing min "bundle" in input artifacts.') return min_bundle - diff --git a/bundle-workflow/python/assemble_workflow/bundle_recorder.py b/bundle-workflow/src/assemble_workflow/bundle_recorder.py similarity index 59% rename from bundle-workflow/python/assemble_workflow/bundle_recorder.py rename to bundle-workflow/src/assemble_workflow/bundle_recorder.py index 617b28131e..981bb02a7b 100644 --- a/bundle-workflow/python/assemble_workflow/bundle_recorder.py +++ b/bundle-workflow/src/assemble_workflow/bundle_recorder.py @@ -2,23 +2,32 @@ # SPDX-License-Identifier: Apache-2.0 import os +from urllib.parse import urljoin + import yaml + from manifests.bundle_manifest import BundleManifest -from urllib.parse import urljoin class BundleRecorder: def __init__(self, build, output_dir, artifacts_dir): self.output_dir = output_dir self.build_id = build.id - self.public_url = os.getenv('PUBLIC_ARTIFACT_URL', None) + self.public_url = os.getenv("PUBLIC_ARTIFACT_URL", None) self.version = build.version self.tar_name = self.get_tar_name(build) self.artifacts_dir = artifacts_dir - self.bundle_manifest = self.BundleManifestBuilder(build.id, build.name, build.version, build.architecture, self.get_tar_location()) + self.bundle_manifest = self.BundleManifestBuilder( + build.id, + build.name, + build.version, + build.architecture, + self.get_tar_location(), + ) def get_tar_name(self, build): - return "-".join([build.name.lower(), build.version, 'linux', build.architecture]) + '.tar.gz' + parts = [build.name.lower(), build.version, "linux", build.architecture] + return "-".join(parts) + ".tar.gz" def get_public_url_path(self, folder, rel_path): path = "{}/{}/{}/{}".format(folder, 
self.version, self.build_id, rel_path) @@ -32,7 +41,9 @@ def get_location(self, folder_name, rel_path, abs_path): # Assembled bundles are expected to be served from a separate "bundles" folder # Example: https://artifacts.opensearch.org/bundles/1.0.0/ @@ -41,42 +52,46 @@ def get_component_location(self, component_rel_path): return self.get_location("builds", component_rel_path, abs_path) def record_component(self, component, rel_path): - abs_path = os.path.join(self.artifacts_dir, rel_path) - self.bundle_manifest.append_component(component.name, component.repository, component.ref, - component.commit_id, self.get_component_location(rel_path)) + self.bundle_manifest.append_component( + component.name, + component.repository, + component.ref, + component.commit_id, + self.get_component_location(rel_path), + ) def get_manifest(self): return self.bundle_manifest.to_manifest() def write_manifest(self, folder): output_manifest = self.get_manifest() - manifest_path = os.path.join(folder, 'manifest.yml') - with open(manifest_path, 'w') as file: + manifest_path = os.path.join(folder, "manifest.yml") + with open(manifest_path, "w") as file: yaml.dump(output_manifest.to_dict(), file) class BundleManifestBuilder: def __init__(self, build_id, name, version, arch, location): self.data = {} - self.data['build'] = {} - self.data['build']['id'] = build_id - self.data['build']['name'] = name - self.data['build']['version'] = str(version) - self.data['build']['architecture'] = arch - self.data['build']['location'] = location - self.data['schema-version'] = '1.0' + self.data["build"] = {} + self.data["build"]["id"] = build_id + self.data["build"]["name"] = name + self.data["build"]["version"] = str(version) + self.data["build"]["architecture"] = arch + self.data["build"]["location"] = location + self.data["schema-version"] = "1.0" # We need to store components as a hash so that we can append artifacts by component name # When we convert to a BundleManifest this will get converted back 
into a list - self.data['components'] = [] + self.data["components"] = [] def append_component(self, name, repository_url, ref, commit_id, location): component = { - 'name': name, - 'repository': repository_url, - 'ref': ref, - 'commit_id': commit_id, - 'location': location, + "name": name, + "repository": repository_url, + "ref": ref, + "commit_id": commit_id, + "location": location, } - self.data['components'].append(component) + self.data["components"].append(component) def to_manifest(self): return BundleManifest(self.data) diff --git a/bundle-workflow/python/build.py b/bundle-workflow/src/build.py similarity index 54% rename from bundle-workflow/python/build.py rename to bundle-workflow/src/build.py index 6b52000d24..698fce9c17 100755 --- a/bundle-workflow/python/build.py +++ b/bundle-workflow/src/build.py @@ -5,49 +5,61 @@ import os import uuid -from system.arch import current_arch -from system.temporary_directory import TemporaryDirectory -from manifests.input_manifest import InputManifest + +from build_workflow.build_args import BuildArgs from build_workflow.build_recorder import BuildRecorder from build_workflow.builder import Builder -from build_workflow.build_args import BuildArgs -from paths.script_finder import ScriptFinder from git.git_repository import GitRepository +from manifests.input_manifest import InputManifest +from paths.script_finder import ScriptFinder +from system.arch import current_arch +from system.temporary_directory import TemporaryDirectory args = BuildArgs() script_finder = ScriptFinder() arch = current_arch() manifest = InputManifest.from_file(args.manifest) -output_dir = os.path.join(os.getcwd(), 'artifacts') -os.makedirs(output_dir, exist_ok = True) -build_id = os.getenv('OPENSEARCH_BUILD_ID', uuid.uuid4().hex) +output_dir = os.path.join(os.getcwd(), "artifacts") +os.makedirs(output_dir, exist_ok=True) +build_id = os.getenv("OPENSEARCH_BUILD_ID", uuid.uuid4().hex) -with TemporaryDirectory(keep = args.keep) as work_dir: - 
print(f'Building in {work_dir}') +with TemporaryDirectory(keep=args.keep) as work_dir: + print(f"Building in {work_dir}") os.chdir(work_dir) - build_recorder = BuildRecorder(build_id, output_dir, manifest.build.name, manifest.build.version, arch, args.snapshot) + build_recorder = BuildRecorder( + build_id, + output_dir, + manifest.build.name, + manifest.build.version, + arch, + args.snapshot, + ) - print(f'Building {manifest.build.name} ({arch}) into {output_dir}') + print(f"Building {manifest.build.name} ({arch}) into {output_dir}") for component in manifest.components: if args.component and args.component != component.name: - print(f'\nSkipping {component.name}') + print(f"\nSkipping {component.name}") continue - print(f'\nBuilding {component.name}') - repo = GitRepository(component.repository, component.ref, os.path.join(work_dir, component.name)) + print(f"\nBuilding {component.name}") + repo = GitRepository( + component.repository, component.ref, os.path.join(work_dir, component.name) + ) try: builder = Builder(component.name, repo, script_finder, build_recorder) builder.build(manifest.build.version, arch, args.snapshot) builder.export_artifacts() except: - print(f'\nError building {component.name}, retry with\n\n\t{args.component_command(component.name)}\n') + print( + f"\nError building {component.name}, retry with\n\n\t{args.component_command(component.name)}\n" + ) raise build_recorder.write_manifest(output_dir) -print('Done.') +print("Done.") diff --git a/bundle-workflow/src/build_workflow/build_args.py b/bundle-workflow/src/build_workflow/build_args.py new file mode 100644 index 0000000000..bb38b9aea3 --- /dev/null +++ b/bundle-workflow/src/build_workflow/build_args.py @@ -0,0 +1,55 @@ +# Copyright OpenSearch Contributors. 
+# SPDX-License-Identifier: Apache-2.0 + +import argparse +import sys + + +class BuildArgs: + manifest: str + snapshot: bool + component: str + keep: bool + + def __init__(self): + parser = argparse.ArgumentParser(description="Build an OpenSearch Bundle") + parser.add_argument( + "manifest", type=argparse.FileType("r"), help="Manifest file." + ) + parser.add_argument( + "-s", + "--snapshot", + action="store_true", + default=False, + help="Build snapshot.", + ) + parser.add_argument( + "-c", "--component", type=str, help="Rebuild a single component." + ) + parser.add_argument( + "--keep", + dest="keep", + action="store_true", + help="Do not delete the working temporary directory.", + ) + args = parser.parse_args() + self.manifest = args.manifest + self.snapshot = args.snapshot + self.component = args.component + self.keep = args.keep + + def script_path(self): + return sys.argv[0].replace("/src/build.py", "/build.sh") + + def component_command(self, name): + return " ".join( + filter( + None, + [ + self.script_path(), + self.manifest.name, + f"--component {name}", + "--snapshot" if self.snapshot else None, + ], + ) + ) diff --git a/bundle-workflow/python/build_workflow/build_recorder.py b/bundle-workflow/src/build_workflow/build_recorder.py similarity index 50% rename from bundle-workflow/python/build_workflow/build_recorder.py rename to bundle-workflow/src/build_workflow/build_recorder.py index 79bffea845..0b0b5f33da 100644 --- a/bundle-workflow/python/build_workflow/build_recorder.py +++ b/bundle-workflow/src/build_workflow/build_recorder.py @@ -3,63 +3,78 @@ import os import shutil + import yaml + from manifests.build_manifest import BuildManifest + class BuildRecorder: def __init__(self, build_id, output_dir, name, version, arch, snapshot): self.output_dir = output_dir - self.build_manifest = self.BuildManifestBuilder(build_id, name, version, arch, snapshot) + self.build_manifest = self.BuildManifestBuilder( + build_id, name, version, arch, snapshot + ) def 
record_component(self, component_name, git_repo): - self.build_manifest.append_component(component_name, git_repo.url, git_repo.ref, git_repo.sha) + self.build_manifest.append_component( + component_name, git_repo.url, git_repo.ref, git_repo.sha + ) - def record_artifact(self, component_name, artifact_type, artifact_path, artifact_file): - print(f'Recording {artifact_type} artifact for {component_name}: {artifact_path} (from {artifact_file})') + def record_artifact( + self, component_name, artifact_type, artifact_path, artifact_file + ): + print( + f"Recording {artifact_type} artifact for {component_name}: {artifact_path} (from {artifact_file})" + ) # Ensure the target directory exists dest_file = os.path.join(self.output_dir, artifact_path) dest_dir = os.path.dirname(dest_file) - os.makedirs(dest_dir, exist_ok = True) + os.makedirs(dest_dir, exist_ok=True) # Copy the file shutil.copyfile(artifact_file, dest_file) # Notify the recorder - self.build_manifest.append_artifact(component_name, artifact_type, artifact_path) + self.build_manifest.append_artifact( + component_name, artifact_type, artifact_path + ) def get_manifest(self): return self.build_manifest.to_manifest() def write_manifest(self, folder): output_manifest = self.get_manifest() - manifest_path = os.path.join(folder, 'manifest.yml') - with open(manifest_path, 'w') as file: + manifest_path = os.path.join(folder, "manifest.yml") + with open(manifest_path, "w") as file: yaml.dump(output_manifest.to_dict(), file) class BuildManifestBuilder: def __init__(self, build_id, name, version, arch, snapshot): self.data = {} - self.data['build'] = {} - self.data['build']['id'] = build_id - self.data['build']['name'] = name - self.data['build']['version'] = (str(version) + '-SNAPSHOT' if snapshot else str(version)) - self.data['build']['architecture'] = arch - self.data['build']['snapshot'] = str(snapshot).lower() - self.data['schema-version'] = '1.0' + self.data["build"] = {} + self.data["build"]["id"] = build_id + 
self.data["build"]["name"] = name + self.data["build"]["version"] = ( + str(version) + "-SNAPSHOT" if snapshot else str(version) + ) + self.data["build"]["architecture"] = arch + self.data["build"]["snapshot"] = str(snapshot).lower() + self.data["schema-version"] = "1.0" # We need to store components as a hash so that we can append artifacts by component name # When we convert to a BuildManifest this will get converted back into a list - self.data['components_hash'] = {} + self.data["components_hash"] = {} def append_component(self, name, repository_url, ref, commit_id): component = { - 'name': name, - 'repository': repository_url, - 'ref': ref, - 'commit_id': commit_id, - 'artifacts': {} + "name": name, + "repository": repository_url, + "ref": ref, + "commit_id": commit_id, + "artifacts": {}, } - self.data['components_hash'][name] = component + self.data["components_hash"][name] = component def append_artifact(self, component, type, path): - artifacts = self.data['components_hash'][component]['artifacts'] + artifacts = self.data["components_hash"][component]["artifacts"] list = artifacts.get(type, []) if len(list) == 0: artifacts[type] = list @@ -67,6 +82,6 @@ def append_artifact(self, component, type, path): def to_manifest(self): # The build manifest expects `components` to be a list, not a hash, so we need to munge things a bit - components = self.data['components_hash'].values() - self.data['components'] = components + components = self.data["components_hash"].values() + self.data["components"] = components return BuildManifest(self.data) diff --git a/bundle-workflow/python/build_workflow/builder.py b/bundle-workflow/src/build_workflow/builder.py similarity index 73% rename from bundle-workflow/python/build_workflow/builder.py rename to bundle-workflow/src/build_workflow/builder.py index c43821e09c..cb9f818ff3 100644 --- a/bundle-workflow/python/build_workflow/builder.py +++ b/bundle-workflow/src/build_workflow/builder.py @@ -3,38 +3,46 @@ import os -''' +""" 
This class is responsible for executing the build for a component and passing the results to a build recorder. It will notify the build recorder of build information such as repository and git ref, and any artifacts generated by the build. Artifacts found in "/artifacts/" will be recognized and recorded. -''' +""" + + class Builder: def __init__(self, component_name, git_repo, script_finder, build_recorder): - ''' + """ Construct a new Builder instance. :param component_name: The name of the component to build. :param git_repo: A GitRepository instance containing the checked-out code. :param script_finder: The ScriptFinder to use for finding build.sh scripts. :param build_recorder: The build recorder that will capture build information and artifacts. - ''' + """ self.component_name = component_name self.git_repo = git_repo self.script_finder = script_finder self.build_recorder = build_recorder - self.output_path = 'artifacts' + self.output_path = "artifacts" def build(self, version, arch, snapshot): - build_script = self.script_finder.find_build_script(self.component_name, self.git_repo.dir) - build_command = f'{build_script} -v {version} -a {arch} -s {str(snapshot).lower()} -o {self.output_path}' + build_script = self.script_finder.find_build_script( + self.component_name, self.git_repo.dir + ) + build_command = f"{build_script} -v {version} -a {arch} -s {str(snapshot).lower()} -o {self.output_path}" self.git_repo.execute(build_command) self.build_recorder.record_component(self.component_name, self.git_repo) def export_artifacts(self): - artifacts_dir = os.path.realpath(os.path.join(self.git_repo.dir, self.output_path)) + artifacts_dir = os.path.realpath( + os.path.join(self.git_repo.dir, self.output_path) + ) for artifact_type in ["maven", "bundle", "plugins", "libs", "core-plugins"]: for dir, dirs, files in os.walk(os.path.join(artifacts_dir, artifact_type)): for file_name in files: absolute_path = os.path.join(dir, file_name) relative_path = 
os.path.relpath(absolute_path, artifacts_dir) - self.build_recorder.record_artifact(self.component_name, artifact_type, relative_path, absolute_path) + self.build_recorder.record_artifact( + self.component_name, artifact_type, relative_path, absolute_path + ) diff --git a/bundle-workflow/python/git/git_repository.py b/bundle-workflow/src/git/git_repository.py similarity index 52% rename from bundle-workflow/python/git/git_repository.py rename to bundle-workflow/src/git/git_repository.py index 018681f438..be63073cd5 100644 --- a/bundle-workflow/python/git/git_repository.py +++ b/bundle-workflow/src/git/git_repository.py @@ -5,13 +5,15 @@ import subprocess import tempfile + class GitRepository: - ''' + """ This class checks out a Git repository at a particular ref into an empty named directory (or temporary a directory if no named directory is given). Temporary directories will be automatically deleted when the GitRepository object goes out of scope; named directories will be left alone. Clients can obtain the actual commit ID by querying the "sha" attribute, and the temp directory name with "dir". 
- ''' - def __init__(self, url, ref, directory = None): + """ + + def __init__(self, url, ref, directory=None): self.url = url self.ref = ref if directory is None: @@ -19,23 +21,32 @@ def __init__(self, url, ref, directory = None): self.dir = self.temp_dir.name else: self.dir = directory - os.makedirs(self.dir, exist_ok = False) + os.makedirs(self.dir, exist_ok=False) # Check out the repository - self.execute(f'git init', True) - self.execute(f'git remote add origin {self.url}', True) - self.execute(f'git fetch --depth 1 origin {self.ref}', True) - self.execute(f'git checkout FETCH_HEAD', True) - self.sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd = self.dir).decode().strip() - print(f'Checked out {self.url}@{self.ref} into {self.dir} at {self.sha}') + self.execute("git init", True) + self.execute(f"git remote add origin {self.url}", True) + self.execute(f"git fetch --depth 1 origin {self.ref}", True) + self.execute("git checkout FETCH_HEAD", True) + self.sha = ( + subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=self.dir) + .decode() + .strip() + ) + print(f"Checked out {self.url}@{self.ref} into {self.dir} at {self.sha}") - def execute(self, command, silent = False, subdirname = None): + def execute(self, command, silent=False, subdirname=None): dirname = self.dir if subdirname: dirname = os.path.join(self.dir, subdirname) print(f'Executing "{command}" in {dirname}') if silent: - subprocess.check_call(command, cwd = dirname, shell = True, stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL) + subprocess.check_call( + command, + cwd=dirname, + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) else: - subprocess.check_call(command, cwd = dirname, shell = True) - + subprocess.check_call(command, cwd=dirname, shell=True) diff --git a/bundle-workflow/python/manifests/__init__.py b/bundle-workflow/src/manifests/__init__.py similarity index 100% rename from bundle-workflow/python/manifests/__init__.py rename to 
bundle-workflow/src/manifests/__init__.py diff --git a/bundle-workflow/python/manifests/build_manifest.py b/bundle-workflow/src/manifests/build_manifest.py similarity index 50% rename from bundle-workflow/python/manifests/build_manifest.py rename to bundle-workflow/src/manifests/build_manifest.py index f4c037c794..68806dc2d8 100644 --- a/bundle-workflow/python/manifests/build_manifest.py +++ b/bundle-workflow/src/manifests/build_manifest.py @@ -3,7 +3,7 @@ import yaml -''' +""" A BuildManifest is an immutable view of the outputs from a build step The manifest contains information about the product that was built (in the `build` section), and the components that made up the build in the `components` section. @@ -30,56 +30,60 @@ - libs/relative/path/to/artifact - ... - ... -''' +""" + + class BuildManifest: @staticmethod def from_file(file): return BuildManifest(yaml.safe_load(file)) def __init__(self, data): - self.version = str(data['schema-version']) - if self.version != '1.0': - raise ValueError(f'Unsupported schema version: {self.version}') - self.build = self.Build(data['build']) - self.components = list(map(lambda entry: self.Component(entry), - data['components'])) + self.version = str(data["schema-version"]) + if self.version != "1.0": + raise ValueError(f"Unsupported schema version: {self.version}") + self.build = self.Build(data["build"]) + self.components = list( + map(lambda entry: self.Component(entry), data["components"]) + ) def to_dict(self): return { - 'schema-version': '1.0', - 'build': self.build.to_dict(), - 'components': list(map(lambda component: component.to_dict(), - self.components)) + "schema-version": "1.0", + "build": self.build.to_dict(), + "components": list( + map(lambda component: component.to_dict(), self.components) + ), } class Build: def __init__(self, data): - self.name = data['name'] - self.version = data['version'] - self.architecture = data['architecture'] - self.id = data['id'] + self.name = data["name"] + self.version = 
data["version"] + self.architecture = data["architecture"] + self.id = data["id"] def to_dict(self): return { - 'name': self.name, - 'version': self.version, - 'architecture': self.architecture, - 'id': self.id + "name": self.name, + "version": self.version, + "architecture": self.architecture, + "id": self.id, } class Component: def __init__(self, data): - self.name = data['name'] - self.repository = data['repository'] - self.ref = data['ref'] - self.commit_id = data['commit_id'] - self.artifacts = data['artifacts'] + self.name = data["name"] + self.repository = data["repository"] + self.ref = data["ref"] + self.commit_id = data["commit_id"] + self.artifacts = data["artifacts"] def to_dict(self): return { - 'name': self.name, - 'repository': self.repository, - 'ref': self.ref, - 'commit_id': self.commit_id, - 'artifacts': self.artifacts + "name": self.name, + "repository": self.repository, + "ref": self.ref, + "commit_id": self.commit_id, + "artifacts": self.artifacts, } diff --git a/bundle-workflow/src/manifests/bundle_manifest.py b/bundle-workflow/src/manifests/bundle_manifest.py new file mode 100644 index 0000000000..727d2f5604 --- /dev/null +++ b/bundle-workflow/src/manifests/bundle_manifest.py @@ -0,0 +1,82 @@ +# Copyright OpenSearch Contributors. +# SPDX-License-Identifier: Apache-2.0 + +import yaml + + +class BundleManifest: + """ + A BundleManifest is an immutable view of the outputs from a assemble step + The manifest contains information about the bundle that was built (in the `assemble` section), + and the components that made up the bundle in the `components` section. + + The format for schema version 1.0 is: + schema-version: 1.0 + build: + name: string + version: string + architecture: x64 or arm64 + location: /relative/path/to/tarball + components: + - name: string + repository: URL of git repository + ref: git ref that was built (sha, branch, or tag) + commit_id: The actual git commit ID that was built (i.e. 
the resolved "ref") + location: /relative/path/to/artifact + """ + + @staticmethod + def from_file(file): + return BundleManifest(yaml.safe_load(file)) + + def __init__(self, data): + self.version = str(data["schema-version"]) + if self.version != "1.0": + raise ValueError(f"Unsupported schema version: {self.version}") + self.build = self.Build(data["build"]) + self.components = list( + map(lambda entry: self.Component(entry), data["components"]) + ) + + def to_dict(self): + return { + "schema-version": "1.0", + "build": self.build.to_dict(), + "components": list( + map(lambda component: component.to_dict(), self.components) + ), + } + + class Build: + def __init__(self, data): + self.name = data["name"] + self.version = data["version"] + self.architecture = data["architecture"] + self.location = data["location"] + self.id = data["id"] + + def to_dict(self): + return { + "name": self.name, + "version": self.version, + "architecture": self.architecture, + "location": self.location, + "id": self.id, + } + + class Component: + def __init__(self, data): + self.name = data["name"] + self.repository = data["repository"] + self.ref = data["ref"] + self.commit_id = data["commit_id"] + self.location = data["location"] + + def to_dict(self): + return { + "name": self.name, + "repository": self.repository, + "ref": self.ref, + "commit_id": self.commit_id, + "location": self.location, + } diff --git a/bundle-workflow/python/manifests/input_manifest.py b/bundle-workflow/src/manifests/input_manifest.py similarity index 56% rename from bundle-workflow/python/manifests/input_manifest.py rename to bundle-workflow/src/manifests/input_manifest.py index 2a3990ad81..737d6dade8 100644 --- a/bundle-workflow/python/manifests/input_manifest.py +++ b/bundle-workflow/src/manifests/input_manifest.py @@ -3,7 +3,7 @@ import yaml -''' +""" An InputManifest is an immutable view of the input manifest for the build system. 
The manifest contains information about the product that is being built (in the `build` section), and the components that make up the product in the `components` section. @@ -18,27 +18,30 @@ repository: URL of git repository ref: git ref to build (sha, branch, or tag) - ... -''' +""" + + class InputManifest: @staticmethod def from_file(file): - return InputManifest(yaml.safe_load(file)) + return InputManifest(yaml.safe_load(file)) def __init__(self, data): - self.version = str(data['schema-version']) - if self.version != '1.0': - raise ValueError(f'Unsupported schema version: {self.version}') - self.build = self.Build(data['build']) - self.components = list(map(lambda entry: self.Component(entry), - data['components'])) + self.version = str(data["schema-version"]) + if self.version != "1.0": + raise ValueError(f"Unsupported schema version: {self.version}") + self.build = self.Build(data["build"]) + self.components = list( + map(lambda entry: self.Component(entry), data["components"]) + ) class Build: def __init__(self, data): - self.name = data['name'] - self.version = data['version'] + self.name = data["name"] + self.version = data["version"] class Component: def __init__(self, data): - self.name = data['name'] - self.repository = data['repository'] - self.ref = data['ref'] + self.name = data["name"] + self.repository = data["repository"] + self.ref = data["ref"] diff --git a/bundle-workflow/python/paths/script_finder.py b/bundle-workflow/src/paths/script_finder.py similarity index 53% rename from bundle-workflow/python/paths/script_finder.py rename to bundle-workflow/src/paths/script_finder.py index 501b06044d..8d3b664a03 100644 --- a/bundle-workflow/python/paths/script_finder.py +++ b/bundle-workflow/src/paths/script_finder.py @@ -3,11 +3,12 @@ import os + class ScriptFinder: - ''' + """ ScriptFinder is a helper that abstracts away the details of where to look for build, test and install scripts. 
- For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository, + For build.sh and integtest.sh scripts, given a component name and a checked-out Git repository, it will look in the following locations, in order: * Root of the Git repository * /scripts/ in the Git repository @@ -17,45 +18,57 @@ class ScriptFinder: For install.sh scripts, given a component name, it will look in the following locations, in order: * // * / - ''' + """ def __init__(self): - self.component_scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scripts/components') - self.default_scripts_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../scripts/default') + self.component_scripts_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "../../scripts/components" + ) + self.default_scripts_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "../../scripts/default" + ) def find_build_script(self, component_name, git_dir): paths = [ - os.path.realpath(os.path.join(git_dir, 'build.sh')), - os.path.realpath(os.path.join(git_dir, 'scripts/build.sh')), - os.path.realpath(os.path.join(self.component_scripts_path, component_name, 'build.sh')), - os.path.realpath(os.path.join(self.default_scripts_path, 'build.sh')) + os.path.realpath(os.path.join(git_dir, "build.sh")), + os.path.realpath(os.path.join(git_dir, "scripts/build.sh")), + os.path.realpath( + os.path.join(self.component_scripts_path, component_name, "build.sh") + ), + os.path.realpath(os.path.join(self.default_scripts_path, "build.sh")), ] build_script = next(filter(lambda path: os.path.exists(path), paths), None) if build_script is None: - raise RuntimeError(f'Could not find build.sh script. Looked in {paths}') + raise RuntimeError(f"Could not find build.sh script. 
Looked in {paths}") return build_script def find_integ_test_script(self, component_name, git_dir): paths = [ - os.path.realpath(os.path.join(git_dir, 'integtest.sh')), - os.path.realpath(os.path.join(git_dir, 'scripts/integtest.sh')), - os.path.realpath(os.path.join(self.component_scripts_path, component_name, 'integtest.sh')), - os.path.realpath(os.path.join(self.default_scripts_path, 'integtest.sh')) + os.path.realpath(os.path.join(git_dir, "integtest.sh")), + os.path.realpath(os.path.join(git_dir, "scripts/integtest.sh")), + os.path.realpath( + os.path.join( + self.component_scripts_path, component_name, "integtest.sh" + ) + ), + os.path.realpath(os.path.join(self.default_scripts_path, "integtest.sh")), ] test_script = next(filter(lambda path: os.path.exists(path), paths), None) if test_script is None: - raise RuntimeError(f'Could not find integtest.sh script. Looked in {paths}') + raise RuntimeError(f"Could not find integtest.sh script. Looked in {paths}") return test_script def find_install_script(self, component_name): paths = [ - os.path.realpath(os.path.join(self.component_scripts_path, component_name, 'install.sh')), - os.path.realpath(os.path.join(self.default_scripts_path, 'install.sh')) + os.path.realpath( + os.path.join(self.component_scripts_path, component_name, "install.sh") + ), + os.path.realpath(os.path.join(self.default_scripts_path, "install.sh")), ] install_script = next(filter(lambda path: os.path.exists(path), paths), None) if install_script is None: - raise RuntimeError(f'Could not find install.sh script. Looked in {paths}') - return install_script \ No newline at end of file + raise RuntimeError(f"Could not find install.sh script. 
Looked in {paths}") + return install_script diff --git a/bundle-workflow/python/sign.py b/bundle-workflow/src/sign.py similarity index 64% rename from bundle-workflow/python/sign.py rename to bundle-workflow/src/sign.py index a903dc6bd7..499bc807fd 100755 --- a/bundle-workflow/python/sign.py +++ b/bundle-workflow/src/sign.py @@ -1,14 +1,17 @@ #!/usr/bin/env python -import os import argparse +import os + from manifests.build_manifest import BuildManifest from signing_workflow.signer import Signer -parser = argparse.ArgumentParser(description = "Sign artifacts") -parser.add_argument('manifest', type = argparse.FileType('r'), help = "Path to local manifest file.") -parser.add_argument('--component', nargs = '?', help = "Component name") -parser.add_argument('--type', nargs = '?', help = "Artifact type") +parser = argparse.ArgumentParser(description="Sign artifacts") +parser.add_argument( + "manifest", type=argparse.FileType("r"), help="Path to local manifest file." +) +parser.add_argument("--component", nargs="?", help="Component name") +parser.add_argument("--type", nargs="?", help="Artifact type") args = parser.parse_args() manifest = BuildManifest.from_file(args.manifest) @@ -17,21 +20,21 @@ signer = Signer() for component in manifest.components: - + if args.component and args.component != component.name: - print(f'\nSkipping {component.name}') + print(f"\nSkipping {component.name}") continue - - print(f'\nSigning {component.name}') + + print(f"\nSigning {component.name}") for artifact_type in component.artifacts: - + if args.type and args.type != artifact_type: continue - + artifact_list = component.artifacts[artifact_type] for artifact in artifact_list: location = os.path.join(basepath, artifact) signer.sign(location) signer.verify(location + ".asc") -print('Done.') +print("Done.") diff --git a/bundle-workflow/python/signing_workflow/signer.py b/bundle-workflow/src/signing_workflow/signer.py similarity index 64% rename from 
bundle-workflow/python/signing_workflow/signer.py rename to bundle-workflow/src/signing_workflow/signer.py index 604d6e8489..f35058f3cf 100644 --- a/bundle-workflow/python/signing_workflow/signer.py +++ b/bundle-workflow/src/signing_workflow/signer.py @@ -2,21 +2,25 @@ # Copyright OpenSearch Contributors. # SPDX-License-Identifier: Apache-2.0 -import sys + import os +import sys -sys.path.insert(0,"../git") from git.git_repository import GitRepository -''' +""" This class is responsible for signing an artifact using the OpenSearch-signer-client and verifying its signature. The signed artifacts will be found in the same location as the original artifacts. -''' +""" + +sys.path.insert(0, "../git") + + class Signer: def __init__(self): self.git_repo = GitRepository(self.get_repo_url(), "HEAD") - self.git_repo.execute("./bootstrap", subdirname = "src") - self.git_repo.execute("rm config.cfg", subdirname = "src") + self.git_repo.execute("./bootstrap", subdirname="src") + self.git_repo.execute("rm config.cfg", subdirname="src") def get_repo_url(self): if "GITHUB_TOKEN" in os.environ: @@ -25,9 +29,17 @@ def get_repo_url(self): def sign(self, filename): signature_file = filename + ".asc" - signing_cmd = ['./opensearch-signer-client', '-i', filename, '-o', signature_file, '-p', 'pgp'] - self.git_repo.execute(" ".join(signing_cmd), subdirname = "src") + signing_cmd = [ + "./opensearch-signer-client", + "-i", + filename, + "-o", + signature_file, + "-p", + "pgp", + ] + self.git_repo.execute(" ".join(signing_cmd), subdirname="src") def verify(self, filename): - verify_cmd = ['gpg', '--verify-files', filename] + verify_cmd = ["gpg", "--verify-files", filename] self.git_repo.execute(" ".join(verify_cmd)) diff --git a/bundle-workflow/python/test_workflow/__init__.py b/bundle-workflow/src/system/__init__.py similarity index 100% rename from bundle-workflow/python/test_workflow/__init__.py rename to bundle-workflow/src/system/__init__.py diff --git 
a/bundle-workflow/src/system/arch.py b/bundle-workflow/src/system/arch.py new file mode 100644 index 0000000000..3eefaf3e99 --- /dev/null +++ b/bundle-workflow/src/system/arch.py @@ -0,0 +1,11 @@ +import subprocess + + +def current_arch(): + arch = subprocess.check_output(["uname", "-m"]).decode().strip() + if arch == "x86_64": + return "x64" + elif arch == "aarch64" or arch == "arm64": + return "arm64" + else: + raise ValueError(f"Unsupported architecture: {arch}") diff --git a/bundle-workflow/python/system/temporary_directory.py b/bundle-workflow/src/system/temporary_directory.py similarity index 63% rename from bundle-workflow/python/system/temporary_directory.py rename to bundle-workflow/src/system/temporary_directory.py index dff6fe35bc..4b2405f749 100644 --- a/bundle-workflow/python/system/temporary_directory.py +++ b/bundle-workflow/src/system/temporary_directory.py @@ -1,14 +1,15 @@ -from contextlib import contextmanager -import tempfile import shutil +import tempfile +from contextlib import contextmanager + @contextmanager -def TemporaryDirectory(keep = False): +def TemporaryDirectory(keep=False): name = tempfile.mkdtemp() try: yield name finally: if keep: - print(f'Keeping {name}') + print(f"Keeping {name}") else: - shutil.rmtree(name) \ No newline at end of file + shutil.rmtree(name) diff --git a/bundle-workflow/python/test.py b/bundle-workflow/src/test.py similarity index 68% rename from bundle-workflow/python/test.py rename to bundle-workflow/src/test.py index b5b98c8b37..3c33388006 100755 --- a/bundle-workflow/python/test.py +++ b/bundle-workflow/src/test.py @@ -1,23 +1,29 @@ #!/usr/bin/env python -import os import argparse -from manifests.bundle_manifest import BundleManifest +import os + from git.git_repository import GitRepository -from test_workflow.local_test_cluster import LocalTestCluster -from test_workflow.integ_test_suite import IntegTestSuite +from manifests.bundle_manifest import BundleManifest from paths.script_finder import ScriptFinder 
from system.temporary_directory import TemporaryDirectory +from test_workflow.integ_test_suite import IntegTestSuite +from test_workflow.local_test_cluster import LocalTestCluster -parser = argparse.ArgumentParser(description = "Test an OpenSearch Bundle") -parser.add_argument('manifest', type = argparse.FileType('r'), help = "Manifest file.") -parser.add_argument('--keep', dest = 'keep', action='store_true', help = "Do not delete the working temporary directory.") +parser = argparse.ArgumentParser(description="Test an OpenSearch Bundle") +parser.add_argument("manifest", type=argparse.FileType("r"), help="Manifest file.") +parser.add_argument( + "--keep", + dest="keep", + action="store_true", + help="Do not delete the working temporary directory.", +) args = parser.parse_args() manifest = BundleManifest.from_file(args.manifest) script_finder = ScriptFinder() -with TemporaryDirectory(keep = args.keep) as work_dir: +with TemporaryDirectory(keep=args.keep) as work_dir: os.chdir(work_dir) # Spin up a test cluster with security @@ -29,7 +35,11 @@ # For each component, check out the git repo and run `integtest.sh` for component in manifest.components: print(component.name) - repo = GitRepository(component.repository, component.commit_id, os.path.join(work_dir, component.name)) + repo = GitRepository( + component.repository, + component.commit_id, + os.path.join(work_dir, component.name), + ) test_suite = IntegTestSuite(component.name, repo, script_finder) test_suite.execute(cluster, True) finally: diff --git a/bundle-workflow/src/test_workflow/__init__.py b/bundle-workflow/src/test_workflow/__init__.py new file mode 100644 index 0000000000..a45992386a --- /dev/null +++ b/bundle-workflow/src/test_workflow/__init__.py @@ -0,0 +1 @@ +# This file intentionally left blank.
diff --git a/bundle-workflow/python/test_workflow/integ_test_suite.py b/bundle-workflow/src/test_workflow/integ_test_suite.py similarity index 54% rename from bundle-workflow/python/test_workflow/integ_test_suite.py rename to bundle-workflow/src/test_workflow/integ_test_suite.py index f432991230..9eae8b7ff0 100644 --- a/bundle-workflow/python/test_workflow/integ_test_suite.py +++ b/bundle-workflow/src/test_workflow/integ_test_suite.py @@ -1,5 +1,6 @@ import os + class IntegTestSuite: def __init__(self, name, repo, script_finder): self.name = name @@ -8,7 +9,9 @@ def __init__(self, name, repo, script_finder): def execute(self, cluster, security): script = self.script_finder.find_integ_test_script(self.name, self.repo.dir) - if (os.path.exists(script)): - self.repo.execute(f'sh {script} -b {cluster.endpoint()} -p {cluster.port()} -s {str(security).lower()}') + if os.path.exists(script): + self.repo.execute( + f"sh {script} -b {cluster.endpoint()} -p {cluster.port()} -s {str(security).lower()}" + ) else: - print(f'{script} does not exist. Skipping integ tests for {self.name}') + print(f"{script} does not exist. Skipping integ tests for {self.name}") diff --git a/bundle-workflow/src/test_workflow/local_test_cluster.py b/bundle-workflow/src/test_workflow/local_test_cluster.py new file mode 100644 index 0000000000..9261664bac --- /dev/null +++ b/bundle-workflow/src/test_workflow/local_test_cluster.py @@ -0,0 +1,105 @@ +import os +import subprocess +import time +import urllib.request + +import requests + +from test_workflow.test_cluster import ClusterCreationException, TestCluster + + +class LocalTestCluster(TestCluster): + """ + Represents an on-box test cluster. This class downloads a bundle (from a BundleManifest) and runs it as a background process. 
+ """ + + def __init__(self, work_dir, bundle_manifest, security_enabled): + self.manifest = bundle_manifest + self.work_dir = os.path.join(work_dir, "local-test-cluster") + os.makedirs(self.work_dir, exist_ok=True) + self.security_enabled = security_enabled + self.process = None + + def create(self): + self.download() + self.stdout = open("stdout.txt", "w") + self.stderr = open("stderr.txt", "w") + dir = f"opensearch-{self.manifest.build.version}" + if not self.security_enabled: + self.disable_security(dir) + self.process = subprocess.Popen( + "./opensearch-tar-install.sh", + cwd=dir, + shell=True, + stdout=self.stdout, + stderr=self.stderr, + ) + print(f"Started OpenSearch with PID {self.process.pid}") + self.wait_for_service() + + def endpoint(self): + return "localhost" + + def port(self): + return 9200 + + def destroy(self): + if self.process is None: + print("Local test cluster is not started") + return + print(f"Sending SIGTERM to PID {self.process.pid}") + self.process.terminate() + try: + print("Waiting for process to terminate") + self.process.wait(10) + except subprocess.TimeoutExpired: + print("Process did not terminate after 10 seconds. 
Sending SIGKILL") + self.process.kill() + try: + print("Waiting for process to terminate") + self.process.wait(10) + except subprocess.TimeoutExpired: + print("Process failed to terminate even after SIGKILL") + raise + finally: + print(f"Process terminated with exit code {self.process.returncode}") + self.stdout.close() + self.stderr.close() + self.process = None + + def url(self, path=""): + return f'{"https" if self.security_enabled else "http"}://{self.endpoint()}:{self.port()}{path}' + + def download(self): + print(f"Creating local test cluster in {self.work_dir}") + os.chdir(self.work_dir) + print(f"Downloading bundle from {self.manifest.build.location}") + urllib.request.urlretrieve(self.manifest.build.location, "bundle.tgz") + print(f'Downloaded bundle to {os.path.realpath("bundle.tgz")}') + + print("Unpacking") + subprocess.check_call("tar -xzf bundle.tgz", shell=True) + print("Unpacked") + + def disable_security(self, dir): + subprocess.check_call( + f'echo "plugins.security.disabled: true" >> {os.path.join(dir, "config", "opensearch.yml")}', + shell=True, + ) + + def wait_for_service(self): + print("Waiting for service to become available") + url = self.url("/_cluster/health") + + for attempt in range(10): + try: + print(f"Pinging {url} attempt {attempt}") + response = requests.get(url, verify=False, auth=("admin", "admin")) + print(f"{response.status_code}: {response.text}") + if response.status_code == 200 and '"status":"green"' in response.text: + print("Cluster is green") + return + except requests.exceptions.ConnectionError: + print("Service not available yet") + time.sleep(10) + raise ClusterCreationException("Cluster is not green after 10 attempts") diff --git a/bundle-workflow/python/test_workflow/test_cluster.py b/bundle-workflow/src/test_workflow/test_cluster.py similarity index 88% rename from bundle-workflow/python/test_workflow/test_cluster.py rename to bundle-workflow/src/test_workflow/test_cluster.py index 5629033233..f3a934593b 100644 --- 
a/bundle-workflow/python/test_workflow/test_cluster.py +++ b/bundle-workflow/src/test_workflow/test_cluster.py @@ -1,41 +1,44 @@ import abc + class TestCluster(abc.ABC): - ''' + """ Abstract base class for all types of test clusters. - ''' + """ @abc.abstractmethod def create(self): - ''' + """ Set up the cluster. When this method returns, the cluster must be available to take requests. Throws ClusterCreationException if the cluster could not start for some reason. If this exception is thrown, the caller does not need to call "destroy". - ''' + """ pass @abc.abstractmethod def destroy(self): - ''' + """ Tear down the cluster. If the cluster is already destroyed or has not yet been created then this is a no-op. - ''' + """ pass @abc.abstractmethod def endpoint(self): - ''' + """ Get the endpoint that this cluster is listening on, e.g. 'localhost' or 'some.ip.address'. - ''' + """ pass @abc.abstractmethod def port(self): - ''' + """ Get the port that this cluster is listening on. - ''' + """ pass + class ClusterCreationException(Exception): - ''' + """ Indicates that cluster creation failed for some reason. 
- ''' + """ + pass diff --git a/bundle-workflow/test.sh b/bundle-workflow/test.sh index ab111c769f..47e46f5939 100755 --- a/bundle-workflow/test.sh +++ b/bundle-workflow/test.sh @@ -6,4 +6,4 @@ set -e DIR="$(dirname "$0")" -"$DIR/run.sh" "$DIR/python/test.py" $@ +"$DIR/run.sh" "$DIR/src/test.py" $@ diff --git a/bundle-workflow/tests/system/__init__.py b/bundle-workflow/tests/system/__init__.py new file mode 100644 index 0000000000..c8e65ee3b8 --- /dev/null +++ b/bundle-workflow/tests/system/__init__.py @@ -0,0 +1,4 @@ +import os +import sys + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../..")) diff --git a/bundle-workflow/tests/system/test_arch.py b/bundle-workflow/tests/system/test_arch.py new file mode 100644 index 0000000000..3ed026db95 --- /dev/null +++ b/bundle-workflow/tests/system/test_arch.py @@ -0,0 +1,19 @@ +import subprocess +import unittest +from unittest.mock import MagicMock + +from src.system.arch import current_arch + + +class ArchTests(unittest.TestCase): + def test_current_arch(self): + self.assertTrue(current_arch() in ["x64", "arm64"]) + + def test_invalid_arch(self): + subprocess.check_output = MagicMock(return_value="invalid".encode()) + with self.assertRaises(ValueError) as context: + current_arch() + subprocess.check_output.assert_called_with(["uname", "-m"]) + self.assertEqual( + "Unsupported architecture: invalid", context.exception.__str__() + )