diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8f5c8c5..3af99ba 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,8 +3,22 @@ # These owners will be the default owners for everything in the # repo. Unless a later match takes precedence, these owners will be # requested for review when someone opens a pull request. -* @dav3r @felddy @jasonodoom @jsf9k @mcdonnnj +* @dav3r @felddy @jsf9k @mcdonnnj # These folks own any files in the .github directory at the root of # the repository and any of its subdirectories. -/.github/ @dav3r @felddy @jasonodoom @jsf9k @mcdonnnj +/.github/ @dav3r @felddy @jsf9k @mcdonnnj + +# These folks own all linting configuration files. +/.ansible-lint @dav3r @felddy @jsf9k @mcdonnnj +/.bandit.yml @dav3r @felddy @jsf9k @mcdonnnj +/.flake8 @dav3r @felddy @jsf9k @mcdonnnj +/.isort.cfg @dav3r @felddy @jsf9k @mcdonnnj +/.mdl_config.yaml @dav3r @felddy @jsf9k @mcdonnnj +/.pre-commit-config.yaml @dav3r @felddy @jsf9k @mcdonnnj +/.prettierignore @dav3r @felddy @jsf9k @mcdonnnj +/.yamllint @dav3r @felddy @jsf9k @mcdonnnj +/requirements.txt @dav3r @felddy @jsf9k @mcdonnnj +/requirements-dev.txt @dav3r @felddy @jsf9k @mcdonnnj +/requirements-test.txt @dav3r @felddy @jsf9k @mcdonnnj +/setup-env @dav3r @felddy @jsf9k @mcdonnnj diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2b7369a..62c03de 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,18 +18,21 @@ updates: - dependency-name: actions/checkout - dependency-name: actions/setup-go - dependency-name: actions/setup-python + - dependency-name: cisagov/setup-env-github-action - dependency-name: crazy-max/ghaction-dump-context - dependency-name: crazy-max/ghaction-github-labeler - dependency-name: crazy-max/ghaction-github-status + - dependency-name: GitHubSecurityLab/actions-permissions + - dependency-name: hashicorp/setup-packer - dependency-name: hashicorp/setup-terraform - dependency-name: mxschmitt/action-tmate - dependency-name: step-security/harden-runner # Managed by cisagov/skeleton-docker - dependency-name: actions/download-artifact - - dependency-name: actions/github-script - dependency-name: actions/upload-artifact - dependency-name: docker/build-push-action - dependency-name: docker/login-action + - dependency-name: docker/metadata-action - dependency-name: docker/setup-buildx-action - dependency-name: docker/setup-qemu-action - dependency-name: github/codeql-action diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 72cb364..414c652 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,46 +2,65 @@ name: build on: + merge_group: + types: + - checks_requested + pull_request: push: branches: - "**" tags: - "v*.*.*" - pull_request: - schedule: - - cron: "0 10 * * *" # everyday at 10am repository_dispatch: # Respond to rebuild requests. See: https://github.com/cisagov/action-apb/ - types: [apb] + types: + - apb + schedule: + - cron: "0 10 * * *" # everyday at 10am workflow_dispatch: inputs: - remote-shell: - description: "Debug with remote shell" - required: true - default: "false" image-tag: + default: "dispatch" description: "Tag to apply to pushed images" required: true - default: "dispatch" + remote-shell: + default: "false" + description: "Debug with remote shell" + required: true + +# Set a default shell for any run steps. The `-Eueo pipefail` sets errtrace, +# nounset, errexit, and pipefail. The `-x` will print all commands as they are +# run. 
Please see the GitHub Actions documentation for more information: +# https://docs.github.com/en/actions/using-jobs/setting-default-values-for-jobs +defaults: + run: + shell: bash -Eueo pipefail -x {0} env: BUILDX_CACHE_DIR: ~/.cache/buildx - CURL_CACHE_DIR: ~/.cache/curl IMAGE_NAME: cisagov/vdp-scanner PIP_CACHE_DIR: ~/.cache/pip - # These are the only three platforms that Alpine Linux 3.17 offers the chromium - # package for at this time. - PLATFORMS: "linux/amd64,linux/arm/v7,linux/arm64" PRE_COMMIT_CACHE_DIR: ~/.cache/pre-commit RUN_TMATE: ${{ secrets.RUN_TMATE }} + TERRAFORM_DOCS_REPO_BRANCH_NAME: improvement/support_atx_closed_markdown_headers + TERRAFORM_DOCS_REPO_DEPTH: 1 + TERRAFORM_DOCS_REPO_URL: https://github.com/mcdonnnj/terraform-docs.git jobs: diagnostics: name: Run diagnostics + # This job does not need any permissions + permissions: {} runs-on: ubuntu-latest steps: # Note that a duplicate of this step must be added at the top of # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -49,7 +68,7 @@ jobs: egress-policy: audit - id: github-status name: Check GitHub status - uses: crazy-max/ghaction-github-status@v3 + uses: crazy-max/ghaction-github-status@v4 - id: dump-context name: Dump context uses: crazy-max/ghaction-dump-context@v2 @@ -59,8 +78,15 @@ jobs: name: Lint sources needs: - diagnostics + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -70,23 +96,23 @@ jobs: uses: cisagov/setup-env-github-action@develop - uses: actions/checkout@v4 - id: setup-python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: ${{ steps.setup-env.outputs.python-version }} # We need the Go version and Go cache location for the actions/cache step, # so the Go installation must happen before that. - id: setup-go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: # There is no expectation for actual Go code so we disable caching as # it relies on the existence of a go.sum file. cache: false - go-version: "1.20" - - name: Lookup Go cache directory - id: go-cache + go-version: ${{ steps.setup-env.outputs.go-version }} + - id: go-cache + name: Lookup Go cache directory run: | echo "dir=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - uses: actions/cache@v3 + - uses: actions/cache@v4 env: BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ py${{ steps.setup-python.outputs.python-version }}-\ @@ -94,6 +120,10 @@ jobs: packer${{ steps.setup-env.outputs.packer-version }}-\ tf${{ steps.setup-env.outputs.terraform-version }}-" with: + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements-test.txt') }}-\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('**/.pre-commit-config.yaml') }}" # Note that the .terraform directory IS NOT included in the # cache because if we were caching, then we would need to use # the `-upgrade=true` option. 
This option blindly pulls down the @@ -103,30 +133,13 @@ jobs: path: | ${{ env.PIP_CACHE_DIR }} ${{ env.PRE_COMMIT_CACHE_DIR }} - ${{ env.CURL_CACHE_DIR }} ${{ steps.go-cache.outputs.dir }} - key: "${{ env.BASE_CACHE_KEY }}\ - ${{ hashFiles('**/requirements-test.txt') }}-\ - ${{ hashFiles('**/requirements.txt') }}-\ - ${{ hashFiles('**/.pre-commit-config.yaml') }}" restore-keys: | ${{ env.BASE_CACHE_KEY }} - - name: Setup curl cache - run: mkdir -p ${{ env.CURL_CACHE_DIR }} - - name: Install Packer - env: - PACKER_VERSION: ${{ steps.setup-env.outputs.packer-version }} - run: | - PACKER_ZIP="packer_${PACKER_VERSION}_linux_amd64.zip" - curl --output ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ - --time-cond ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ - --location \ - "https://releases.hashicorp.com/packer/${PACKER_VERSION}/${PACKER_ZIP}" - sudo unzip -d /opt/packer \ - ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" - sudo mv /usr/local/bin/packer /usr/local/bin/packer-default - sudo ln -s /opt/packer/packer /usr/local/bin/packer - - uses: hashicorp/setup-terraform@v2 + - uses: hashicorp/setup-packer@v3 + with: + version: ${{ steps.setup-env.outputs.packer-version }} + - uses: hashicorp/setup-terraform@v3 with: terraform_version: ${{ steps.setup-env.outputs.terraform-version }} - name: Install go-critic @@ -134,26 +147,38 @@ jobs: PACKAGE_URL: github.com/go-critic/go-critic/cmd/gocritic PACKAGE_VERSION: ${{ steps.setup-env.outputs.go-critic-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + - name: Install goimports + env: + PACKAGE_URL: golang.org/x/tools/cmd/goimports + PACKAGE_VERSION: ${{ steps.setup-env.outputs.goimports-version }} + run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - name: Install gosec env: PACKAGE_URL: github.com/securego/gosec/v2/cmd/gosec PACKAGE_VERSION: ${{ steps.setup-env.outputs.gosec-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - - name: Install shfmt - env: - PACKAGE_URL: mvdan.cc/sh/v3/cmd/shfmt - PACKAGE_VERSION: ${{ steps.setup-env.outputs.shfmt-version }} - run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - name: Install staticcheck env: PACKAGE_URL: honnef.co/go/tools/cmd/staticcheck PACKAGE_VERSION: ${{ steps.setup-env.outputs.staticcheck-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - - name: Install Terraform-docs - env: - PACKAGE_URL: github.com/terraform-docs/terraform-docs - PACKAGE_VERSION: ${{ steps.setup-env.outputs.terraform-docs-version }} - run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + # TODO: https://github.com/cisagov/skeleton-generic/issues/165 + # We are temporarily using @mcdonnnj's forked branch of terraform-docs + # until his PR: https://github.com/terraform-docs/terraform-docs/pull/745 + # is approved. This temporary fix will allow for ATX header support when + # terraform-docs is run during linting. + - name: Clone ATX headers branch from terraform-docs fork + run: | + git clone \ + --branch $TERRAFORM_DOCS_REPO_BRANCH_NAME \ + --depth $TERRAFORM_DOCS_REPO_DEPTH \ + --single-branch \ + $TERRAFORM_DOCS_REPO_URL /tmp/terraform-docs + - name: Build and install terraform-docs binary + run: | + go build \ + -C /tmp/terraform-docs \ + -o $(go env GOPATH)/bin/terraform-docs - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel @@ -166,109 +191,46 @@ jobs: uses: mxschmitt/action-tmate@v3 if: env.RUN_TMATE prepare: - # Calculates and publishes outputs that are used by other jobs. - # - # Outputs: - # created: - # The current date-time in RFC3339 format. 
- # repometa: - # The json metadata describing this repository. - # source_version: - # The source version as reported by the `bump_version.sh show` command. - # tags: - # A comma separated list of Docker tags to be applied to the images on - # Docker Hub. The tags will vary depending on: - # - The event that triggered the build. - # - The branch the build is based upon. - # - The git tag the build is based upon. - # - # When a build is based on a git tag of the form `v*.*.*` the image will - # be tagged on Docker Hub with multiple levels of version specificity. - # For example, a git tag of `v1.2.3+a` will generate Docker tags of - # `:1.2.3_a`, `:1.2.3`, `:1.2`, `:1`, and `:latest`. - # - # Builds targeting the default branch will be tagged with `:edge`. - # - # Builds from other branches will be tagged with the branch name. Solidi - # (`/` characters - commonly known as slashes) in branch names are - # replaced with hyphen-minuses (`-` characters) in the Docker tag. For - # more information about the solidus see these links: - # * https://www.compart.com/en/unicode/U+002F - # * https://en.wikipedia.org/wiki/Slash_(punctuation)#Encoding - # - # Builds triggered by a push event are tagged with a short hash in the - # form: sha-12345678 - # - # Builds triggered by a pull request are tagged with the pull request - # number in the form pr-123. - # - # Builds triggered using the GitHub GUI (workflow_dispatch) are tagged - # with the value specified by the user. - # - # Scheduled builds are tagged with `:nightly`. + # Generate Docker image metadata using the docker/metadata-action GitHub Action. name: Prepare build variables needs: - diagnostics outputs: - created: ${{ steps.prep.outputs.created }} - repometa: ${{ steps.repo.outputs.result }} - source_version: ${{ steps.prep.outputs.source_version }} - tags: ${{ steps.prep.outputs.tags }} + labels: ${{ steps.generate-metadata.outputs.labels }} + tags: ${{ steps.generate-metadata.outputs.tags }} + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 with: egress-policy: audit - uses: actions/checkout@v4 - - name: Gather repository metadata - id: repo - uses: actions/github-script@v7 + - id: generate-metadata + name: Generate Docker image metadata + uses: docker/metadata-action@v5 with: - script: | - const repo = await github.rest.repos.get(context.repo) - return repo.data - - name: Calculate output values - id: prep - run: | - VERSION=noop - SEMVER="^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-((0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?$" - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - VERSION=${{ github.event.inputs.image-tag }} - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - elif [[ $GITHUB_REF == refs/heads/* ]]; then - VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -r 's#/+#-#g') - if [ "${{ github.event.repository.default_branch }}" = "$VERSION" ]; - then - VERSION=edge - fi - elif [[ $GITHUB_REF == refs/pull/* ]]; then - VERSION=pr-${{ github.event.number }} - fi - if [[ $VERSION =~ $SEMVER ]]; then - 
VERSION_NO_V=${VERSION#v} - MAJOR="${BASH_REMATCH[1]}" - MINOR="${BASH_REMATCH[2]}" - PATCH="${BASH_REMATCH[3]}" - TAGS="${IMAGE_NAME}:${VERSION_NO_V//+/_},${IMAGE_NAME}:${MAJOR}.${MINOR}.${PATCH},${IMAGE_NAME}:${MAJOR}.${MINOR},${IMAGE_NAME}:${MAJOR},${IMAGE_NAME}:latest" - else - TAGS="${IMAGE_NAME}:${VERSION}" - fi - if [ "${{ github.event_name }}" = "push" ]; then - TAGS="${TAGS},${IMAGE_NAME}:sha-${GITHUB_SHA::8}" - fi - for i in ${TAGS//,/ } - do - TAGS="${TAGS},ghcr.io/${i}" - done - echo "created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT - echo "source_version=$(./bump_version.sh show)" >> $GITHUB_OUTPUT - echo "tags=${TAGS}" >> $GITHUB_OUTPUT - echo tags=${TAGS} + images: | + ${{ env.IMAGE_NAME }} + ghcr.io/${{ env.IMAGE_NAME }} + tags: | + type=edge + type=raw,event=workflow_dispatch,value=${{ github.event.inputs.image-tag }} + type=ref,event=branch + type=ref,event=pr + type=ref,event=tag + type=schedule + type=semver,pattern={{major}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{version}} + type=sha - name: Setup tmate debug session uses: mxschmitt/action-tmate@v3 if: github.event.inputs.remote-shell == 'true' || env.RUN_TMATE @@ -279,8 +241,15 @@ jobs: needs: - diagnostics - prepare + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -293,26 +262,25 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: buildx-${{ runner.os }}- with: - path: ${{ env.BUILDX_CACHE_DIR }} key: ${{ env.BASE_CACHE_KEY }}${{ github.sha }} + path: ${{ env.BUILDX_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - name: Create dist directory run: mkdir -p dist - name: Build image id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: - build-args: | - VERSION=${{ needs.prepare.outputs.source_version }} cache-from: type=local,src=${{ env.BUILDX_CACHE_DIR }} cache-to: type=local,dest=${{ env.BUILDX_CACHE_DIR }} context: . file: ./Dockerfile + labels: ${{ needs.prepare.outputs.labels }} outputs: type=docker,dest=dist/image.tar # Uncomment the following option if you are building an image for use # on Google Cloud Run or AWS Lambda. 
The current default image output @@ -322,33 +290,10 @@ jobs: tags: ${{ env.IMAGE_NAME }}:latest # not to be pushed # For a list of pre-defined annotation keys and value types see: # https://github.com/opencontainers/image-spec/blob/master/annotations.md - labels: "\ - org.opencontainers.image.created=${{ - needs.prepare.outputs.created }} - - org.opencontainers.image.description=${{ - fromJson(needs.prepare.outputs.repometa).description }} - - org.opencontainers.image.licenses=${{ - fromJson(needs.prepare.outputs.repometa).license.spdx_id }} - - org.opencontainers.image.revision=${{ github.sha }} - - org.opencontainers.image.source=${{ - fromJson(needs.prepare.outputs.repometa).clone_url }} - - org.opencontainers.image.title=${{ - fromJson(needs.prepare.outputs.repometa).name }} - - org.opencontainers.image.url=${{ - fromJson(needs.prepare.outputs.repometa).html_url }} - - org.opencontainers.image.version=${{ - needs.prepare.outputs.source_version }}" - name: Compress image run: gzip dist/image.tar - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: dist path: dist @@ -361,28 +306,37 @@ jobs: needs: - diagnostics - build + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 with: egress-policy: audit + - id: setup-env + uses: cisagov/setup-env-github-action@develop - uses: actions/checkout@v4 - id: setup-python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: ${{ steps.setup-env.outputs.python-version }} - name: Cache testing environments - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ py${{ steps.setup-python.outputs.python-version }}-" with: - path: ${{ env.PIP_CACHE_DIR }} key: "${{ env.BASE_CACHE_KEY }}\ ${{ hashFiles('**/requirements-test.txt') }}-\ ${{ hashFiles('**/requirements.txt') }}" + path: ${{ env.PIP_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - name: Install dependencies @@ -390,7 +344,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel pip install --upgrade --requirement requirements-test.txt - name: Download docker image artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: dist path: dist @@ -404,12 +358,12 @@ jobs: uses: mxschmitt/action-tmate@v3 if: env.RUN_TMATE build-push-all: - # Builds the final set of images for each of the platforms listed in - # PLATFORMS environment variable. These images are tagged with the Docker - # tags calculated in the "prepare" job and pushed to Docker Hub and the - # GitHub Container Registry. The contents of README.md are pushed as the - # image's description to Docker Hub. This job is skipped when the - # triggering event is a pull request. + # Builds the final set of images for each of the platforms specified in the + # "platforms" input for the docker/build-push-action Action. These images + # are tagged with the Docker tags calculated in the "prepare" job and + # pushed to Docker Hub and the GitHub Container Registry. The contents of + # README.md are pushed as the image's description to Docker Hub. This job + # is skipped when the triggering event is a pull request. 
if: github.event_name != 'pull_request' name: Build and push all platforms needs: @@ -417,12 +371,18 @@ jobs: - lint - prepare - test - # When Dependabot creates a PR it requires this permission in - # order to push Docker images to ghcr.io. permissions: + # actions/checkout needs this to fetch code + contents: read + # When Dependabot creates a PR it requires this permission in + # order to push Docker images to ghcr.io. packages: write runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -431,14 +391,14 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 with: - username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.DOCKER_USERNAME }} - name: Login to GitHub Container Registry uses: docker/login-action@v3 with: + password: ${{ secrets.GITHUB_TOKEN }} registry: ghcr.io username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - name: Checkout uses: actions/checkout@v4 - name: Set up QEMU @@ -446,27 +406,31 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: buildx-${{ runner.os }}- with: - path: ${{ env.BUILDX_CACHE_DIR }} key: ${{ env.BASE_CACHE_KEY }}${{ github.sha }} + path: ${{ env.BUILDX_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - name: Create cross-platform support Dockerfile-x run: ./buildx-dockerfile.sh - name: Build and push platform images to registries id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: - build-args: | - VERSION=${{ needs.prepare.outputs.source_version }} cache-from: type=local,src=${{ env.BUILDX_CACHE_DIR }} cache-to: type=local,dest=${{ env.BUILDX_CACHE_DIR }} context: . file: ./Dockerfile-x - platforms: ${{ env.PLATFORMS }} + labels: ${{ needs.prepare.outputs.labels }} + # These are the only three platforms that Alpine Linux 3.19 offers the + # chromium package for at this time. + platforms: | + linux/amd64 + linux/arm/v7 + linux/arm64 # Uncomment the following option if you are building an image for use # on Google Cloud Run or AWS Lambda. The current default image output # is unable to run on either. 
Please see the following issue for more @@ -476,29 +440,6 @@ jobs: tags: ${{ needs.prepare.outputs.tags }} # For a list of pre-defined annotation keys and value types see: # https://github.com/opencontainers/image-spec/blob/master/annotations.md - labels: "\ - org.opencontainers.image.created=${{ - needs.prepare.outputs.created }} - - org.opencontainers.image.description=${{ - fromJson(needs.prepare.outputs.repometa).description }} - - org.opencontainers.image.licenses=${{ - fromJson(needs.prepare.outputs.repometa).license.spdx_id }} - - org.opencontainers.image.revision=${{ github.sha }} - - org.opencontainers.image.source=${{ - fromJson(needs.prepare.outputs.repometa).clone_url }} - - org.opencontainers.image.title=${{ - fromJson(needs.prepare.outputs.repometa).name }} - - org.opencontainers.image.url=${{ - fromJson(needs.prepare.outputs.repometa).html_url }} - - org.opencontainers.image.version=${{ - needs.prepare.outputs.source_version }}" - name: Publish README.md to Docker Hub env: DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index dc49271..642adda 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -7,25 +7,36 @@ name: CodeQL on: + merge_group: + types: + - checks_requested + pull_request: + # The branches here must be a subset of the ones in the push key + branches: + - develop push: # Dependabot triggered push events have read-only access, but uploading code # scanning requires write access. branches-ignore: - dependabot/** - pull_request: - # The branches below must be a subset of the branches above - branches: - - develop schedule: - cron: '0 21 * * 6' jobs: diagnostics: name: Run diagnostics + # This job does not need any permissions + permissions: {} runs-on: ubuntu-latest steps: # Note that a duplicate of this step must be added at the top of # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -33,7 +44,7 @@ jobs: egress-policy: audit - id: github-status name: Check GitHub status - uses: crazy-max/ghaction-github-status@v3 + uses: crazy-max/ghaction-github-status@v4 - id: dump-context name: Dump context uses: crazy-max/ghaction-dump-context@v2 @@ -43,6 +54,8 @@ jobs: - diagnostics runs-on: ubuntu-latest permissions: + # actions/checkout needs this to fetch code + contents: read # required for all workflows security-events: write strategy: @@ -56,6 +69,10 @@ jobs: # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -67,7 +84,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} @@ -75,7 +92,7 @@ jobs: # Java). If this step fails, then you should remove it and run the build # manually (see below). 
- name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹī¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -89,4 +106,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/sync-labels.yml b/.github/workflows/sync-labels.yml index 44e8e19..0005147 100644 --- a/.github/workflows/sync-labels.yml +++ b/.github/workflows/sync-labels.yml @@ -4,14 +4,42 @@ name: sync-labels on: push: paths: - - '.github/labels.yml' - - '.github/workflows/sync-labels.yml' + - .github/labels.yml + - .github/workflows/sync-labels.yml + workflow_dispatch: permissions: contents: read jobs: + diagnostics: + name: Run diagnostics + # This job does not need any permissions + permissions: {} + runs-on: ubuntu-latest + steps: + # Note that a duplicate of this step must be added at the top of + # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. + - id: harden-runner + name: Harden the runner + uses: step-security/harden-runner@v2 + with: + egress-policy: audit + - id: github-status + name: Check GitHub status + uses: crazy-max/ghaction-github-status@v4 + - id: dump-context + name: Dump context + uses: crazy-max/ghaction-dump-context@v2 labeler: + needs: + - diagnostics permissions: # actions/checkout needs this to fetch code contents: read @@ -19,6 +47,15 @@ jobs: issues: write runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + - id: harden-runner + name: Harden the runner + uses: step-security/harden-runner@v2 + with: + egress-policy: audit - uses: actions/checkout@v4 - name: Sync repository labels if: success() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b08759f..ce5a172 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,22 +4,30 @@ default_language_version: python: python3 repos: + # Check the pre-commit configuration + - repo: meta + hooks: + - id: check-useless-excludes + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-case-conflict - id: check-executables-have-shebangs - id: check-json - id: check-merge-conflict + - id: check-shebang-scripts-are-executable + - id: check-symlinks - id: check-toml + - id: check-vcs-permalinks - id: check-xml - id: debug-statements + - id: destroyed-symlinks - id: detect-aws-credentials args: - --allow-missing-credentials - id: detect-private-key - id: end-of-file-fixer - exclude: files/(issue|motd) - id: mixed-line-ending args: - --fix=lf @@ -31,17 +39,17 @@ repos: # Text file hooks - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.36.0 + rev: v0.42.0 hooks: - id: markdownlint args: - --config=.mdl_config.yaml - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.3 + - repo: https://github.com/rbubley/mirrors-prettier + rev: v3.3.3 hooks: - id: prettier - repo: https://github.com/adrienverge/yamllint - rev: v1.32.0 + rev: v1.35.1 hooks: - id: yamllint args: @@ -49,14 +57,14 @@ repos: # GitHub Actions hooks - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.26.3 + rev: 0.29.4 hooks: - id: 
check-github-actions - id: check-github-workflows # pre-commit hooks - repo: https://github.com/pre-commit/pre-commit - rev: v3.4.0 + rev: v4.0.1 hooks: - id: validate_manifest @@ -64,21 +72,25 @@ repos: - repo: https://github.com/TekWizely/pre-commit-golang rev: v1.0.0-rc.1 hooks: - # Style Checkers - - id: go-critic - # StaticCheck - - id: go-staticcheck-repo-mod # Go Build - id: go-build-repo-mod + # Style Checkers + - id: go-critic + # goimports + - id: go-imports-repo + args: + # Write changes to files + - -w # Go Mod Tidy - id: go-mod-tidy-repo + # GoSec + - id: go-sec-repo-mod + # StaticCheck + - id: go-staticcheck-repo-mod # Go Test - id: go-test-repo-mod # Go Vet - id: go-vet-repo-mod - # GoSec - - id: go-sec-repo-mod - # Nix hooks - repo: https://github.com/nix-community/nixpkgs-fmt rev: v1.3.0 @@ -86,29 +98,33 @@ repos: - id: nixpkgs-fmt # Shell script hooks - - repo: https://github.com/cisagov/pre-commit-shfmt - rev: v0.0.2 + - repo: https://github.com/scop/pre-commit-shfmt + rev: v3.10.0-1 hooks: - id: shfmt args: + # List files that will be formatted + - --list + # Write result to file instead of stdout + - --write # Indent by two spaces - - -i - - '2' + - --indent + - "2" # Binary operators may start a line - - -bn + - --binary-next-line # Switch cases are indented - - -ci + - --case-indent # Redirect operators are followed by a space - - -sr - - repo: https://github.com/detailyang/pre-commit-shell - rev: 1.0.5 + - --space-redirects + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: v0.10.0.1 hooks: - - id: shell-lint + - id: shellcheck # Python hooks # Run bandit on the "tests" tree with a configuration - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 + rev: 1.7.10 hooks: - id: bandit name: bandit (tests tree) @@ -117,48 +133,97 @@ repos: - --config=.bandit.yml # Run bandit on everything except the "tests" tree - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 + rev: 1.7.10 hooks: - id: bandit name: bandit (everything else) exclude: tests - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.9.1 + rev: 24.10.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + rev: 7.1.1 hooks: - id: flake8 additional_dependencies: - - flake8-docstrings + - flake8-docstrings==1.7.0 - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.5.1 + rev: v1.13.0 hooks: - id: mypy additional_dependencies: - types-docopt - types-requests - types-urllib3 + - repo: https://github.com/pypa/pip-audit + rev: v2.7.3 + hooks: + - id: pip-audit + args: + # Add any pip requirements files to scan + - --requirement + - requirements-dev.txt + - --requirement + - requirements-test.txt + - --requirement + - requirements.txt - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 + rev: v3.19.0 hooks: - id: pyupgrade # Ansible hooks - repo: https://github.com/ansible/ansible-lint - rev: v6.19.0 + rev: v24.10.0 hooks: - id: ansible-lint - # files: molecule/default/playbook.yml + additional_dependencies: + # On its own ansible-lint does not pull in ansible, only + # ansible-core. Therefore, if an Ansible module lives in + # ansible instead of ansible-core, the linter will complain + # that the module is unknown. In these cases it is + # necessary to add the ansible package itself as an + # additional dependency, with the same pinning as is done in + # requirements-test.txt of cisagov/skeleton-ansible-role. 
+ # + # Version 10 is required because the pip-audit pre-commit + # hook identifies a vulnerability in ansible-core 2.16.13, + # but all versions of ansible 9 have a dependency on + # ~=2.16.X. + # + # It is also a good idea to go ahead and upgrade to version + # 10 since version 9 is going EOL at the end of November: + # https://endoflife.date/ansible + # - ansible>=10,<11 + # ansible-core 2.16.3 through 2.16.6 suffer from the bug + # discussed in ansible/ansible#82702, which breaks any + # symlinked files in vars, tasks, etc. for any Ansible role + # installed via ansible-galaxy. Hence we never want to + # install those versions. + # + # Note that the pip-audit pre-commit hook identifies a + # vulnerability in ansible-core 2.16.13. The pin of + # ansible-core to >=2.17 effectively also pins ansible to + # >=10. + # + # It is also a good idea to go ahead and upgrade to + # ansible-core 2.17 since security support for ansible-core + # 2.16 ends this month: + # https://docs.ansible.com/ansible/devel/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix + # + # Note that any changes made to this dependency must also be + # made in requirements.txt in cisagov/skeleton-packer and + # requirements-test.txt in cisagov/skeleton-ansible-role. + - ansible-core>=2.17 # Terraform hooks - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.83.2 + rev: v1.96.1 hooks: - id: terraform_fmt - id: terraform_validate @@ -171,7 +236,7 @@ repos: # Packer hooks - repo: https://github.com/cisagov/pre-commit-packer - rev: v0.0.2 + rev: v0.3.0 hooks: - - id: packer_validate - id: packer_fmt + - id: packer_validate diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 27137f9..cff63f0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,9 +46,13 @@ There are a few ways to do this, but we prefer to use create and manage a Python virtual environment specific to this project. -If you already have `pyenv` and `pyenv-virtualenv` configured you can -take advantage of the `setup-env` tool in this repo to automate the -entire environment configuration process. +We recommend using the `setup-env` script located in this repository, +as it automates the entire environment configuration process. The +dependencies required to run this script are +[GNU `getopt`](https://github.com/util-linux/util-linux/blob/master/misc-utils/getopt.1.adoc), +[`pyenv`](https://github.com/pyenv/pyenv), and [`pyenv-virtualenv`](https://github.com/pyenv/pyenv-virtualenv). +If these tools are already configured on your system, you can simply run the +following command: ```console ./setup-env @@ -57,13 +61,18 @@ entire environment configuration process. Otherwise, follow the steps below to manually configure your environment. -#### Installing and using `pyenv` and `pyenv-virtualenv` #### +#### Installing and using GNU `getopt`, `pyenv`, and `pyenv-virtualenv` #### -On the Mac, we recommend installing [brew](https://brew.sh/). Then -installation is as simple as `brew install pyenv pyenv-virtualenv` and +On macOS, we recommend installing [brew](https://brew.sh/). 
Then +installation is as simple as `brew install gnu-getopt pyenv pyenv-virtualenv` and adding this to your profile: ```bash +# GNU getopt must be explicitly added to the path since it is +# keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean) +export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH" + +# Setup pyenv export PYENV_ROOT="$HOME/.pyenv" export PATH="$PYENV_ROOT/bin:$PATH" eval "$(pyenv init --path)" @@ -71,13 +80,15 @@ eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" ``` -For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you +For Linux, Windows Subsystem for Linux (WSL), or macOS (if you don't want to use `brew`) you can use [pyenv/pyenv-installer](https://github.com/pyenv/pyenv-installer) to install the necessary tools. Before running this ensure that you have installed the prerequisites for your platform according to the [`pyenv` wiki page](https://github.com/pyenv/pyenv/wiki/common-build-problems). +GNU `getopt` is included in most Linux distributions as part of the +[`util-linux`](https://github.com/util-linux/util-linux) package. On WSL you should treat your platform as whatever Linux distribution you've chosen to install. diff --git a/Dockerfile b/Dockerfile index 03f10a4..0257bc4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,9 +6,11 @@ # in the Python Docker image we use for the build-stage. The tag of the Python # Docker image matches the version of the python3 package available on Alpine # for consistency. -FROM alpine:3.18 AS compile-stage +FROM docker.io/library/alpine:3.19 AS compile-stage -# Unprivileged user information necessary for the Python virtual environment +### +# Unprivileged user variables +### ARG CISA_USER="cisa" ENV CISA_HOME="/home/${CISA_USER}" ENV VIRTUAL_ENV="${CISA_HOME}/.venv" @@ -20,39 +22,55 @@ ENV PYTHON_SETUPTOOLS_VERSION=69.2.0 ENV PYTHON_WHEEL_VERSION=0.43.0 RUN apk --no-cache add \ - gcc=12.2.1_git20220924-r10 \ - libc-dev=0.7.2-r5 \ - libxml2-dev=2.11.7-r00 \ - libxslt-dev=1.1.38-r0 \ - py3-pip=23.1.2-r0 \ - py3-setuptools=67.7.2-r0 \ - py3-wheel=0.40.0-r1 \ - python3-dev=3.11.8-r0 \ - python3=3.11.8-r0 + py3-lxml=4.9.3-r1 \ + py3-pip=23.3.1-r0 \ + py3-setuptools=70.3.0-r0 \ + py3-wheel=0.42.0-r0 \ + python3-dev=3.11.11-r0 \ + python3=3.11.11-r0 -# Install pipenv to manage installing the Python dependencies into a created -# Python virtual environment. This is done separately from the virtual -# environment so that pipenv and its dependencies are not installed in the -# Python virtual environment used in the final image. -RUN python3 -m pip install --no-cache-dir --upgrade pipenv==${PYTHON_PIPENV_VERSION} \ - # Manually create Python virtual environment for the final image - && python3 -m venv ${VIRTUAL_ENV} \ - # Ensure the core Python packages are installed in the virtual environment - && ${VIRTUAL_ENV}/bin/python3 -m pip install --no-cache-dir --upgrade \ - pip==${PYTHON_PIP_VERSION} \ - setuptools==${PYTHON_SETUPTOOLS_VERSION} \ - wheel==${PYTHON_WHEEL_VERSION} +### +# Create a Python virtual environment (venv) for setup (due to PEP 668); install the +# specified versions of pip, setuptools, and wheel into the setup venv; install the +# specified version of pipenv into the setup venv; create the image dependency venv; +# and install the specified versions of pip, setuptools, and wheel into the dependency +# venv. +# +# Note that we use the --no-cache-dir flag to avoid writing to a local +# cache. This results in a smaller final image, at the cost of +# slightly longer install times. 
+### +RUN python3 -m venv --system-site-packages /usr/local \ + # Ensure the core Python packages are installed in the virtual environment + && /usr/local/bin/python3 -m pip install --no-cache-dir --upgrade \ + pip==${PYTHON_PIP_VERSION} \ + setuptools==${PYTHON_SETUPTOOLS_VERSION} \ + wheel==${PYTHON_WHEEL_VERSION} \ + && /usr/local/bin/python3 -m pip install --no-cache-dir --upgrade \ + pipenv==${PYTHON_PIPENV_VERSION} \ + # Manually create the virtual environment + && python3 -m venv --system-site-packages ${VIRTUAL_ENV} \ + # Ensure the core Python packages are installed in the virtual environment + && ${VIRTUAL_ENV}/bin/python3 -m pip install --no-cache-dir --upgrade \ + pip==${PYTHON_PIP_VERSION} \ + setuptools==${PYTHON_SETUPTOOLS_VERSION} \ + wheel==${PYTHON_WHEEL_VERSION} -# Install vdp_scanner.py requirements +### +# Check the Pipfile configuration and then install the Python dependencies into +# the virtual environment. +# +# Note that pipenv will install into a virtual environment if the VIRTUAL_ENV +# environment variable is set. +### WORKDIR /tmp COPY src/Pipfile src/Pipfile.lock ./ -# pipenv will install packages into the virtual environment specified in the -# VIRTUAL_ENV environment variable if it is set. -RUN pipenv sync --clear --verbose +RUN pipenv check --verbose \ + && pipenv install --clear --deploy --extra-pip-args "--no-cache-dir" --verbose # The version of Python used here should match the version of the Alpine # python3 package installed in the compile-stage. -FROM python:3.11.8-alpine3.18 AS build-stage +FROM docker.io/library/python:3.11.11-alpine3.19 AS build-stage ### # For a list of pre-defined annotation keys and value types see: @@ -63,7 +81,9 @@ FROM python:3.11.8-alpine3.18 AS build-stage LABEL org.opencontainers.image.authors="vm-fusion-dev-group@trio.dhs.gov" LABEL org.opencontainers.image.vendor="Cybersecurity and Infrastructure Security Agency" -# Unprivileged user information +### +# Unprivileged user setup variables +### ARG CISA_UID=2048 ARG CISA_GID=${CISA_UID} ARG CISA_USER="cisa" @@ -72,19 +92,25 @@ ENV CISA_HOME="/home/${CISA_USER}" ENV VIRTUAL_ENV="${CISA_HOME}/.venv" RUN apk --no-cache add \ - ca-certificates=20240226-r0 \ - chromium=119.0.6045.159-r0 \ - libxml2=2.11.7-r00 \ - libxslt=1.1.38-r0 + ca-certificates=20241121-r1 \ + chromium=124.0.6367.78-r0 \ + py3-lxml=4.9.3-r1 # Create unprivileged user RUN addgroup --system --gid ${CISA_GID} ${CISA_GROUP} \ && adduser --system --uid ${CISA_UID} --ingroup ${CISA_GROUP} ${CISA_USER} -# Copy in the Python venv we created in the compile stage and re-symlink -# python3 in the venv to the Python binary in this image -COPY --from=compile-stage --chown=${CISA_USER}:${CISA_GROUP} ${VIRTUAL_ENV} ${VIRTUAL_ENV}/ -RUN ln -sf "$(command -v python3)" "${VIRTUAL_ENV}"/bin/python3 +### +# Copy in the Python virtual environment created in compile-stage, symlink the +# Python binary in the venv to the system-wide Python, and add the venv to the PATH. +# +# Note that we symlink the Python binary in the venv to the system-wide Python so that +# any calls to `python3` will use our virtual environment. We are using short flags +# because the ln binary in Alpine Linux does not support long flags. The -f instructs +# ln to remove the existing file and the -s instructs ln to create a symbolic link. 
+### +COPY --from=compile-stage --chown=${CISA_USER}:${CISA_GROUP} ${VIRTUAL_ENV} ${VIRTUAL_ENV} +RUN ln -fs "$(command -v python3)" "${VIRTUAL_ENV}"/bin/python3 ENV PATH="${VIRTUAL_ENV}/bin:$PATH" WORKDIR ${CISA_HOME} @@ -93,7 +119,9 @@ RUN mkdir host_mount # Copy in the necessary files COPY --chown=${CISA_USER}:${CISA_GROUP} src/version.txt src/vdp_scanner.py ./ +### # Prepare to run +### USER ${CISA_USER}:${CISA_GROUP} ENTRYPOINT ["python3", "vdp_scanner.py"] CMD ["github"] diff --git a/README.md b/README.md index be86817..d9fd89c 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Python library. Then it will output CSVs with agency and domain level results. To run the `cisagov/vdp-scanner` image via Docker: ```console -docker run cisagov/vdp-scanner:0.2.0-dev.4 +docker run cisagov/vdp-scanner:0.2.0-dev.5 ``` ### Running with Docker Compose ### @@ -36,7 +36,7 @@ docker run cisagov/vdp-scanner:0.2.0-dev.4 services: vdp-scanner: - image: 'cisagov/vdp-scanner:0.2.0-dev.4' + image: cisagov/vdp-scanner:0.2.0-dev.5 volumes: - .:/task/host_mount ``` @@ -74,20 +74,50 @@ docker run cisagov/vdp-scanner:0.2.0-dev.4 1. Pull the new image: ```console - docker pull cisagov/vdp-scanner:0.2.0-dev.4 + docker pull cisagov/vdp-scanner:0.2.0-dev.5 ``` 1. Recreate and run the container by following the [previous instructions](#running-with-docker). +## Updating Python dependencies ## + +This image uses [Pipenv] to manage Python dependencies using a [Pipfile](https://github.com/pypa/pipfile). +Both updating dependencies and changing the [Pipenv] configuration in `src/Pipfile` +will result in a modified `src/Pipfile.lock` file that should be committed to the +repository. + +> [!WARNING] +> The `src/Pipfile.lock` as generated will fail `pre-commit` checks due to JSON formatting. + +### Updating dependencies ### + +If you want to update existing dependencies you would run the following command +in the `src/` subdirectory: + +```console +pipenv lock +``` + +### Modifying dependencies ### + +If you want to add or remove dependencies you would update the `src/Pipfile` file +and then update dependencies as you would above. + +> [!NOTE] +> You should only specify packages that are explicitly needed for your Docker +> configuration. Allow [Pipenv] to manage the dependencies of the specified +> packages. + ## Image tags ## -The images of this container are tagged with -[semantic versions](https://semver.org). It is recommended that most users use -a version tag (e.g. `:0.2.0-dev.4`). +The images of this container are tagged with [semantic +versions](https://semver.org) of the underlying example project that they +containerize. It is recommended that most users use a version tag (e.g. +`:0.2.0-dev.5`). | Image:tag | Description | |-----------|-------------| -|`cisagov/vdp-scanner:0.2.0-dev.4`| An exact release version. | +|`cisagov/vdp-scanner:0.2.0-dev.5`| An exact release version. | |`cisagov/vdp-scanner:0.2`| The most recent release matching the major and minor version numbers. | |`cisagov/vdp-scanner:0`| The most recent release matching the major version number. | |`cisagov/vdp-scanner:edge` | The most recent image built from a merge into the `develop` branch of this repository. 
| @@ -153,7 +183,7 @@ Build the image locally using this git repository as the [build context](https:/ ```console docker build \ - --tag cisagov/vdp-scanner:0.2.0-dev.4 \ + --tag cisagov/vdp-scanner:0.2.0-dev.5 \ https://github.com/cisagov/vdp-scanner-docker.git#develop ``` @@ -184,7 +214,7 @@ Docker: --file Dockerfile-x \ --platform linux/amd64 \ --output type=docker \ - --tag cisagov/vdp-scanner:0.2.0-dev.4 . + --tag cisagov/vdp-scanner:0.2.0-dev.5 . ``` ## Contributing ## @@ -204,3 +234,5 @@ dedication](https://creativecommons.org/publicdomain/zero/1.0/). All contributions to this project will be released under the CC0 dedication. By submitting a pull request, you are agreeing to comply with this waiver of copyright interest. + +[Pipenv]: https://pypi.org/project/pipenv/ diff --git a/bump-version b/bump-version new file mode 100755 index 0000000..324fea9 --- /dev/null +++ b/bump-version @@ -0,0 +1,172 @@ +#!/usr/bin/env bash + +# bump-version [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) +# bump-version --list-files + +set -o nounset +set -o errexit +set -o pipefail + +# Stores the canonical version for the project. +VERSION_FILE=src/version.txt +# Files that should be updated with the new version. +VERSION_FILES=("$VERSION_FILE" README.md) + +USAGE=$( + cat << END_OF_LINE +Update the version of the project. + +Usage: + ${0##*/} [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) + ${0##*/} --list-files + ${0##*/} (-h | --help) + +Options: + -h | --help Show this message. + --push Perform a \`git push\` after updating the version. + --label LABEL Specify the label to use when updating the build or prerelease version. + --list-files List the files that will be updated when the version is bumped. +END_OF_LINE +) + +old_version=$(< "$VERSION_FILE") +# Comment out periods so they are interpreted as periods and don't +# just match any character +old_version_regex=${old_version//\./\\\.} +new_version="$old_version" + +bump_part="" +label="" +commit_prefix="Bump" +with_push=false +commands_with_label=("build" "prerelease") +commands_with_prerelease=("major" "minor" "patch") +with_prerelease=false + +####################################### +# Display an error message, the help information, and exit with a non-zero status. +# Arguments: +# Error message. +####################################### +function invalid_option() { + echo "$1" + echo "$USAGE" + exit 1 +} + +####################################### +# Bump the version using the provided command. +# Arguments: +# The version to bump. +# The command to bump the version. +# Returns: +# The new version. +####################################### +function bump_version() { + local temp_version + temp_version=$(python -c "import semver; print(semver.parse_version_info('$1').${2})") + echo "$temp_version" +} + +if [ $# -eq 0 ]; then + echo "$USAGE" + exit 1 +else + while [ $# -gt 0 ]; do + case $1 in + --push) + if [ "$with_push" = true ]; then + invalid_option "Push has already been set." + fi + + with_push=true + shift + ;; + --label) + if [ -n "$label" ]; then + invalid_option "Label has already been set." + fi + + label="$2" + shift 2 + ;; + build | finalize | major | minor | patch) + if [ -n "$bump_part" ]; then + invalid_option "Only one version part should be bumped at a time." 
+ fi + + bump_part="$1" + shift + ;; + prerelease) + with_prerelease=true + shift + ;; + show) + echo "$old_version" + exit 0 + ;; + -h | --help) + echo "$USAGE" + exit 0 + ;; + --list-files) + printf '%s\n' "${VERSION_FILES[@]}" + exit 0 + ;; + *) + invalid_option "Invalid option: $1" + ;; + esac + done +fi + +if [ -n "$label" ] && [ "$with_prerelease" = false ] && [[ ! " ${commands_with_label[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then + invalid_option "Setting the label is only allowed for the following commands: ${commands_with_label[*]}" +fi + +if [ "$with_prerelease" = true ] && [ -n "$bump_part" ] && [[ ! " ${commands_with_prerelease[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then + invalid_option "Changing the prerelease is only allowed in conjunction with the following commands: ${commands_with_prerelease[*]}" +fi + +label_option="" +if [ -n "$label" ]; then + label_option="token='$label'" +fi + +if [ -n "$bump_part" ]; then + if [ "$bump_part" = "finalize" ]; then + commit_prefix="Finalize" + bump_command="finalize_version()" + elif [ "$bump_part" = "build" ]; then + bump_command="bump_${bump_part}($label_option)" + else + bump_command="bump_${bump_part}()" + fi + new_version=$(bump_version "$old_version" "$bump_command") + echo Changing version from "$old_version" to "$new_version" +fi + +if [ "$with_prerelease" = true ]; then + bump_command="bump_prerelease($label_option)" + temp_version=$(bump_version "$new_version" "$bump_command") + echo Changing version from "$new_version" to "$temp_version" + new_version="$temp_version" +fi + +tmp_file=/tmp/version.$$ +for version_file in "${VERSION_FILES[@]}"; do + if [ ! -f "$version_file" ]; then + echo Missing expected file: "$version_file" + exit 1 + fi + sed "s/$old_version_regex/$new_version/" "$version_file" > $tmp_file + mv $tmp_file "$version_file" +done + +git add "${VERSION_FILES[@]}" +git commit --message "$commit_prefix version from $old_version to $new_version" + +if [ "$with_push" = true ]; then + git push +fi diff --git a/bump_version.sh b/bump_version.sh deleted file mode 100755 index 0071670..0000000 --- a/bump_version.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env bash - -# Usage: -# bump_version.sh (show|major|minor|patch|finalize) -# bump_version.sh (build|prerelease) [token] -# Notes: -# - If you specify a token it will only be used if the current version is -# tokenless or if the provided token matches the token used in the current -# version. - -set -o nounset -set -o errexit -set -o pipefail - -VERSION_FILE=src/version.txt -README_FILE=README.md - -function usage { - cat << HELP -Usage: - ${0##*/} (show|major|minor|patch|finalize) - ${0##*/} (build|prerelease) [token] - -Notes: - - If you specify a token it will only be used if the current version is - tokenless or if the provided token matches the token used in the current - version. 
-HELP - exit 1 -} - -function update_version { - # Comment out periods so they are interpreted as periods and don't - # just match any character - old_version_regex=${1//\./\\\.} - - echo Changing version from "$1" to "$2" - tmp_file=/tmp/version.$$ - sed "s/$old_version_regex/$2/" $VERSION_FILE > $tmp_file - mv $tmp_file $VERSION_FILE - sed "s/$old_version_regex/$2/" $README_FILE > $tmp_file - mv $tmp_file $README_FILE - git add $VERSION_FILE $README_FILE - git commit --message "$3" -} - -if [ $# -lt 1 ] || [ $# -gt 2 ]; then - usage -else - old_version=$(sed -n "s/^__version__ = \"\(.*\)\"$/\1/p" $VERSION_FILE) - case $1 in - major | minor | patch) - if [ $# -ne 1 ]; then - usage - fi - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") - update_version "$old_version" "$new_version" "Bump version from $old_version to $new_version" - ;; - build | prerelease) - if [ $# -eq 2 ]; then - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version', token='$2'))") - else - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") - fi - update_version "$old_version" "$new_version" "Bump version from $old_version to $new_version" - ;; - finalize) - if [ $# -ne 1 ]; then - usage - fi - new_version=$(python -c "import semver; print(semver.finalize_version('$old_version'))") - update_version "$old_version" "$new_version" "Finalize version from $old_version to $new_version" - ;; - show) - echo "$old_version" - ;; - *) - usage - ;; - esac -fi diff --git a/requirements-dev.txt b/requirements-dev.txt index bdc1615..d7a04ed 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ --requirement requirements-test.txt ipython pipenv -semver +semver>=3 diff --git a/setup-env b/setup-env index 77926bf..059ccad 100755 --- a/setup-env +++ b/setup-env @@ -9,60 +9,122 @@ USAGE=$( Configure a development environment for this repository. It does the following: + - Allows the user to specify the Python version to use for the virtual environment. + - Allows the user to specify a name for the virtual environment. - Verifies pyenv and pyenv-virtualenv are installed. - - Creates a Python virtual environment. + - Creates the Python virtual environment. - Configures the activation of the virtual enviroment for the repo directory. - Installs the requirements needed for development. - Installs git pre-commit hooks. - - Configures git upstream remote "lineage" repositories. + - Configures git remotes for upstream "lineage" repositories. Usage: - setup-env [options] [virt_env_name] + setup-env [--venv-name venv_name] [--python-version python_version] setup-env (-h | --help) Options: - -f --force Delete virtual enviroment if it already exists. - -h --help Show this message. - -i --install-hooks Install hook environments for all environments in the - pre-commit config file. + -f | --force Delete virtual enviroment if it already exists. + -h | --help Show this message. + -i | --install-hooks Install hook environments for all environments in the + pre-commit config file. + -l | --list-versions List available Python versions and select one interactively. + -v | --venv-name Specify the name of the virtual environment. + -p | --python-version Specify the Python version for the virtual environment. 
END_OF_LINE ) +# Display pyenv's installed Python versions +python_versions() { + pyenv versions --bare --skip-aliases --skip-envs +} + +check_python_version() { + local version=$1 + + # This is a valid regex for semantically correct Python version strings. + # For more information see here: + # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string + # Break down the regex into readable parts major.minor.patch + local major="0|[1-9]\d*" + local minor="0|[1-9]\d*" + local patch="0|[1-9]\d*" + + # Splitting the prerelease part for readability + # Start of the prerelease + local prerelease="(?:-" + # Numeric or alphanumeric identifiers + local prerelease+="(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)" + # Additional dot-separated identifiers + local prerelease+="(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*" + # End of the prerelease, making it optional + local prerelease+=")?" + # Optional build metadata + local build="(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?" + + # Final regex composed of parts + local regex="^($major)\.($minor)\.($patch)$prerelease$build$" + + # This checks if the Python version does not match the regex pattern specified in $regex, + # using Perl for regex matching. If the pattern is not found, then prompt the user with + # the invalid version message. + if ! echo "$version" | perl -ne "exit(!/$regex/)"; then + echo "Invalid version of Python: Python follows semantic versioning," \ + "so any version string that is not a valid semantic version is an" \ + "invalid version of Python." + exit 1 + # Else if the Python version isn't installed then notify the user. + # grep -E is used for searching through text lines that match the specific verison. + elif ! python_versions | grep -E "^${version}$" > /dev/null; then + echo "Error: Python version $version is not installed." + echo "Installed Python versions are:" + python_versions + exit 1 + else + echo "Using Python version $version" + fi +} + # Flag to force deletion and creation of virtual environment FORCE=0 -# Positional parameters -PARAMS="" +# Initialize the other flags +INSTALL_HOOKS=0 +LIST_VERSIONS=0 +PYTHON_VERSION="" +VENV_NAME="" -# Parse command line arguments -while (("$#")); do - case "$1" in - -f | --force) - FORCE=1 - shift - ;; - -h | --help) - echo "${USAGE}" - exit 0 - ;; - -i | --install-hooks) - INSTALL_HOOKS=1 - shift - ;; - -*) # unsupported flags - echo "Error: Unsupported flag $1" >&2 - exit 1 - ;; - *) # preserve positional arguments - PARAMS="$PARAMS $1" - shift - ;; - esac -done +# Define long options +LONGOPTS="force,help,install-hooks,list-versions,python-version:,venv-name:" + +# Define short options for getopt +SHORTOPTS="fhilp:v:" + +# Check for GNU getopt by matching a specific pattern ("getopt from util-linux") +# in its version output. This approach presumes the output format remains stable. +# Be aware that format changes could invalidate this check. +if [[ $(getopt --version 2> /dev/null) != *"getopt from util-linux"* ]]; then + cat << 'END_OF_LINE' -# set positional arguments in their proper place -eval set -- "$PARAMS" + Please note, this script requires GNU getopt due to its enhanced + functionality and compatibility with certain script features that + are not supported by the POSIX getopt found in some systems, particularly + those with a non-GNU version of getopt. This distinction is crucial + as a system might have a non-GNU version of getopt installed by default, + which could lead to unexpected behavior. 
+ + On macOS, we recommend installing brew (https://brew.sh/). Then installation + is as simple as `brew install gnu-getopt` and adding this to your + profile: + + export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH" + + GNU getopt must be explicitly added to the PATH since it + is keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean). + +END_OF_LINE + exit 1 +fi # Check to see if pyenv is installed if [ -z "$(command -v pyenv)" ] || { [ -z "$(command -v pyenv-virtualenv)" ] && [ ! -f "$(pyenv root)/plugins/pyenv-virtualenv/bin/pyenv-virtualenv" ]; }; then @@ -70,7 +132,7 @@ if [ -z "$(command -v pyenv)" ] || { [ -z "$(command -v pyenv-virtualenv)" ] && if [[ "$OSTYPE" == "darwin"* ]]; then cat << 'END_OF_LINE' - On the Mac, we recommend installing brew, https://brew.sh/. Then installation + On macOS, we recommend installing brew, https://brew.sh/. Then installation is as simple as `brew install pyenv pyenv-virtualenv` and adding this to your profile: @@ -81,7 +143,7 @@ END_OF_LINE fi cat << 'END_OF_LINE' - For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you don't want + For Linux, Windows Subsystem for Linux (WSL), or macOS (if you don't want to use "brew") you can use https://github.com/pyenv/pyenv-installer to install the necessary tools. Before running this ensure that you have installed the prerequisites for your platform according to the pyenv wiki page, @@ -100,16 +162,72 @@ END_OF_LINE exit 1 fi -set +o nounset +# Use GNU getopt to parse options +if ! PARSED=$(getopt --options $SHORTOPTS --longoptions $LONGOPTS --name "$0" -- "$@"); then + echo "Error parsing options" + exit 1 +fi +eval set -- "$PARSED" + +while true; do + case "$1" in + -f | --force) + FORCE=1 + shift + ;; + -h | --help) + echo "$USAGE" + exit 0 + ;; + -i | --install-hooks) + INSTALL_HOOKS=1 + shift + ;; + -l | --list-versions) + LIST_VERSIONS=1 + shift + ;; + -p | --python-version) + PYTHON_VERSION="$2" + shift 2 + # Check the Python version being passed in. + check_python_version "$PYTHON_VERSION" + ;; + -v | --venv-name) + VENV_NAME="$2" + shift 2 + ;; + --) + shift + break + ;; + *) + # Unreachable due to GNU getopt handling all options + echo "Programming error" + exit 64 + ;; + esac +done + # Determine the virtual environment name -if [ "$1" ]; then +if [ -n "$VENV_NAME" ]; then # Use the user-provided environment name - env_name=$1 + env_name="$VENV_NAME" else # Set the environment name to the last part of the working directory. env_name=${PWD##*/} fi -set -o nounset + +# List Python versions and select one interactively. +if [ $LIST_VERSIONS -ne 0 ]; then + echo Available Python versions: + python_versions + # Read the user's desired Python version. + # -r: treat backslashes as literal, -p: display prompt before input. + read -r -p "Enter the desired Python version: " PYTHON_VERSION + # Check the Python version being passed in. + check_python_version "$PYTHON_VERSION" +fi # Remove any lingering local configuration. if [ $FORCE -ne 0 ]; then @@ -118,7 +236,7 @@ if [ $FORCE -ne 0 ]; then elif [[ -f .python-version ]]; then cat << 'END_OF_LINE' An existing .python-version file was found. Either remove this file yourself - or re-run with --force option to have it deleted along with the associated + or re-run with the --force option to have it deleted along with the associated virtual environment. rm .python-version @@ -128,10 +246,18 @@ END_OF_LINE fi # Create a new virtual environment for this project -if ! 
pyenv virtualenv "${env_name}"; then +# +# If $PYTHON_VERSION is undefined then the current pyenv Python version will be used. +# +# We can't quote ${PYTHON_VERSION:=} below since if the variable is +# undefined then we want nothing to appear; this is the reason for the +# "shellcheck disable" line below. +# +# shellcheck disable=SC2086 +if ! pyenv virtualenv ${PYTHON_VERSION:=} "${env_name}"; then cat << END_OF_LINE An existing virtual environment named $env_name was found. Either delete this - environment yourself or re-run with --force option to have it deleted. + environment yourself or re-run with the --force option to have it deleted. pyenv virtualenv-delete ${env_name} diff --git a/src/Pipfile b/src/Pipfile index 3bbe265..3aa466d 100644 --- a/src/Pipfile +++ b/src/Pipfile @@ -6,8 +6,11 @@ name = "pypi" [packages] docopt = "*" hash-http-content = {file = "https://github.com/cisagov/hash-http-content/archive/v0.1.0.tar.gz"} +# We need to use the system package version of this package. This matches the version +# available for Alpine Linux 3.19. +lxml = "4.9.3" requests = "*" urllib3 = "*" [requires] -python_full_version = "3.11.8" +python_full_version = "3.11.11" diff --git a/src/Pipfile.lock b/src/Pipfile.lock index f43ccc5..2ca12fe 100644 --- a/src/Pipfile.lock +++ b/src/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "e6183d2c99bf10ffae9283233f6f3d41089f6193cc5007a6baf688930d1b8938" + "sha256": "88924eb6676c21aebe730c85dde115b33ca8dbb01d4f52afe55b601ca8679823" }, "pipfile-spec": 6, "requires": { - "python_full_version": "3.11.8" + "python_full_version": "3.11.11" }, "sources": [ { @@ -33,115 +33,109 @@ }, "certifi": { "hashes": [ - "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", - "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1" + "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", + "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db" ], "markers": "python_version >= '3.6'", - "version": "==2024.2.2" + "version": "==2024.12.14" }, "charset-normalizer": { "hashes": [ - "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", - "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", - "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", - "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", - "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", - "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", - "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", - "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", - "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", - "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", - "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", - "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", - "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", - "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", - "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", - "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", - "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", - "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", - 
"sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", - "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", - "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", - "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", - "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", - "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", - "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", - "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", - "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", - "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", - "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", - "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", - "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", - "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", - "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", - "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", - "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", - "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", - "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", - "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", - "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", - "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", - "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", - "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", - "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", - "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", - "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", - "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", - "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", - "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", - "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", - "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", - "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", - "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", - "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", - "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", - "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", - "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", - "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", - "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", - "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", - "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", - "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", - "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", - "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", - "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", - 
"sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", - "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", - "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", - "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", - "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", - "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", - "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", - "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", - "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", - "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", - "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", - "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", - "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", - "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", - "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", - "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", - "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", - "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", - "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", - "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", - "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", - "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", - "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", - "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", - "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", - "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" + "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", + "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa", + "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a", + "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", + "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b", + "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", + "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", + "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", + "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", + "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", + "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", + "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", + "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", + "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", + "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", + "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", + "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", + "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", + "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", + "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", + 
"sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e", + "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a", + "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4", + "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca", + "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", + "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", + "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", + "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", + "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", + "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", + "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", + "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", + "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", + "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", + "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", + "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd", + "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c", + "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", + "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", + "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", + "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", + "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824", + "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", + "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf", + "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487", + "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d", + "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd", + "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", + "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534", + "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", + "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", + "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", + "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd", + "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", + "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9", + "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", + "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", + "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d", + "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", + "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", + "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", + "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", + "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", + "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", + "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8", + "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", + 
"sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", + "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", + "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", + "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", + "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", + "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", + "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", + "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", + "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", + "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", + "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", + "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e", + "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6", + "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", + "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", + "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e", + "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", + "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", + "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c", + "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", + "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", + "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089", + "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", + "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e", + "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", + "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616" ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.3.2" - }, - "contextlib2": { - "hashes": [ - "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", - "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869" - ], - "markers": "python_version >= '3.6'", - "version": "==21.6.0" + "markers": "python_version >= '3.7'", + "version": "==3.4.1" }, "docopt": { "hashes": [ @@ -155,107 +149,126 @@ }, "idna": { "hashes": [ - "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", - "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" + "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", + "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" ], - "markers": "python_version >= '3.5'", - "version": "==3.6" + "markers": "python_version >= '3.6'", + "version": "==3.10" }, "importlib-metadata": { "hashes": [ - "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", - "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2" + "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", + "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7" ], "markers": "python_version >= '3.8'", - "version": "==7.1.0" + "version": "==8.5.0" }, "lxml": { "hashes": [ - "sha256:036b36c48cd775e4fd2084b34ae62ffeefa7a01f955f5a5b816f9257c308cfc0", - "sha256:04ef231dde88294a5499f61a74cdc42af97d8d5ecec1b0a645d1c7d436942789", - 
"sha256:071e5123d1eca861708c4be5b54e4d88923fa33fab3aa02722e907518b07071c", - "sha256:0e46181d15fae102c53621bed9356b7a599a1e837b978c934a350dd00842b1d9", - "sha256:13b73d78a8023203722cf98e9ea0b222da83110d1d5ef437ef8782a7755b4586", - "sha256:1528b37e83c3aeecb438e76e5be6279b353275560125a9c3f4d74642c5f110f9", - "sha256:161838cb95c97e8d76d01e544a3570b52ab6b863f4897a90e1f073bb110a75ba", - "sha256:19c6bc7476eeac4598ff925ae98597610109e21af4cd7ab1e060efcfc4b1c6e2", - "sha256:1b0611bba10d6f5467b86673e8f6bba4de0d00f7d111eea843bc872abfe11b5c", - "sha256:1d0270d33fbde6e1c6758ff58e2e284144f5331aa05dfe7f44ceafdf4e9d31aa", - "sha256:1d380f183bd03ab827899753ea96dabe27d2025eb0bfd4f2ac0eee4afa0f351d", - "sha256:20cd17eb21f5ae54da96791c49e1fbd3327bf66b2c00556cdf8d0552c2270f92", - "sha256:26974096654241df08a30dc2eb0e139c1ad5653660aa4b2ced66000230e96c14", - "sha256:2992480a25434d2df31413136ef87effab14d43b07f1f54c5012c4f6c7530144", - "sha256:29b2771b4eec4e85063f10294facdd9829d010e6cc9668040d0cf936dc56733a", - "sha256:2de00750318ae6869b9dfa6429a4f82b8ecad043049414547474d09db549c2ee", - "sha256:318847c165063549c8fda6b162a0d068689b10deb825cb3859caef69fddaaaff", - "sha256:3641bc124b037921de4220538a5ebb52354fd2799fc2bbfb335d28096063c7d6", - "sha256:3a6e9b34f59c9755aa279c652e1c48c333c665d05a88afcd8e5ff0bde86f3b14", - "sha256:3c5940f188189956ccb3d1adb413001ada79f2d2b81087d2612a0cc4a1197eed", - "sha256:3da8db291568c27b2bb248dcfc8838ca3149f373a24e204bcd1c2c89e2813d14", - "sha256:42a8aa957e98bd8b884a8142175ec24ce4ef0a57760e8879f193bfe64b757ca9", - "sha256:43f21b5929185fa4560836942020bb00a0fcdec9f67be98cac1a4b99501757c1", - "sha256:48dd28b9f410329de709a4bb6169c58f2cd8bff25f5a48d647678ec9b8a40c65", - "sha256:4958c378d9387c45ef8c4859495cf6be76f863e4e3b31494f6ec7f2c48d3b8e3", - "sha256:50007f4e94dc4e38030487a8b6c4af87a2d51ed059c7b74b29e3dd937cb1dfe1", - "sha256:5020b3081030b5cfc8149eee231167aea4ff68df73a610e1d542809e1f11fde7", - "sha256:52358249292bc155af681a9240ec3d944c1195f0124aa10ec4e3635adc1e10a1", - "sha256:55e13a19829dcdbf0c5233062977aeb6daf72e65124909128045976f659164e8", - "sha256:56f1e813ff660d031c77edba90a068d57e47ae93a9e811330fc88946fa68af9a", - "sha256:59ca75cfcf646ff64aa19ca4e7fd2a0fde77268d5a87856525d9e0b69b77d0c4", - "sha256:5bd2595ebe95214446e00a1ab94571f778b126e17736ea222c07505c4e092289", - "sha256:5dcb373720b70aa05419e508265dd86f06886ca0388967f6f024fbc4d551379f", - "sha256:60ceffdca5d637fe8ee95c7f06733a6c9646e07da80997efe3af2d4b4f366e36", - "sha256:666432274881cb2535e71dbe745e08ef10fe25c81fbb1a6b1e3c973177823b0c", - "sha256:68eed33377a9925aed7ba56c8611d50aaa1e45638c07a92b4b4b0a0436cc2dd2", - "sha256:6c49eb5deaed1990fde5b5d80d6800aec1b5fd6113346b5f11068d988f68f2c4", - "sha256:74d0967c6f91eec6fe91159f9e8ccb3720fa0fbf9f462109c7bef62550df397c", - "sha256:77425482e4311d1cff119a2b5ab26c52ec209d2a3d728a54db3223ab91995e20", - "sha256:77842b79b63c83c04dcfe2f045c78e15e4d97c86838eabd2e6518c1ed97e3900", - "sha256:81107c8de3e463052ae8fd05fd31b97c371c7a9ce4a189b8bb5f45b0b3545fb9", - "sha256:87b67d8620c2725d666e5d88ddba56bcdb1f52211a2e7d22f951b67c35f7f627", - "sha256:8a943826e7a9254eed661a7134fcde3c832a9fecd989d0f47c6e08c7b769cb2c", - "sha256:906966babd374fdfe46e130fc656488003f0d0d63b7cba612aa5a796c8804283", - "sha256:9113fe65a62f834b8e994c8f48e7b2179bf81878c0ec80ad7feba51ab9417663", - "sha256:95a51324a55000c55f4ab79e1f7f1e0bc42b7a24e39633f79542753023a9d4b7", - "sha256:99bcdf665576a26b44c7ce767d76b769a4418b0a13cda8300b26fb7b2647bd5b", - "sha256:9c03f3715c68fc707d9383d56e482d95d198ba07cb3dad4aee9e5a5ca06b2536", - 
"sha256:9cc30dc3c49ea914fa62ea73b57198b541cf2cd522fcf2b9559f99a24df769bb", - "sha256:a02ed1ebc469734dbfed5b688f709334de19e7a333cba7ae187b17d2b2c1d4ff", - "sha256:a103426e809640a2d985062d2f4b28db2f0fe4469ff72a67cb31fa70613158f1", - "sha256:a305d0469177fd78a0a9aa2231c60218266bb85d4b7955f9b67dab628c9267fd", - "sha256:a76a7b35e7660c74eb3f943c19f5f78c882dceab890cf8017027b6100b79ad8e", - "sha256:a94a97380ad689d751eb0a1e1ccd2a0622c5141771a31abe9a16075f80027e95", - "sha256:acff17e0cd5344677757a152631d8411efac6a84e4476d60123a9b33f5d6c511", - "sha256:adedfb61be862f48907218e3a24bf051fd2ecca53358f3958b0bdb17d7881c20", - "sha256:adff469b7dbfe9f3babc9e4479449ee97085ba70ac492fbe5f0f7217940c6731", - "sha256:aff34295a6c87638a1f1905355cf3a97e4026c45c0cf3bb6ed6bc35b885b4a33", - "sha256:b174885fd2cabd1ad48585296f495e25d607f02db99668c08b2afaceb668e21b", - "sha256:bc6904519dd1f92eb82f7d49814a33bbc444d0b66b1438e76daf3f79ef4aa38f", - "sha256:bd46b5b19ac969de8e87fb3d04414641d12ee489e2ea6cc75344087829b31c63", - "sha256:bf7e57dbe7b3c605e63849d9c8dae246a6ab9002223c57cd3f3dec7c3a0a8e6d", - "sha256:bfbdadc3cfe552331ecb0bbdcabf148d1697c73aa4321151e0e6c1704eeb76a7", - "sha256:c7c1d2f6e9c7a1c4478146ee38d16dbe0eb3be998424bc0f01346c671c38b86d", - "sha256:d1abbf2249467a37da45fb2d7ff37e578dfc9813f142800e58db9da761cb7899", - "sha256:d1f4d37b3f8d2d44493edce3d65ac987127bababd8ae208a6f0d7d260852346e", - "sha256:d26243d994d4077a50056e9008848e5b421be0c6f0fd4e932a9463e1d89fc42b", - "sha256:d55ddc73dec971277b181a6d1a6abdd34f50e4511e1e60f6b4ebe22cbaad05bb", - "sha256:d9358f7268c161dc0a1c3216018f26c04954b5dd47ba6dead79da6598f4725d4", - "sha256:d94a28c16cc430b68c374b37b8bb536ba5f0a4a080be0e1daa8310c44a00a75c", - "sha256:dec3491aa69a91ed07f5e6bc033e2b1a9424447ad5312ee69ac973e94d79083a", - "sha256:f05ab8cea65363d0cc7ce818f42407504b6d94ca885b4cde0270f021e2f4ef61", - "sha256:f1d0824e3ddb969fe1337b1bc45cf0bec8095b342f36903f41a74b7769cc8c73", - "sha256:f8682af96b5ad5093aab9eee5e4ff24cb7a9796c78699d914dd456ebfe7484a6" + "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3", + "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d", + "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a", + "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120", + "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305", + "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287", + "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23", + "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52", + "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f", + "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4", + "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584", + "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f", + "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693", + "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef", + "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5", + "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02", + "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc", + "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7", + "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da", + "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a", + 
"sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40", + "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8", + "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd", + "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601", + "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c", + "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be", + "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2", + "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c", + "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129", + "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc", + "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2", + "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1", + "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7", + "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d", + "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477", + "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d", + "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e", + "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7", + "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2", + "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574", + "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf", + "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b", + "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98", + "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12", + "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42", + "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35", + "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d", + "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce", + "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d", + "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f", + "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db", + "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4", + "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694", + "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac", + "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2", + "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7", + "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96", + "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d", + "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b", + "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a", + "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13", + "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340", + "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6", + "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458", + "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c", + "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c", + 
"sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9", + "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432", + "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991", + "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69", + "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf", + "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb", + "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b", + "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833", + "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76", + "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85", + "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e", + "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50", + "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8", + "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4", + "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b", + "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5", + "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190", + "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7", + "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa", + "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0", + "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9", + "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0", + "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b", + "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5", + "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7", + "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4" ], - "markers": "python_version >= '3.6'", - "version": "==5.1.1" + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.9.3" }, "pyee": { "hashes": [ - "sha256:5d346a7d0f861a4b2e6c47960295bd895f816725b27d656181947346be98d7c1", - "sha256:b53af98f6990c810edd9b56b87791021a8f54fd13db4edd1142438d44ba2263f" + "sha256:82e1eb1853f8497c4ff1a0c7fa26b9cd2f1253e2b6ffb93b4700fda907017302", + "sha256:9e4cdd7c2f9fcf247db94bad39a260aceffefdbe52286ce71be01959de34a5c2" ], "markers": "python_version >= '3.8'", - "version": "==11.1.0" + "version": "==11.1.1" }, "pyppeteer": { "hashes": [ @@ -267,61 +280,60 @@ }, "requests": { "hashes": [ - "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", - "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" ], "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==2.31.0" + "markers": "python_version >= '3.8'", + "version": "==2.32.3" }, "schema": { "hashes": [ - "sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197", - "sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c" + "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde", + "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807" ], - "version": "==0.7.5" + "version": "==0.7.7" }, "setuptools": { 
"hashes": [ - "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e", - "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c" + "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", + "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3" ], - "markers": "python_version >= '3.8'", - "version": "==69.2.0" + "markers": "python_version >= '3.9'", + "version": "==75.8.0" }, "soupsieve": { "hashes": [ - "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690", - "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7" + "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", + "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9" ], "markers": "python_version >= '3.8'", - "version": "==2.5" + "version": "==2.6" }, "tqdm": { "hashes": [ - "sha256:23097a41eba115ba99ecae40d06444c15d1c0c698d527a01c6c8bd1c5d0647e5", - "sha256:4f41d54107ff9a223dca80b53efe4fb654c67efaba7f47bada3ee9d50e05bd53" + "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", + "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2" ], - "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==4.66.3" + "version": "==4.67.1" }, "typing-extensions": { "hashes": [ - "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", - "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb" + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], "markers": "python_version >= '3.8'", - "version": "==4.10.0" + "version": "==4.12.2" }, "urllib3": { "hashes": [ - "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", - "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" ], "index": "pypi", "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.18" + "version": "==1.26.20" }, "websockets": { "hashes": [ @@ -400,11 +412,11 @@ }, "zipp": { "hashes": [ - "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b", - "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715" + "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", + "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931" ], - "markers": "python_version >= '3.8'", - "version": "==3.18.1" + "markers": "python_version >= '3.9'", + "version": "==3.21.0" } }, "develop": {} diff --git a/src/vdp_scanner.py b/src/vdp_scanner.py index 60463ab..ce2f82f 100644 --- a/src/vdp_scanner.py +++ b/src/vdp_scanner.py @@ -237,12 +237,9 @@ def output_all_csvs(self) -> None: def get_version(version_file) -> str: """Extract a version number from the given file path.""" with open(version_file) as vfile: - for line in vfile.read().splitlines(): - if line.startswith("__version__"): - delim = '"' if '"' in line else "'" - return line.split(delim)[1] + return vfile.read().strip() - raise RuntimeError("Unable to find version string.") + raise RuntimeError("Failed to extract version string.") def get_local_csv(file: str) -> List[Dict[str, str]]: diff --git a/src/version.txt b/src/version.txt index 0ab8f7c..92bf7ff 100644 --- a/src/version.txt +++ b/src/version.txt 
@@ -1 +1 @@ -__version__ = "0.2.0-dev.4" +0.2.0-dev.5 diff --git a/tag.sh b/tag.sh index e1f7447..0a0e607 100755 --- a/tag.sh +++ b/tag.sh @@ -4,6 +4,6 @@ set -o nounset set -o errexit set -o pipefail -version=$(./bump_version.sh show) +version=$(./bump-version show) git tag "v$version" && git push --tags diff --git a/tests/conftest.py b/tests/conftest.py index 1e20cae..51ab4d0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ https://docs.pytest.org/en/latest/writing_plugins.html#conftest-py-plugins """ + # Third-Party Libraries import pytest from python_on_whales import docker @@ -9,6 +10,8 @@ MAIN_SERVICE_NAME = "vdp-scanner" VERSION_SERVICE_NAME = f"{MAIN_SERVICE_NAME}-version" +VERSION_FILE = "src/version.txt" + @pytest.fixture(scope="session") def dockerc(): @@ -35,6 +38,14 @@ def version_container(dockerc): return dockerc.compose.ps(services=[VERSION_SERVICE_NAME], all=True)[0] +@pytest.fixture(scope="session") +def project_version(): + """Return the version of the project.""" + with open(VERSION_FILE) as f: + project_version = f.read().strip() + return project_version + + def pytest_addoption(parser): """Add new commandline options to pytest.""" parser.addoption( diff --git a/tests/container_test.py b/tests/container_test.py index 804b5b8..7bb6152 100644 --- a/tests/container_test.py +++ b/tests/container_test.py @@ -1,4 +1,3 @@ -#!/usr/bin/env pytest -vs """Tests for vdp-scanner container.""" # Standard Python Libraries @@ -22,37 +21,28 @@ def test_container_count(dockerc): @pytest.mark.skipif( RELEASE_TAG in [None, ""], reason="this is not a release (RELEASE_TAG not set)" ) -def test_release_version(): +def test_release_version(project_version): """Verify that release tag version agrees with the module version.""" - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] assert ( RELEASE_TAG == f"v{project_version}" ), "RELEASE_TAG does not match the project version" -def test_log_version(dockerc, version_container): +def test_log_version(dockerc, project_version, version_container): """Verify the container outputs the correct version to the logs.""" # make sure container exited if running test isolated dockerc.wait(version_container.id) log_output = version_container.logs().strip() - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] assert ( log_output == project_version ), f"Container version output to log does not match project version file {VERSION_FILE}" -def test_container_version_label_matches(version_container): +@pytest.mark.skipif( + RELEASE_TAG in [None, ""], reason="this is not a release (RELEASE_TAG not set)" +) +def test_container_version_label_matches(project_version, version_container): """Verify the container version label is the correct version.""" - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] assert ( version_container.config.labels["org.opencontainers.image.version"] == project_version
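Note on the version-handling changes above: src/version.txt now stores a bare version string such as 0.2.0-dev.5 instead of an executable __version__ assignment, which is why get_version() in src/vdp_scanner.py and the new project_version fixture in tests/conftest.py can both read it with a plain read().strip() instead of exec(). The following is a minimal, self-contained Python sketch of that flow; the matches_release_tag() helper and the RELEASE_TAG default are illustrative assumptions that mirror tests/container_test.py rather than code taken from the repository.

"""Minimal sketch of the bare-version workflow introduced by this diff.

Assumes src/version.txt contains only a version string such as
"0.2.0-dev.5". The helper names below are illustrative, not part of
the repository.
"""
import os

VERSION_FILE = "src/version.txt"


def get_version(version_file: str = VERSION_FILE) -> str:
    """Return the bare version string stored in the version file."""
    with open(version_file) as vfile:
        return vfile.read().strip()


def matches_release_tag(version: str, release_tag: str) -> bool:
    """Check that a tag of the form v<version> matches the project version."""
    return release_tag == f"v{version}"


if __name__ == "__main__":
    version = get_version()
    # RELEASE_TAG mirrors the environment variable the container tests read;
    # default to a matching tag so the sketch runs on its own.
    tag = os.environ.get("RELEASE_TAG", f"v{version}")
    print(f"version={version} tag={tag} match={matches_release_tag(version, tag)}")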
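Relatedly, requirements-dev.txt now pins semver>=3 and setup-env gains a check_python_version() helper that validates a user-supplied Python version string against the semver.org regex using perl. For prototyping or testing that validation in Python, a rough counterpart built on the semver package could look like the sketch below; this is an assumption for illustration only, since setup-env itself performs the check with perl, not with the semver package.

"""Rough Python counterpart of setup-env's semantic-version check.

Illustration only: setup-env validates version strings with a perl
regex. Requires semver>=3, as pinned in requirements-dev.txt.
"""
import semver


def check_python_version(version: str) -> bool:
    """Return True if the string is a valid semantic version."""
    try:
        semver.Version.parse(version)
    except ValueError:
        return False
    return True


if __name__ == "__main__":
    # "3.11" fails because semantic versions require major.minor.patch,
    # matching the behavior of the regex used in setup-env.
    for candidate in ("3.11.11", "0.2.0-dev.5", "3.11", "not-a-version"):
        print(candidate, check_python_version(candidate))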