diff --git a/.github/workflows/automatus-ubuntu2404.yml b/.github/workflows/automatus-ubuntu2404.yml
new file mode 100644
index 00000000000..2ccefdd6152
--- /dev/null
+++ b/.github/workflows/automatus-ubuntu2404.yml
@@ -0,0 +1,169 @@
+name: Automatus Ubuntu 24.04
+on:
+  pull_request:
+    branches: [ master, 'stabilization*' ]
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
+  cancel-in-progress: true
+jobs:
+  build-content:
+    name: Build Content
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Install build deps
+        run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils
+      - name: Install workflow deps
+        run: sudo apt install -y git python3-deepdiff python3-requests jq python3-pip
+      - name: Install deps python
+        run: pip3 install gitpython xmldiff
+      - name: Checkout
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+        with:
+          fetch-depth: 0
+      - name: Checkout (CTF)
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+        with:
+          repository: ComplianceAsCode/content-test-filtering
+          path: ctf
+      # https://github.com/actions/checkout/issues/766
+      - name: Set git safe directory
+        run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
+      - name: Find forking point
+        env:
+          BASE_BRANCH: ${{ github.base_ref }}
+        run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
+        id: fork_point
+      - name: Detect content changes in the PR
+        run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
+      - name: Test if there are no content changes
+        run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
+        id: ctf
+      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        with:
+          name: output.json
+          path: output.json
+      - name: Print changes to content detected if any
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: cat output.json
+      - name: Get product attribute
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        id: product
+        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
+        with:
+          path: 'output.json'
+          prop_path: 'product'
+      - name: Build product
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: ./build_product ${{steps.product.outputs.prop}} --datastream-only
+      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        with:
+          name: ssg-${{steps.product.outputs.prop}}-ds.xml
+          path: build/ssg-${{steps.product.outputs.prop}}-ds.xml
+  validate-ubuntu:
+    name: Run Tests
+    needs: build-content
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Install test deps
+        run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils podman
+      - name: Checkout
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - name: Get cached CTF output
+        uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4
+        id: get_ctf_output
+        with:
+          name: output.json
+        # continue even if the file is unavailable; that
+        # means no changes were detected by CTF in the previous job
+        continue-on-error: true
+      - name: Test if there are no content changes
+        if: ${{ steps.get_ctf_output.outcome == 'success' }}
+        run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
+        id: ctf
+      - name: Print changes to content detected if any
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: cat output.json
+      - name: Generate id_rsa key
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
+      - name: Build test suite container
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-ubuntu2404
+        working-directory: ./Dockerfiles
+      - name: Get rule ids to be tested
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        id: rules
+        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
+        with:
+          path: 'output.json'
+          prop_path: 'rules'
+      - name: Get product attribute
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        id: product
+        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
+        with:
+          path: 'output.json'
+          prop_path: 'product'
+      - name: Get bash attribute
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        id: bash
+        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
+        with:
+          path: 'output.json'
+          prop_path: 'bash'
+      - name: Get ansible attribute
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        id: ansible
+        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
+        with:
+          path: 'output.json'
+          prop_path: 'ansible'
+      - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4
+        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        with:
+          name: ssg-${{steps.product.outputs.prop}}-ds.xml
+      - name: Run tests in a container - Bash
+        if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream ssg-${{steps.product.outputs.prop}}-ds.xml ${{join(fromJSON(steps.rules.outputs.prop))}}
+        env:
+          ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
+      - name: Check for ERROR in logs
+        if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: grep -q "^ERROR" logs_bash/test_suite.log
+        id: check_results_bash
+        # When grep returns 1, it did not find the ^ERROR string in the test_suite.log file,
+        # i.e. the tests finished without errors, so the job needs to keep going.
+        # With continue-on-error: true the "conclusion" parameter is always set to success, so it cannot
+        # be used to determine whether this step failed or succeeded; use the "outcome" parameter instead.
+        # See the step below
+        continue-on-error: true
+      - name: Upload logs in case of failure
+        if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
+        with:
+          name: logs_bash
+          path: logs_bash/
+      - name: Run tests in a container - Ansible
+        if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream ssg-${{steps.product.outputs.prop}}-ds.xml ${{join(fromJSON(steps.rules.outputs.prop))}}
+        env:
+          ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
+      - name: Check for ERROR in logs
+        if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: grep -q "^ERROR" logs_ansible/test_suite.log
+        id: check_results_ansible
+        continue-on-error: true
+      - name: Upload logs in case of failure
+        if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
+        with:
+          name: logs_ansible
+          path: logs_ansible/
+      - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
+        if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
+        run: |
+          [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
+          [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
+          exit 1
diff --git a/.github/workflows/gate.yaml b/.github/workflows/gate.yaml
index 5b6dd1c5162..c3e3b6bb6e9 100644
--- a/.github/workflows/gate.yaml
+++ b/.github/workflows/gate.yaml
@@ -121,6 +121,25 @@ jobs:
         run: ctest -j2 --output-on-failure -E unique-stigids
         working-directory: ./build
 
+  validate-ubuntu-24-04:
+    name: Build, Test on Ubuntu 24.04
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Install Deps
+        run: sudo apt-get update && sudo apt-get install cmake ninja-build openscap-utils libxml2-utils xsltproc ansible-lint bats python3-github python3-jinja2 python3-pip python3-pytest python3-pytest-cov python3-setuptools python3-yaml shellcheck
+      - name: Checkout
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - name: Install deps python
+        run: pip3 install -r requirements.txt -r test-requirements.txt
+      - name: Build
+        env:
+          ADDITIONAL_CMAKE_OPTIONS: "-DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
+        run: |-
+          ./build_product ubuntu2404
+      - name: Test
+        run: ctest -j2 --output-on-failure -E unique-stigids
+        working-directory: ./build
+
   validate-fedora-rawhide:
     name: Build, Test on Fedora Rawhide (Container)
     runs-on: ubuntu-latest
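A minimal sketch of the continue-on-error / outcome gating pattern that the "Check for ERROR in logs" comments above describe, written as a hypothetical standalone workflow; the workflow name, step names, step id, and log file below are illustrative only:

# Hypothetical workflow illustrating the continue-on-error / outcome pattern
# used by the "Check for ERROR in logs" steps in the patch above.
name: outcome-gating-sketch
on: workflow_dispatch
jobs:
  demo:
    runs-on: ubuntu-24.04
    steps:
      - name: Produce a log
        run: echo "ERROR example failure" > test_suite.log
      - name: Check for ERROR in logs
        id: check_results
        # grep -q exits 0 when "^ERROR" is present and 1 when it is not.
        # continue-on-error keeps the job running either way; it also forces
        # "conclusion" to success, so later steps must test "outcome" instead.
        run: grep -q "^ERROR" test_suite.log
        continue-on-error: true
      - name: Publish logs only when errors were found
        if: ${{ steps.check_results.outcome == 'success' }}
        run: cat test_suite.log
      - name: Fail the job when errors were found
        if: ${{ steps.check_results.outcome == 'success' }}
        run: exit 1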