diff --git a/.github/actions/on_device_tests/action.yaml b/.github/actions/on_device_tests/action.yaml index df6b4bf39d626..f5cf336e1f5d9 100644 --- a/.github/actions/on_device_tests/action.yaml +++ b/.github/actions/on_device_tests/action.yaml @@ -1,9 +1,94 @@ -name: On Host Tests -description: Runs on-host tests. +name: On Device Test +description: Runs on-device tests. +inputs: + gcs_results_path: + description: "GCS path for the test results" + required: true + test_results_key: + description: "Artifact key used to store test results." + required: true + runs: using: "composite" steps: - - name: Run On-Device Tests + - name: Install Requirements + run: | + pip3 install --require-hashes --no-deps -r ${GITHUB_WORKSPACE}/cobalt/tools/requirements.txt + shell: bash + - name: Generate gRPC files + run: | + python -m grpc_tools.protoc -I${GITHUB_WORKSPACE}/cobalt/tools/ \ + --python_out=${GITHUB_WORKSPACE}/cobalt/tools/ \ + --grpc_python_out=${GITHUB_WORKSPACE}/cobalt/tools/ \ + ${GITHUB_WORKSPACE}/cobalt/tools/on_device_tests_gateway.proto + shell: bash + - name: Set Up Cloud SDK + uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 + - name: Set GCS Project Name + run: | + echo "PROJECT_NAME=$(gcloud config get-value project)" >> $GITHUB_ENV + shell: bash + - name: Run Tests on ${{ matrix.platform }} Platform + env: + GCS_ARTIFACTS_PATH: /bigstore/${{ env.PROJECT_NAME }}-test-artifacts/${{ github.workflow }}/${{ github.run_number }}/${{ matrix.platform }} + GCS_RESULTS_PATH: gs://cobalt-unittest-storage/results/${{ matrix.name }}/${{ github.run_id }} + GITHUB_SHA: ${{ github.sha }} + GITHUB_TOKEN: ${{ github.token }} + GITHUB_EVENT_NAME: ${{ github.event_name }} + GITHUB_ACTOR: ${{ github.actor }} + GITHUB_TRIGGERING_ACTOR: ${{ github.triggering_actor }} + GITHUB_ACTOR_ID: ${{ github.actor_id }} + GITHUB_REPO: ${{ github.repository }} + GITHUB_PR_HEAD_USER_LOGIN: ${{ github.event.pull_request.head.user.login }} + GITHUB_PR_HEAD_USER_ID: ${{ 
github.event.pull_request.head.user.id }} + GITHUB_COMMIT_AUTHOR_USERNAME: ${{ github.event.commits[0].author.username }} + GITHUB_COMMIT_AUTHOR_EMAIL: ${{ github.event.commits[0].author.email }} + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + GITHUB_RUN_NUMBER: ${{ github.run_number }} + GITHUB_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GITHUB_WORKFLOW: ${{ github.workflow }} + # TODO(b/382508397): Replace hardcoded list with dynamically generated one. + TEST_TARGETS_JSON_FILE: cobalt/build/testing/targets/${{ matrix.platform }}/test_targets.json + run: | + set -uxe + python3 -u cobalt/tools/on_device_tests_gateway_client.py \ + --token ${GITHUB_TOKEN} \ + trigger \ + --targets $(cat "${{ env.TEST_TARGETS_JSON_FILE }}" | jq -cr '.test_targets | join(",")') \ + --filter_json_dir "${GITHUB_WORKSPACE}/cobalt/testing/filters/${{ matrix.platform}}" \ + --label github_${GITHUB_PR_NUMBER:-postsubmit} \ + --label builder-${{ matrix.platform }} \ + --label builder_url-${GITHUB_RUN_URL} \ + --label github \ + --label ${GITHUB_EVENT_NAME} \ + --label ${GITHUB_WORKFLOW} \ + --label actor-${GITHUB_ACTOR} \ + --label actor_id-${GITHUB_ACTOR_ID} \ + --label triggering_actor-${GITHUB_TRIGGERING_ACTOR} \ + --label sha-${GITHUB_SHA} \ + --label repository-${GITHUB_REPO} \ + --label author-${GITHUB_PR_HEAD_USER_LOGIN:-$GITHUB_COMMIT_AUTHOR_USERNAME} \ + --label author_id-${GITHUB_PR_HEAD_USER_ID:-$GITHUB_COMMIT_AUTHOR_EMAIL} \ + ${DIMENSION:+"--dimension" "$DIMENSION"} \ + ${ON_DEVICE_TEST_ATTEMPTS:+"--test_attempts" "$ON_DEVICE_TEST_ATTEMPTS"} \ + --gcs_archive_path "${GCS_ARTIFACTS_PATH}" \ + --gcs_result_path "${GCS_RESULTS_PATH}" shell: bash + - name: Download ${{ matrix.platform }} Test Results from GCS + if: always() + env: + GCS_RESULTS_PATH: ${{ inputs.gcs_results_path }} run: | - echo "Nothing yet" + set -uxe + test_output="${GITHUB_WORKSPACE}/results" + echo "test_output=${test_output}" >> $GITHUB_ENV + + 
mkdir -p "${test_output}" + gsutil cp -r "${GCS_RESULTS_PATH}/" "${test_output}" + shell: bash + - name: Archive Test Results + uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ inputs.test_results_key }} + path: ${{ env.test_output }}/* diff --git a/.github/actions/on_host_tests/action.yaml b/.github/actions/on_host_tests/action.yaml index 776d05f4828d7..50d9ff2d36cff 100644 --- a/.github/actions/on_host_tests/action.yaml +++ b/.github/actions/on_host_tests/action.yaml @@ -82,5 +82,5 @@ runs: if: success() || failure() uses: actions/upload-artifact@v4 with: - name: ${{ inputs.test_results_key }}-${{ matrix.shard }} + name: ${{ inputs.test_results_key }} path: ${{ env.results_dir }}/shard_${{ matrix.shard }}/*.xml diff --git a/.github/actions/process_test_results/action.yaml b/.github/actions/process_test_results/action.yaml index 73a2e16c3633f..90c5da98ccda4 100644 --- a/.github/actions/process_test_results/action.yaml +++ b/.github/actions/process_test_results/action.yaml @@ -13,7 +13,7 @@ runs: - name: Download Test Results uses: actions/download-artifact@v4 with: - pattern: ${{ inputs.test_results_key }}-* + pattern: ${{ inputs.test_results_key }}* path: results/**/* - name: Test Summary action diff --git a/.github/actions/upload_test_artifacts/action.yaml b/.github/actions/upload_test_artifacts/action.yaml index a2a645aea29db..8469171cd0084 100644 --- a/.github/actions/upload_test_artifacts/action.yaml +++ b/.github/actions/upload_test_artifacts/action.yaml @@ -16,27 +16,16 @@ inputs: runs: using: "composite" steps: - - name: Set up Cloud SDK - if: inputs.upload_on_device_test_artifacts == 'true' - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Upload Android Test Artifacts to GCS - if: inputs.upload_on_device_test_artifacts == 'true' - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -eux - project_name=$(gcloud config get-value project) - gsutil cp "${GITHUB_WORKSPACE}/src/out/${{ 
matrix.platform }}_${{ matrix.config }}/**/*.apk" \ - "gs://${project_name}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}/" - shell: bash - - - name: Create On-Host Test Artifacts Archive - if: inputs.upload_on_host_test_artifacts == 'true' + - name: Archive Test Artifacts run: | set -x mkdir ${GITHUB_WORKSPACE}/artifacts cd src/ - ./cobalt/build/archive_test_artifacts.py \ + + # Put test targets json file in the out folder for the archiving script to pick up. + cp "${{ inputs.test_targets_json_file }}" out/${{ matrix.platform }}_${{ matrix.config }}/ + + time ./cobalt/build/archive_test_artifacts.py \ --source out/${{ matrix.platform }}_${{ matrix.config }}/ \ --destination ${GITHUB_WORKSPACE}/artifacts \ --platform ${{ matrix.platform }} \ @@ -49,3 +38,17 @@ runs: name: ${{ inputs.test_artifacts_key }} path: artifacts/* retention-days: 3 + - name: Set up Cloud SDK + if: inputs.upload_on_device_test_artifacts == 'true' + uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 + - name: Upload Android Test Artifacts to GCS + if: inputs.upload_on_device_test_artifacts == 'true' + env: + WORKFLOW: ${{ github.workflow }} + run: | + set -eux + project_name=$(gcloud config get-value project) + + gsutil cp "${GITHUB_WORKSPACE}/artifacts/*" \ + "gs://${project_name}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}/" + shell: bash diff --git a/.github/config/android-arm.json b/.github/config/android-arm.json index a58a046f4fcbc..9e1eb911cfc0e 100644 --- a/.github/config/android-arm.json +++ b/.github/config/android-arm.json @@ -3,6 +3,11 @@ "platforms": [ "android-arm" ], + "test_on_device": true, + "test_dimensions": { + "gtest_device": "sabrina", + "gtest_lab": "maneki" + }, "targets": [ "content_shell", "cobalt:gn_all" diff --git a/.github/config/android-arm64.json b/.github/config/android-arm64.json index bd0a9a7eb48ea..9ccc7ff3f29a7 100644 --- a/.github/config/android-arm64.json +++ 
b/.github/config/android-arm64.json @@ -3,6 +3,7 @@ "platforms": [ "android-arm64" ], + "test_on_device": true, "targets": [ "content_shell", "cobalt:gn_all" diff --git a/.github/config/chromium_android-arm.json b/.github/config/chromium_android-arm.json index 7e073505527ce..50fb7aee1fdb3 100644 --- a/.github/config/chromium_android-arm.json +++ b/.github/config/chromium_android-arm.json @@ -4,16 +4,6 @@ "chromium_android-arm" ], "targets": [ - "base_unittests", - "sql_unittests", - "net_unittests", - "url_unittests", - "ipc_tests", - "mojo_unittests", - "gpu_unittests", - "gin_unittests", - "blink_unittests", - "media_unittests", "content_shell", "system_webview_apk", "system_webview_shell_apk" diff --git a/.github/config/chromium_android-arm64.json b/.github/config/chromium_android-arm64.json index 1921fdbbad913..5c5239614c650 100644 --- a/.github/config/chromium_android-arm64.json +++ b/.github/config/chromium_android-arm64.json @@ -4,16 +4,6 @@ "chromium_android-arm64" ], "targets": [ - "base_unittests", - "sql_unittests", - "net_unittests", - "url_unittests", - "ipc_tests", - "mojo_unittests", - "gpu_unittests", - "gin_unittests", - "blink_unittests", - "media_unittests", "content_shell", "system_webview_apk", "system_webview_shell_apk" diff --git a/.github/config/chromium_android-x86.json b/.github/config/chromium_android-x86.json index 8498733285ed5..ec99f4fcb9db3 100644 --- a/.github/config/chromium_android-x86.json +++ b/.github/config/chromium_android-x86.json @@ -4,16 +4,6 @@ "chromium_android-x86" ], "targets": [ - "base_unittests", - "sql_unittests", - "net_unittests", - "url_unittests", - "ipc_tests", - "mojo_unittests", - "gpu_unittests", - "gin_unittests", - "blink_unittests", - "media_unittests", "content_shell", "system_webview_apk", "system_webview_shell_apk" diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index d521ed65099fb..6e383dc24311e 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -22,7 
+22,6 @@ concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ inputs.platform }} @ ${{ github.event.label.name || github.event.pull_request.number || github.sha }} @ ${{ github.event.label.name && github.event.pull_request.number || github.event.action }} cancel-in-progress: true -# A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: # Retrieves configuration from json file. initialize: @@ -92,7 +91,7 @@ jobs: num_gtest_shards=$(cat ${GITHUB_WORKSPACE}/.github/config/${{ inputs.platform }}.json | jq -rc '.num_gtest_shards' || echo 1) echo "num_gtest_shards=${num_gtest_shards}" >> $GITHUB_ENV - # create a zero-indexed list of shards for use by matrix, e.g. [0,1,2,3,4,5] + # Create a zero-indexed list of shards for use by matrix, e.g. [0,1,2,3,4,5]. gtest_shards="[$(seq -s, 0 1 $((${num_gtest_shards} - 1)))]" echo "gtest_shards=${gtest_shards}" >> $GITHUB_ENV outputs: @@ -134,7 +133,6 @@ jobs: outputs: docker_tag: ${{ env.docker_tag }} - # Runs builds. build: needs: [initialize, docker-build-image] permissions: {} @@ -183,9 +181,44 @@ jobs: upload_on_host_test_artifacts: ${{ matrix.config == 'devel' && needs.initialize.outputs.test_on_host }} upload_on_device_test_artifacts: ${{ matrix.config == 'devel' && needs.initialize.outputs.test_on_device }} - test: + on-device-test: + needs: [initialize, build] + # Run ODT when on_device label is applied on PR. + # Also, run ODT on push and schedule if not explicitly disabled via repo vars. 
+ if: needs.initialize.outputs.test_on_device == 'true' && + ( + (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'on_device')) || + ((inputs.nightly == 'true' || github.event_name == 'schedule') && vars.RUN_ODT_TESTS_ON_NIGHTLY != 'False') || + (github.event_name == 'push' && vars.RUN_ODT_TESTS_ON_POSTSUBMIT != 'False') + ) + runs-on: [self-hosted, odt-runner] + name: ${{ matrix.name }}_on_device_tests + permissions: {} + strategy: + fail-fast: false + matrix: + platform: ${{ fromJson(needs.initialize.outputs.platforms) }} + include: ${{ fromJson(needs.initialize.outputs.includes) }} + config: [devel] + env: + TEST_RESULTS_KEY: ${{ matrix.platform }}_${{ matrix.name }}_test_results + steps: + - name: Checkout + uses: actions/checkout@v4 + timeout-minutes: 30 + with: + fetch-depth: 1 + persist-credentials: false + - name: Run On-Device Tests + id: on-device-tests + uses: ./.github/actions/on_device_tests + with: + test_results_key: ${{ env.TEST_RESULTS_KEY }} + gcs_results_path: gs://cobalt-unittest-storage/results/${{ matrix.name }}/${{ github.run_id }} + + on-host-test: needs: [initialize, docker-build-image, build] - if: needs.initialize.outputs.test_on_host == 'true' || needs.initialize.outputs.test_on_device == 'true' + if: always() && needs.initialize.outputs.test_on_host == 'true' permissions: {} # TODO(b/372303096): Should have dedicated runner? 
runs-on: [self-hosted, chrobalt-linux-runner] @@ -201,7 +234,7 @@ jobs: env: TMPDIR: /__w/_temp TEST_ARTIFACTS_KEY: ${{ matrix.platform }}_${{ matrix.name }}_test_artifacts - TEST_RESULTS_KEY: ${{ matrix.platform }}_${{ matrix.name }}_test_results + TEST_RESULTS_KEY: ${{ matrix.platform }}_${{ matrix.name }}_test_results-${{ matrix.shard }} steps: - name: Checkout uses: actions/checkout@v4 @@ -209,7 +242,6 @@ jobs: path: src - name: Run On-Host Tests id: on-host-tests - if: always() && needs.initialize.outputs.test_on_host == 'true' uses: ./src/.github/actions/on_host_tests with: test_artifacts_key: ${{ env.TEST_ARTIFACTS_KEY }} @@ -217,14 +249,14 @@ jobs: num_gtest_shards: ${{ needs.initialize.outputs.num_gtest_shards }} test-upload: - needs: [initialize, docker-build-image, build, test] + needs: [initialize, on-host-test, on-device-test] if: always() && - ( - needs.initialize.outputs.test_on_host == 'true' || - needs.initialize.outputs.test_on_device == 'true' - ) + ( + needs.initialize.outputs.test_on_host == 'true' || + needs.initialize.outputs.test_on_device == 'true' + ) permissions: {} - runs-on: [self-hosted, chrobalt-linux-runner] + runs-on: ubuntu-latest name: ${{ matrix.name }}_tests_upload strategy: fail-fast: false @@ -232,7 +264,6 @@ jobs: platform: ${{ fromJson(needs.initialize.outputs.platforms) }} include: ${{ fromJson(needs.initialize.outputs.includes) }} config: [devel] - container: ${{ needs.docker-build-image.outputs.docker_tag }} env: TMPDIR: /__w/_temp TEST_ARTIFACTS_KEY: ${{ matrix.platform }}_${{ matrix.name }}_test_artifacts @@ -249,8 +280,9 @@ jobs: datadog_api_key: ${{ secrets.datadog_api_key }} continue-on-error: true + validate-test-result: - needs: [initialize, docker-build-image, build, test] + needs: [initialize, on-device-test, on-host-test] if: always() && ( needs.initialize.outputs.test_on_host == 'true' || @@ -265,8 +297,15 @@ jobs: include: ${{ fromJson(needs.initialize.outputs.includes) }} config: [devel] steps: - - name: Fail 
if any test shards have failed - if: ${{ needs.test.result != 'success' }} + - name: Check Status + if: ${{ ! (needs.on-device-test.result == 'success' || needs.on-host-test.result == 'success') }} run: | - echo "Failing because at least one test shard had errors." - exit 1 + if [ "${{ needs.on-device-test.result }}" != "success" ]; then + echo "On device tests failed. See separate job log for details." + exit 1 + fi + + if [ "${{ needs.on-host-test.result }}" != "success" ]; then + echo "On host tests failed. See separate job log for details." + exit 1 + fi diff --git a/.github/workflows/scorecards.yaml b/.github/workflows/scorecards.yaml index 181c96d6ba674..f0a4c8346beba 100644 --- a/.github/workflows/scorecards.yaml +++ b/.github/workflows/scorecards.yaml @@ -46,7 +46,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@97a0fba1372883ab732affbe8f94b823f91727db # v3.pre.node20 + uses: actions/upload-artifact@v4 with: name: SARIF file path: results.sarif diff --git a/cobalt/build/archive_test_artifacts.py b/cobalt/build/archive_test_artifacts.py index 711f7bb2734cb..db144d281660b 100755 --- a/cobalt/build/archive_test_artifacts.py +++ b/cobalt/build/archive_test_artifacts.py @@ -15,14 +15,24 @@ """Creates test artifacts tar with runtime dependencies.""" import argparse -import json import os +import shutil import subprocess import tempfile -from typing import List +from typing import List, Optional +# Path prefixes that contain files we don't need to run tests. +_EXCLUDE_DIRS = [ + '../../', + 'lib.java/', + './exe.unstripped/', + './lib.unstripped/', +] -def _make_tar(archive_path: str, file_list: str): + +def _make_tar(archive_path: str, + file_list: str, + base_path: Optional[str] = None): """Creates the tar file. Uses tar command instead of tarfile for performance. 
""" print(f'Creating {os.path.basename(archive_path)}') @@ -30,33 +40,27 @@ def _make_tar(archive_path: str, file_list: str): with tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') as temp_file: temp_file.write('\n'.join(sorted(file_list))) temp_file.flush() - tar_cmd = ['tar', '-I gzip -1', '-cvf', archive_path, '-T', temp_file.name] + base_path_arg = ['-C', base_path] if base_path else [] + tar_cmd = [ + 'tar', '-I gzip -1', '-cvf', archive_path, *base_path_arg, '-T', + temp_file.name + ] subprocess.check_call(tar_cmd) def create_archive(targets: List[str], source_dir: str, destination_dir: str, - platform: str, combine: bool): + platform: str, uber_archive: bool): """Main logic. Collects runtime dependencies from the source directory for each target.""" - # TODO(b/382508397): Remove when dynamically generated. - # Put the test targets in a json file in the archive. - test_target_names = [target.split(':')[1] for target in targets] - test_targets_json = os.path.join(source_dir, 'test_targets.json') - with open(test_targets_json, 'w', encoding='utf-8') as test_targets_file: - test_targets_file.write( - json.dumps({ - 'test_targets': - test_target_names, - 'executables': [ - os.path.join(source_dir, target_name) - for target_name in test_target_names - ] - })) - - deps = set([test_targets_json]) + tar_root = '.' if platform.startswith('android') else source_dir + deps = set() + # TODO(oxv): Make output from build step instead. + # Add test_targets.json to archive so that test runners know what to run. 
+ deps.add(os.path.relpath(os.path.join(tar_root, 'test_targets.json'))) + for target in targets: target_path, target_name = target.split(':') - # These paths are configured in test.gni: + # Paths are configured in test.gni: # https://github.com/youtube/cobalt/blob/main/testing/test.gni if platform.startswith('android'): deps_file = os.path.join( @@ -66,22 +70,40 @@ def create_archive(targets: List[str], source_dir: str, destination_dir: str, deps_file = os.path.join(source_dir, f'{target_name}.runtime_deps') with open(deps_file, 'r', encoding='utf-8') as runtime_deps_file: + # The paths in the runtime_deps files are relative to the out folder. + # Android tests expects files to be relative to the out folder in the + # archive whereas Linux tests expect it relative to the source root. + # TODO(oxv): Pass as argument? target_deps = { - os.path.relpath(os.path.join(source_dir, line.strip())) + os.path.relpath(os.path.join(tar_root, line.strip())) for line in runtime_deps_file + if not any(line.startswith(path) for path in _EXCLUDE_DIRS) } deps |= target_deps - if not combine: + if not uber_archive: output_path = os.path.join(destination_dir, f'{target_name}_deps.tar.gz') - _make_tar(output_path, deps) + base_path = source_dir if platform.startswith('android') else None + _make_tar(output_path, deps, base_path) + deps = set([os.path.relpath(os.path.join(tar_root, 'test_targets.json'))]) - if combine: + if uber_archive: output_path = os.path.join(destination_dir, 'test_artifacts.tar.gz') _make_tar(output_path, deps) -if __name__ == '__main__': +def copy_apks(targets: List[str], source_dir: str, destination_dir: str): + """Copies the target APKs from the source directory to the destination. 
+ The path to the APK in the source directory (assumed here to be the out + directory) is defined in build/config/android/rules.gni + """ + for target in targets: + _, target_name = target.split(':') + apk_path = f'{source_dir}/{target_name}_apk/{target_name}-debug.apk' + shutil.copy2(apk_path, destination_dir) + + +def main(): parser = argparse.ArgumentParser() parser.add_argument( '-s', @@ -103,9 +125,17 @@ def create_archive(targets: List[str], source_dir: str, destination_dir: str, '--targets', required=True, type=lambda arg: arg.split(','), - help='The targets to package, comma-separated. Must be fully qualified ' - 'for android.') + help='The targets to package, comma-separated. Must be fully qualified, ' + 'e.g. path/to:target_name,other/path/to:target_name.') args = parser.parse_args() + uber_archive = args.platform.startswith('linux') create_archive(args.targets, args.source_dir, args.destination_dir, - args.platform, args.platform.startswith('linux')) + args.platform, uber_archive) + + if args.platform.startswith('android'): + copy_apks(args.targets, args.source_dir, args.destination_dir) + + +if __name__ == '__main__': + main() diff --git a/cobalt/build/testing/targets/android-arm/test_targets.json b/cobalt/build/testing/targets/android-arm/test_targets.json index c67c6a0c5b16b..be926ef670826 100644 --- a/cobalt/build/testing/targets/android-arm/test_targets.json +++ b/cobalt/build/testing/targets/android-arm/test_targets.json @@ -1,16 +1,7 @@ { "TODO(b/382508397)": "Remove this when list is dynamically generated.", "test_targets": [ - "base:base_unittests", - "cc:cc_perftests", - "cc:cc_unittests", - "ipc:ipc_tests", - "media/capture:capture_unittests", "media/midi:midi_unittests", - "media:media_unittests", - "mojo:mojo_perftests", - "net:net_unittests", - "services/service_manager/tests:service_manager_unittests", "skia:skia_unittests", "sql:sql_unittests", "url:url_perftests", diff --git a/cobalt/testing/filters/android-arm/sql_unittests_filter.json 
b/cobalt/testing/filters/android-arm/sql_unittests_filter.json new file mode 100644 index 0000000000000..b5fefefe17ef8 --- /dev/null +++ b/cobalt/testing/filters/android-arm/sql_unittests_filter.json @@ -0,0 +1,5 @@ +{ + "failing_tests": [ + "SQLRecoveryTest.Bug387868" + ] +} diff --git a/cobalt/tools/on_device_tests_gateway.proto b/cobalt/tools/on_device_tests_gateway.proto new file mode 100644 index 0000000000000..f817ae82c01a9 --- /dev/null +++ b/cobalt/tools/on_device_tests_gateway.proto @@ -0,0 +1,67 @@ +// Copyright 2022 The Cobalt Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package on_device_tests_gateway; + +// A dumb proxy RPC service that passes user arguments to OmniLab +// to the on-device tests gateway and streams back output in real time. +service on_device_tests_gateway { + // Request to start a test session. + rpc exec_command(OnDeviceTestsCommand) + returns (stream OnDeviceTestsResponse) {} + + // Request to start watching a running test session. + rpc exec_watch_command(OnDeviceTestsWatchCommand) + returns (stream OnDeviceTestsResponse) {} +} + +message OnDeviceTestsCommand { + // Next ID: 25 + string token = 2; + repeated string labels = 8; + repeated TestRequest test_requests = 24; +} + +message TestRequest { + // Next ID: 7 + // Args picked up by MH, e.g. "name1=value", "name2=value". 
+ repeated string test_args = 1; + // Args picked up by the test target "command_line_args=--arg1=value", + // "base_dir=/sdcard". + repeated string test_cmd_args = 2; + // Files to send to device, e.g. "build_apk=/bigstore/bucket/test.apk", + // "test_apk=/bigstore/bucket/test.apk", + repeated string files = 3; + // Args picked up by drivers, decorators, and plugins, e.g. + // "gcs_result_path=gs://some/gcs/path", "log_filename=test_output.txt". + repeated string params = 4; + // "sabrina" or "boreal" + string device_type = 5; + // "shared" or "maneki" + string device_pool = 6; +} + +message OnDeviceTestsWatchCommand { + // Next ID: 6 + string token = 2; + string session_id = 3; + bool dry_run = 5; +} + +message OnDeviceTestsResponse { + // Next ID: 2 + string response = 1; +} diff --git a/cobalt/tools/on_device_tests_gateway_client.py b/cobalt/tools/on_device_tests_gateway_client.py new file mode 100644 index 0000000000000..72890a1eb0a03 --- /dev/null +++ b/cobalt/tools/on_device_tests_gateway_client.py @@ -0,0 +1,330 @@ +#!/usr/bin/env python3 +# +# Copyright 2022 The Cobalt Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""gRPC On-device Tests Gateway client.""" + +import argparse +import json +import logging +import os +import sys +from typing import List + +import grpc +import on_device_tests_gateway_pb2 +import on_device_tests_gateway_pb2_grpc + +_WORK_DIR = '/on_device_tests_gateway' + +# For local testing, set: _ON_DEVICE_TESTS_GATEWAY_SERVICE_HOST = ('localhost') +_ON_DEVICE_TESTS_GATEWAY_SERVICE_HOST = ( + 'on-device-tests-gateway-service.on-device-tests.svc.cluster.local') +_ON_DEVICE_TESTS_GATEWAY_SERVICE_PORT = '50052' + +# These paths are hardcoded in various places. DO NOT CHANGE! +_DIR_ON_DEVICE = '/sdcard/Download' +_DEPS_ARCHIVE = '/sdcard/chromium_tests_root/deps.tar.gz' + + +class OnDeviceTestsGatewayClient(): + """On-device tests Gateway Client class.""" + + def __init__(self): + self.channel = grpc.insecure_channel( + target=f'{_ON_DEVICE_TESTS_GATEWAY_SERVICE_HOST}:{_ON_DEVICE_TESTS_GATEWAY_SERVICE_PORT}', # pylint:disable=line-too-long + # These options need to match server settings. + options=[('grpc.keepalive_time_ms', 10000), + ('grpc.keepalive_timeout_ms', 5000), + ('grpc.keepalive_permit_without_calls', 1), + ('grpc.http2.max_pings_without_data', 0), + ('grpc.http2.min_time_between_pings_ms', 10000), + ('grpc.http2.min_ping_interval_without_data_ms', 5000)]) + self.stub = on_device_tests_gateway_pb2_grpc.on_device_tests_gatewayStub( + self.channel) + + def run_trigger_command(self, token: str, labels: List[str], test_requests): + """Calls On-Device Tests service and passing given parameters to it. + + Args: + args (Namespace): Arguments passed in command line. + test_requests (list): A list of test requests. + """ + for response_line in self.stub.exec_command( + on_device_tests_gateway_pb2.OnDeviceTestsCommand( + token=token, + labels=labels, + test_requests=test_requests, + )): + + print(response_line.response) + + def run_watch_command(self, token: str, session_id: str): + """Calls On-Device Tests watch service and passing given parameters to it. 
+ + Args: + args (Namespace): Arguments passed in command line. + """ + for response_line in self.stub.exec_watch_command( + on_device_tests_gateway_pb2.OnDeviceTestsWatchCommand( + token=token, + session_id=session_id, + )): + + print(response_line.response) + + +def _get_gtest_filters(filter_json_dir, target_name): + """Retrieves gtest filters for a given target. + + Args: + filter_json_dir: Directory containing filter JSON files. + target_name: The name of the gtest target. + + Returns: + A string containing the gtest filters. + """ + gtest_filters = '*' + filter_json_file = os.path.join(filter_json_dir, f'{target_name}_filter.json') + print(f' gtest_filter_json_file = {filter_json_file}') + if os.path.exists(filter_json_file): + with open(filter_json_file, 'r', encoding='utf-8') as f: + filter_data = json.load(f) + print(f' Loaded filter data: {filter_data}') + failing_tests = ':'.join(filter_data.get('failing_tests', [])) + if failing_tests: + gtest_filters = '-' + failing_tests + print(f' gtest_filters = {gtest_filters}') + return gtest_filters + + +def _process_test_requests(args): + """Processes test requests from the given arguments. + + Constructs a list of test requests based on the provided arguments, + including test arguments, command arguments, files, parameters, + and device information. + + Args: + args: The parsed command-line arguments. + + Returns: + A list of test request dictionaries. 
+ """ + test_requests = [] + + for gtest_target in args.targets.split(','): + _, target_name = gtest_target.split(':') + print(f' Processing gtest_target: {gtest_target}') + + tests_args = [ + f'job_timeout_secs={args.job_timeout_secs}', + f'test_timeout_secs={args.test_timeout_secs}', + f'start_timeout_secs={args.start_timeout_secs}' + ] + if args.test_attempts: + tests_args.append(f'test_attempts={args.test_attempts}') + if args.dimension: + tests_args += [f'dimension_{dimension}' for dimension in args.dimension] + + gtest_filter = _get_gtest_filters(args.filter_json_dir, target_name) + command_line_args = ' '.join([ + f'--gtest_output=xml:{_DIR_ON_DEVICE}/{target_name}_result.xml', + f'--gtest_filter={gtest_filter}', + ]) + test_cmd_args = [f'command_line_args={command_line_args}'] + + files = [ + f'test_apk={args.gcs_archive_path}/{target_name}-debug.apk', + f'build_apk={args.gcs_archive_path}/{target_name}-debug.apk', + f'test_runtime_deps={args.gcs_archive_path}/{target_name}_deps.tar.gz', + ] + + params = [] + if args.gcs_result_path: + params.append(f'gcs_result_path={args.gcs_result_path}') + params += [ + f'push_files=test_runtime_deps:{_DEPS_ARCHIVE}', + f'gtest_xml_file_on_device={_DIR_ON_DEVICE}/{target_name}_result.xml', + f'gcs_result_filename={target_name}_result.xml', + f'gcs_log_filename={target_name}_log.txt' + ] + + # TODO(oxv): Figure out how to get dimensions from config to here. 
+ device_type = 'sabrina' + device_pool = 'maneki' + + test_requests.append({ + 'test_args': tests_args, + 'test_cmd_args': test_cmd_args, + 'files': files, + 'params': params, + 'device_type': device_type, + 'device_pool': device_pool, + }) + return test_requests + + +def main() -> int: + """Main routine for the on-device tests gateway client.""" + + logging.basicConfig( + level=logging.INFO, format='[%(filename)s:%(lineno)s] %(message)s') + print('Starting main routine') + + parser = argparse.ArgumentParser( + description='Client for interacting with the On-Device Tests gateway.', + epilog=('Example:' + 'python3 -u cobalt/tools/on_device_tests_gateway_client.py' + '--platform_json "${GITHUB_WORKSPACE}/src/.github/config/' + '${{ matrix.platform}}.json"' + '--filter_json_dir "${GITHUB_WORKSPACE}/src/cobalt/testing/' + '${{ matrix.platform}}"' + '--token ${GITHUB_TOKEN}' + '--label builder-${{ matrix.platform }}' + '--label builder_url-${GITHUB_RUN_URL}' + '--label github' + '--label ${GITHUB_EVENT_NAME}' + '--label ${GITHUB_WORKFLOW}' + '--label actor-${GITHUB_ACTOR}' + '--label actor_id-${GITHUB_ACTOR_ID}' + '--label triggering_actor-${GITHUB_TRIGGERING_ACTOR}' + '--label sha-${GITHUB_SHA}' + '--label repository-${GITHUB_REPO}' + '--label author-${GITHUB_PR_HEAD_USER_LOGIN:-' + '$GITHUB_COMMIT_AUTHOR_USERNAME}' + '--label author_id-${GITHUB_PR_HEAD_USER_ID:-' + '$GITHUB_COMMIT_AUTHOR_EMAIL}' + '--dimension host_name=regex:maneki-mhserver-05.*' + '${DIMENSION:+"--dimension" "$DIMENSION"}' + '${ON_DEVICE_TEST_ATTEMPTS:+"--test_attempts" ' + '"$ON_DEVICE_TEST_ATTEMPTS"}' + '--gcs_archive_path "${GCS_ARTIFACTS_PATH}"' + '--gcs_result_path "${GCS_RESULTS_PATH}"' + 'trigger'), + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + + # Authentication + parser.add_argument( + '-t', + '--token', + type=str, + required=True, + help='On Device Tests authentication token', + ) + subparsers = parser.add_subparsers( + dest='action', help='On-Device tests commands', 
required=True)
+
+  # Trigger command
+  trigger_parser = subparsers.add_parser(
+      'trigger',
+      help='Trigger On-Device tests',
+      formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+  )
+
+  # Group trigger arguments
+  trigger_args = trigger_parser.add_argument_group('Trigger Arguments')
+  trigger_parser.add_argument(
+      '--targets',
+      type=str,
+      required=True,
+      help='List of targets to test, comma separated. Must be fully qualified '
+      'ninja target.',
+  )
+  trigger_parser.add_argument(
+      '--filter_json_dir',
+      type=str,
+      required=True,
+      help='Directory containing filter JSON files for test selection.',
+  )
+  trigger_parser.add_argument(
+      '-l',
+      '--label',
+      type=str,
+      action='append',
+      help='Additional labels to assign to the test.',
+  )
+  trigger_parser.add_argument(
+      '--dimension',
+      type=str,
+      action='append',
+      help='On-Device Tests dimension used to select a device. Must have the '
+      'following form: <dimension>=<value>. E.G. "release_version=regex:10.*"',
+  )
+  trigger_parser.add_argument(
+      '--test_attempts',
+      type=str,
+      default='1',
+      help='The maximum number of times a test can retry.',
+  )
+  trigger_args.add_argument(
+      '-a',
+      '--gcs_archive_path',
+      type=str,
+      required=True,
+      help='Path to Chrobalt archive to be tested. 
Must be on GCS.',
+  )
+  trigger_parser.add_argument(
+      '--gcs_result_path',
+      type=str,
+      help='GCS URL where test result files should be uploaded.',
+  )
+  trigger_parser.add_argument(
+      '--job_timeout_secs',
+      type=str,
+      default='1800',
+      help='Timeout in seconds for the job (default: 1800 seconds).',
+  )
+  trigger_parser.add_argument(
+      '--test_timeout_secs',
+      type=str,
+      default='1800',
+      help='Timeout in seconds for the test (default: 1800 seconds).',
+  )
+  trigger_parser.add_argument(
+      '--start_timeout_secs',
+      type=str,
+      default='180',
+      help='Timeout in seconds for the test to start (default: 180 seconds).',
+  )
+
+  # Watch command
+  watch_parser = subparsers.add_parser(
+      'watch', help='Watch a previously triggered On-Device test')
+  watch_parser.add_argument(
+      'session_id',
+      type=str,
+      help=('Session ID of a previously triggered Mobile Harness test. '
+            'The test will be watched until it completes.'),
+  )
+
+  args = parser.parse_args()
+
+  client = OnDeviceTestsGatewayClient()
+  try:
+    if args.action == 'trigger':
+      test_requests = _process_test_requests(args)  # trigger-only args used
+      client.run_trigger_command(args.token, args.label, test_requests)
+    else:
+      client.run_watch_command(args.token, args.session_id)
+  except grpc.RpcError as e:
+    logging.exception('gRPC error occurred:')  # Log the full traceback
+    return e.code().value  # Return the error code
+
+  return 0  # Indicate successful execution
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/cobalt/tools/requirements.in b/cobalt/tools/requirements.in
new file mode 100644
index 0000000000000..5692669c6a297
--- /dev/null
+++ b/cobalt/tools/requirements.in
@@ -0,0 +1,8 @@
+# IMPORTANT: Remember to also update requirements.txt when updating this file:
+# $ docker run -it --mount type=bind,source=${COBALT_SRC},target=/code -w /code python:3.8 /bin/bash
+# In the container:
+# $ pip3 install pip-tools
+# $ pip-compile --allow-unsafe --generate-hashes path/to/requirements.in
+
+grpcio==1.38.0
+grpcio-tools==1.38.0 diff --git a/cobalt/tools/requirements.txt b/cobalt/tools/requirements.txt new file mode 100644 index 0000000000000..740becbd4410f --- /dev/null +++ b/cobalt/tools/requirements.txt @@ -0,0 +1,148 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes cobalt/tools/requirements.in +# +grpcio==1.38.0 \ + --hash=sha256:0247b045feb7b138754c91efcca9ea7f7d2cc9f0bd2cc73f6588c523f38873c3 \ + --hash=sha256:07abf6b36c138bf247cef7ac0bad9f8ab1c8242f7c1302af23bb8e6877d08163 \ + --hash=sha256:0ab78d4c16d7f3924718711689f5e301aec52cfcf38eb4143bed0f74b7c4fd10 \ + --hash=sha256:10a6c62e0cddd456db91f9f04b53a8cccf67d86d7ca814d989423939099c2723 \ + --hash=sha256:1c11041ecb69d962d49e8a38a35736cdc6fc74230867b5f0ac6138770387a950 \ + --hash=sha256:1d157a2ac6632d31a3ff21f56bbe73420e1d7a21e4fe89d8c7ac792b79c1a589 \ + --hash=sha256:1d212af1008bdbfd4b8a287e17a8e63e14d72ac450475307452f20c1bbb6bae4 \ + --hash=sha256:1e88a8d5d961df958362f61b1b79ad3981a8b051f99224717b09308546298902 \ + --hash=sha256:25028420d004fe33d64015f5d4d97207c53530acdb493310e217fac76dcd2513 \ + --hash=sha256:2736b109ec5bd9fcf580bf871b5fd4f136c6ae9728407f344a3c64ad87bb6519 \ + --hash=sha256:277faad1d8d462bd1b986f43a47a2c2fe795b2e0de72c9318e11826d921e665a \ + --hash=sha256:291dcde4139bc25629de6a743cfcc0ca861e289e3547421ecd2273b51d95b8e1 \ + --hash=sha256:2c26cb7566e8942542ff1aee71f10ed35e2f9ee95c2f27179b198af0727fbebb \ + --hash=sha256:32067791bd56a13614399f1994454afea9e2475019fcabc4abd3112f09892005 \ + --hash=sha256:34fb08d46a70750bef6566c9556a16b98e08af6345a3bad6574477eb0b08c3dd \ + --hash=sha256:3cacfee78310b5a89769b2fac20b8cd723470130f5b1ba0c960da8db39e74a97 \ + --hash=sha256:4a1dd16ccf76ddc18c1cde900049c04ed996e6c02e0588d88d06396c398e6023 \ + --hash=sha256:604da72df5bece8844a15990ce0b3f2f8c5243a1333d3dcc02371048bf6f9269 \ + --hash=sha256:6461d69a8ae20e7abce4c6d9cc2603e9f16f4d6b64865eddd0e664127349c04d \ + 
--hash=sha256:6824567e2372dde1bd70214427d23b709d09f1a02a552133d1e0f504b616c84e \ + --hash=sha256:7466eef3b57b5ac8c7585251b26b917b093ab015750bf98aab4e0836c39e2a2b \ + --hash=sha256:752593a275e26ef250dc4d93a6f7917dd9986396b41eabcc114b0e0315ec1bf6 \ + --hash=sha256:7b74c48b2e41dd506f889a4a9974d40b3eead965b0fd2e0b1d55a9b3c0e3bc6e \ + --hash=sha256:897bcd54890e6ec6359063affb35e19a61a58ba37bc61c9e8ac6b464b854233e \ + --hash=sha256:8c4f39ad529fb4a33cd6e58d1d860c3b583902208547614b4b5b75fc306f13f6 \ + --hash=sha256:924552099365ea1dd32237dc161849452cd567d931efc57e8427260d8f0eacdb \ + --hash=sha256:9a2216df1be9fdbc3c1ebd3c5184d1ef4afb387c29224fce00346b9ddec7c7e3 \ + --hash=sha256:9f1747b81d44daed0649ff10395b58c4f29b03139a628afeb058f3e942ba6893 \ + --hash=sha256:a0d7c88b4cf9748147cd6c16e14569a124b683a3eb5d7787f43eb9d48cf86755 \ + --hash=sha256:a4789014f9d9e9ff29551314a004266b1ac90225c8a009dc87947aaf823fd83c \ + --hash=sha256:a836f21a1d08d28c8344e149b28729649ff4732c318a59a3628451bbd6c3c9ac \ + --hash=sha256:a8f9fcf5623282e4804595166a4ee1401cf4ccfc16fe84bb69e1eb23ffd836ac \ + --hash=sha256:abbf9c8c3df4d5233d5888c6cfa85c1bb68a6923749bd4dd1abc6e1e93986f17 \ + --hash=sha256:ac05434a7a7f444b2ddd109d72f87f4704364be159aea42a04bd6ea2ba6e10e4 \ + --hash=sha256:b4cd8fb4e3725e8852b1da734904edb3579c76660ae26a72283ac580779e5bf0 \ + --hash=sha256:b86a1b0654804b5f2248d9262c77a9d5f19882481fc21df53eb2823d0875b86d \ + --hash=sha256:be83ca2d24321c8bf6516b9cd1064da15ac3ff3978c6c502643be114e2a54af2 \ + --hash=sha256:c47e85eae34af5d17d1c2007a1f0b13a0293d4b7a6d8c8ae23761d718293803e \ + --hash=sha256:cbd2754da81bf5f18454c7808b4afe5b57c6736955a742fb599b32b6353fe99f \ + --hash=sha256:cd220606259f8aa2403bc0f4a4483bae5e36be879364ca3e256f0304ac44f575 \ + --hash=sha256:d3566acd87a65a0bc93875125a7064293ab2b6ffb9327333030939681d269f4f \ + --hash=sha256:d631304e66c908d5d2d1a3cc9c02d372d2f8bed8c3632902d6f3f77d7ca34ac2 \ + --hash=sha256:db01eaea57e7a1898c69271e35a84341cf8150cfdec5f0411eddcfb65b5f590e \ + 
--hash=sha256:e3072b9ebb573fe1f6757a55b610e4975979d2d58247cbe18ff4385f5aaa81a5 \ + --hash=sha256:e72dd202c982a5922c3b846976cae3b699e3fa8d2355d9d5bad119d066cf23ee \ + --hash=sha256:e83ab148911e6c8ae4ec5e1334e6d800c6b84c432b92eb0ebf0808087117cb39 \ + --hash=sha256:f19bd4b5bcf88ee059f478c4ab46a1607f09835587750294038fbd0120f1a9dc \ + --hash=sha256:f2c4ff0e8c98418c5d55c28ba4ff954e3a5d3c723af5008e8d3ddeae8f0ecb41 \ + --hash=sha256:f6f6d51c9efbfe56af9eb9eeb4881cad1b869e4c0e2a32c1d345897fd0979ee3 \ + --hash=sha256:f8dd51b05e7fde843d7a3140b058f02801fbec5784a036d5f6abb374450d4608 \ + --hash=sha256:f9b3678920017842a1b576de3524ecf8f6a2bf4b39f86fb25b870693141e0584 + # via + # -r cobalt/tools/requirements.in + # grpcio-tools +grpcio-tools==1.38.0 \ + --hash=sha256:041ee3c4c4ec048029db07981c8255bcf26ab3b8436e372e8f7c8b1258d8b44a \ + --hash=sha256:08b824a18d79dca6613ed53c7792ffd87899ea26639b5ce7eb83fc9295a6c7dd \ + --hash=sha256:08f0a888e9d5a2f0a1fa35b052d240a391e832a07be025dced713f92070407bf \ + --hash=sha256:0c5b459cf65be3a11a2222a5a200962fb50475585d31921bd1679c579590e188 \ + --hash=sha256:0eb8408aad583dce6e2d35b078b03d0720227c3d35322f4326936c820d4c0587 \ + --hash=sha256:0efe2ded3ce6301aff1cfb1103f75206f2dffd518f63879fe24a9833cd35ce38 \ + --hash=sha256:17e0c6543b573d66f2b35b7b4b80f201edce6ea8defc2bcdcc8a061609b51456 \ + --hash=sha256:18d7d198a6a43ce2907724c224ed66a1e6660975d4ce8eb5cb0d155cdc5eb78e \ + --hash=sha256:1be89cc16be3984b43e40ea456e9e5428561987e99da2d79c1fc2629e87b6438 \ + --hash=sha256:2cad9aef5edf0b40e16403627ea9ce186facfc4e26936ee467593116d7db15a2 \ + --hash=sha256:405a70b315970523c35bb487b1533a22ff4a126c649f074b4df49cb1e304561d \ + --hash=sha256:410495b05ebc822895078f7aad593009f0f65dec5133eadb18cf3d8d2c226ade \ + --hash=sha256:4bbfb71f37c72d0b1abac210ba5dc5780a34c065e67f2f7a2f1dc84e800b0ca3 \ + --hash=sha256:54ed34d2a45a62b9709615cea4df306128d7f42d95394b9728cc4bb3487a23bd \ + --hash=sha256:556ffa61c4f705e5fd230b629f5bdd3c4d6ae654299baea037d68b341e8d6525 \ + 
--hash=sha256:56754f7b1a570ef0f49181ce5d0403817dd7d375bbd208f2743a25e9eef361fd \ + --hash=sha256:57798ceae33bcc215aacc2174dfc548904ac7727cb2193d7547aeb1d1149d35c \ + --hash=sha256:58058dddb7d362249bbc3ac6d5c62837b050471c5b524e2b39a839607f4e1f21 \ + --hash=sha256:5a159a359588bb8130871b4bb8d7b89457babc82fc196a0e794c0d630358da7a \ + --hash=sha256:61fd0659fe3a67154906364e8e78837bbda23c3a9d18c11f9bb1e17c0efdf6df \ + --hash=sha256:76f3301f45d7913480a166d472969f8328f37f12f59eb9c9c993905199d4e6a2 \ + --hash=sha256:7a1e2e52a3b6e1c8e1762fc7e270d8d9a08b15201661fde94ba7ad66ca5d6c98 \ + --hash=sha256:7ae7be0f0b5d698859a606118dcd3b74ced56a45f906c99549b31882ecfd2bcd \ + --hash=sha256:8d4a1f889b36148295bb1a86e5423ad69d942230561752fc3ecfcc5c1ad29f86 \ + --hash=sha256:92b85f78efb90d2f78e6e0aa1a380c159eac9b0b88c425b436d7ac744cf6a30d \ + --hash=sha256:9bc75135df871347899cf9f03d6a2dd8bcd69350fd75f02911c88a070ccd8465 \ + --hash=sha256:9cb182b23a2e0a20d3f23cc6f84a692b773e4e24976c78a7d53b537d5f43f34a \ + --hash=sha256:a9a7afbdedee4735dfc1848f24a8c8c421d70f8f84529bbf30f2ffabf4ecf212 \ + --hash=sha256:adaa7dcdcfa168a24237c55444979e8bc2cab5aa20c8c64495ac0d029879feab \ + --hash=sha256:adb0d92ba1031e23feade7189d741d824cbc64dea256ae2d3498e88d4c64b632 \ + --hash=sha256:b4fb8779fca9b80f91ca2cb6060837992edf87d7dd459d570adf8ddc0ca56352 \ + --hash=sha256:b76e8ec283b1ca162f23a9c8cf4a4797bd9904c79523bb28e336b4b112855386 \ + --hash=sha256:b9e4df167e4e9ede2b14c4e9aa0c7f540b96ad80e875cdffa322938515baaf40 \ + --hash=sha256:ba11763a9436093f6855b39cfc2a6b2bc33e4176fe32015bcb3855bb48f2524a \ + --hash=sha256:ba596f23d4659d984be3a8c8b739a49bf79a7d3007b62f724b36272a6d60d292 \ + --hash=sha256:c0dbfa1a9b09743255298c4aaf8872076aafc3c8accc6f1d61a2eb126970cbc7 \ + --hash=sha256:c1d04e2900a5c14051e29479ff161b0b7decaea7e6f4a3e217cbf41edf698856 \ + --hash=sha256:c609678cf6faf4b467259757f848de4fbc7baca3c1e3f7b0e55c701c50fd94ae \ + --hash=sha256:c7941214cdb6b2863d4fab467aa48fdffc7d2d0ae2bd9036cdd134572cf1abfa \ + 
--hash=sha256:cabf621285501d64b5b8e202f290a6bd260081bbcce0457ddd60351ad11c3408 \ + --hash=sha256:cacdd2b7f41026c89321373a4b235243d1613b92a750fdc1ba7f48edfe809b76 \ + --hash=sha256:cf35e7d64a2e2faba0c041f2b4f715c138952b9f2fb5bb1a60a46d558f4113d4 \ + --hash=sha256:d6146277dbd6b8da6ce8518496d064f1de5d92210fbb0efae0bcf32fa193d987 \ + --hash=sha256:d8b25bfcde255bb391488cf463bb2dd64fa5d01f4650463640f780817a5e43b5 \ + --hash=sha256:dd0dafe045cdbff9bc32ae833853aa480ee0458c79bb7f10d2d6302b33914cd8 \ + --hash=sha256:ddcc0371875cb6c2728b4faff1a0b32b2294c004828d813a9cc69cb2b69c517e \ + --hash=sha256:e27b5b69b5756c6966606b30f6f5380a91050fe43048ceb3934c136ffe622e5e \ + --hash=sha256:e375a117fb5272e09faf138f08540d6d51e8d90e6983008dadd70affd195b30c \ + --hash=sha256:e59cd98e3d33a3a9561a032d885ba7871bf7403afdd05dede360f74c58db19ca \ + --hash=sha256:ec7917310360b8ddecb737d62af4810c44d9b674c3e38ef507670ef933ebd4b4 \ + --hash=sha256:fa1098f49ccab3f06ee9533a9caa82ba02fe90d8461b6c35b983173ad3dcd6fa + # via -r cobalt/tools/requirements.in +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + 
--hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee + # via grpcio-tools +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via grpcio + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.3.0 \ + --hash=sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd \ + --hash=sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686 + # via grpcio-tools diff --git a/docker-compose.yaml b/docker-compose.yaml index 656d567a5ec33..1bd9a0de3d133 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -4,8 +4,8 @@ services: build: context: cobalt/docker/linux cache_from: - - ghcr.io/youtube/cobalt/linux:latest - image: ghcr.io/youtube/cobalt/linux:latest + - ghcr.io/youtube/cobalt_sandbox/linux:latest + image: ghcr.io/youtube/cobalt_sandbox/linux:latest platform: linux/amd64 environment: - DEPOT_TOOLS_UPDATE=0