diff --git a/.github/actions/artifact_failure/action.yml b/.github/actions/artifact_failure/action.yml index d83c76229..071fe0917 100644 --- a/.github/actions/artifact_failure/action.yml +++ b/.github/actions/artifact_failure/action.yml @@ -16,7 +16,8 @@ runs: lsof +D `pwd` || true killall sccache || true killall sccache-dist || true - + # possible temp dirs for either linux or windows + cp "${TMP:-${TEMP:-${TMPDIR:-/tmp}}}"/sccache_*.txt . 2>/dev/null || true tar --exclude='target' \ --exclude='docs' \ --exclude='bins' \ @@ -25,6 +26,4 @@ runs: - uses: actions/upload-artifact@v3 with: name: ${{ inputs.name }} - path: | - target/failure-${{ inputs.name }}.tar.gz - /tmp/sccache_*.txt + path: target/failure-${{ inputs.name }}.tar.gz diff --git a/.github/actions/nvcc-toolchain/action.yml b/.github/actions/nvcc-toolchain/action.yml new file mode 100644 index 000000000..9154b9419 --- /dev/null +++ b/.github/actions/nvcc-toolchain/action.yml @@ -0,0 +1,16 @@ +name: nvcc-toolchain +inputs: + cuda-version: + description: CUDA Toolkit version + required: true + +runs: + using: composite + steps: + - if: runner.os == 'Linux' + shell: bash + run: .github/actions/nvcc-toolchain/install-cuda.sh ${{ inputs.cuda-version }} + + - if: runner.os == 'Windows' + shell: powershell + run: .\.github\actions\nvcc-toolchain\install-cuda.ps1 -cudaVersion ${{ inputs.cuda-version }} diff --git a/.github/actions/nvcc-toolchain/install-cuda.ps1 b/.github/actions/nvcc-toolchain/install-cuda.ps1 new file mode 100644 index 000000000..f98b2b40d --- /dev/null +++ b/.github/actions/nvcc-toolchain/install-cuda.ps1 @@ -0,0 +1,60 @@ +Param( + [Parameter(Mandatory=$false)] + [string] + $cudaVersion="12.6.0" +) + +# Use System.Version to tokenize version +$version = [Version]$cudaVersion + +$major = $version.Major +$minor = $version.Minor +$build = $version.Build + +# Minimum build is 0, not -1 as default in case "12.5" is passed +if ($build -lt 0) { + $build = 0 +} + +# mmb == major minor build 
+$mmbVersionTag = "${major}.${minor}.${build}" +# mm = major minor +$mmVersionTag = "${major}.${minor}" + +$cudaVersionUrl = "https://developer.download.nvidia.com/compute/cuda/${mmbVersionTag}/network_installers/cuda_${mmbVersionTag}_windows_network.exe" + +### +# `cuda_${mmbVersionTag}_windows_network.exe` name only valid back to CUDA v11.5.1. +# Before that it was named `cuda_${mmbVersionTag}_win10_network.exe`: +# * https://developer.download.nvidia.com/compute/cuda/11.5.1/network_installers/cuda_11.5.1_windows_network.exe +# * https://developer.download.nvidia.com/compute/cuda/11.5.0/network_installers/cuda_11.5.0_win10_network.exe +### + +if ([version]$mmbVersionTag -le "11.5.0") { + $cudaVersionUrl = "https://developer.download.nvidia.com/compute/cuda/${mmbVersionTag}/network_installers/cuda_${mmbVersionTag}_win10_network.exe" +} + +$cudaComponents = + "nvcc_$mmVersionTag", + "curand_$mmVersionTag", + "curand_dev_$mmVersionTag", + "cudart_$mmVersionTag", + "cupti_$mmVersionTag", + "nvrtc_$mmVersionTag", + "nvrtc_dev_$mmVersionTag", + "nvml_dev_$mmVersionTag", + "nvtx_$mmVersionTag" + +Invoke-WebRequest -Uri "$cudaVersionUrl" -OutFile "./cuda_network.exe" -UseBasicParsing +Start-Process -Wait -PassThru -FilePath .\cuda_network.exe -ArgumentList "-s $cudaComponents" + +$ENV:PATH="$ENV:PATH;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v$mmVersionTag\bin" +$ENV:CUDA_PATH="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v$mmVersionTag" + +$PATH_STR="PATH=$ENV:PATH" +$PATH_STR | Out-File -Append $ENV:GITHUB_ENV + +$CUDA_PATH_STR="CUDA_PATH=$ENV:CUDA_PATH" +$CUDA_PATH_STR | Out-File -Append $ENV:GITHUB_ENV + +Remove-Item .\cuda_network.exe diff --git a/.github/actions/nvcc-toolchain/install-cuda.sh b/.github/actions/nvcc-toolchain/install-cuda.sh new file mode 100755 index 000000000..8a9e76177 --- /dev/null +++ b/.github/actions/nvcc-toolchain/install-cuda.sh @@ -0,0 +1,72 @@ +#! 
/usr/bin/env bash +set -eu + +export DEBIAN_FRONTEND=noninteractive + +get_cuda_deb() { + local deb="$( \ + wget --no-hsts -q -O- "${1}/Packages" \ + | grep -P "^Filename: \./${2}(.*)\.deb$" \ + | sort -Vr | head -n1 | cut -d' ' -f2 \ + )"; + if [ -z "$deb" ]; then + echo "Error: No matching .deb found for '${1}' and '${2}'" >&2 + return 1 + fi + wget --no-hsts -q -O "/tmp/${deb#./}" "${1}/${deb#./}"; + echo -n "/tmp/${deb#./}"; +} + +VERSION="$1"; + +NVARCH="$(uname -p)"; + +if test "$NVARCH" = aarch64; then + NVARCH="sbsa"; +fi + +OSNAME="$( + . /etc/os-release; + major="$(cut -d'.' -f1 <<< "${VERSION_ID}")"; + minor="$(cut -d'.' -f2 <<< "${VERSION_ID}")"; + echo "$ID$((major - (major % 2)))${minor}"; +)"; + +CUDA_HOME="/usr/local/cuda"; + +cuda_repo_base="https://developer.download.nvidia.com/compute/cuda/repos"; +cuda_repo="${cuda_repo_base}/${OSNAME}/${NVARCH}"; + +cuda_ver="$VERSION"; +cuda_ver="$(grep -Po '^[0-9]+\.[0-9]+' <<< "${cuda_ver}")"; +cuda_ver="${cuda_ver/./-}"; + +if ! dpkg -s cuda-keyring; then + sudo apt-get install -y --no-install-recommends \ + "$(get_cuda_deb "${cuda_repo}" cuda-keyring)" \ + ; +fi + +PKGS=(); +PKGS+=("cuda-toolkit-${cuda_ver}"); + +sudo apt-get update; +sudo apt-get install -y --no-install-recommends "${PKGS[@]}"; + +if ! 
test -L "${CUDA_HOME}"; then + # Create /usr/local/cuda symlink + sudo ln -s "${CUDA_HOME}-${cuda_ver}" "${CUDA_HOME}"; +fi + +export PATH="$PATH:$CUDA_HOME/bin" + +which -a nvcc +nvcc --version + +cat </dev/null 2>&1; then + sudo apt remove -y gcc-14 g++-14 + sudo apt autoremove -y + fi + # Ubuntu20.04's clang-10 is too old for CTK 11+, so install clang-12 instead + if test "${{ matrix.os }}" = "ubuntu-20.04" && test -n "${{ matrix.cuda }}"; then + sudo apt install -y --no-install-recommends gcc clang-12 + sudo ln -sf $(which clang-12) /usr/bin/clang + sudo ln -sf $(which clang++-12) /usr/bin/clang++ + else + sudo apt install -y --no-install-recommends gcc clang + fi + echo 'gcc version:' + gcc --version + echo 'clang version:' + clang --version + + - if: matrix.cuda != '' && contains(fromJSON('["Linux", "Windows"]'), runner.os) + name: Install nvcc + uses: ./.github/actions/nvcc-toolchain + with: + cuda-version: ${{ matrix.cuda }} - name: Build tests run: cargo test --no-run --locked --all-targets ${{ matrix.extra_args }} @@ -160,6 +222,9 @@ jobs: - os: windows-2019 target: x86_64-pc-windows-msvc rustflags: -Ctarget-feature=+crt-static + - os: windows-2019 + target: aarch64-pc-windows-msvc + rustflags: -Ctarget-feature=+crt-static steps: - name: Clone repository uses: actions/checkout@v4 @@ -203,7 +268,8 @@ jobs: fail-fast: false matrix: include: - - os: ubuntu-20.04 + - os: ubuntu-22.04 + cuda: "11.8" rustc: nightly allow_failure: true extra_args: --features=unstable @@ -228,6 +294,12 @@ jobs: run: sudo apt-get install -y clang gcc if: ${{ matrix.os == 'ubuntu-20.04' }} + - if: matrix.cuda != '' && contains(fromJSON('["Linux", "Windows"]'), runner.os) + name: Install nvcc + uses: ./.github/actions/nvcc-toolchain + with: + cuda-version: ${{ matrix.cuda }} + - name: "`grcov` ~ install" run: cargo install grcov @@ -236,7 +308,7 @@ jobs: env: CARGO_INCREMENTAL: "0" RUSTC_WRAPPER: "" - RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code 
-Coverflow-checks=off" + RUSTFLAGS: "-Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Cprofile-generate=target/debug" - name: Generate coverage data (via `grcov`) id: coverage @@ -255,7 +327,7 @@ jobs: echo "report=${COVERAGE_REPORT_FILE}" >> $GITHUB_OUTPUT - name: Upload coverage results (to Codecov.io) - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: file: ${{ steps.coverage.outputs.report }} ## flags: IntegrationTests, UnitTests, ${{ steps.vars.outputs.CODECOV_FLAGS }} @@ -264,7 +336,7 @@ jobs: fail_ci_if_error: false test_freebsd: - name: test freebsd-13.2 rust stable + name: test freebsd-14.1 rust stable runs-on: ${{ matrix.job.os }} timeout-minutes: 70 strategy: @@ -275,7 +347,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Prepare, build and test - uses: vmactions/freebsd-vm@v1.0.7 + uses: vmactions/freebsd-vm@v1 with: mem: 8192 usesh: true @@ -323,7 +395,7 @@ jobs: if: failure() uses: ./.github/actions/artifact_failure with: - name: test-freebsd-13.2-stable + name: test-freebsd-14.1-stable release: name: release diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 08fc28938..7399e08cb 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -1,5 +1,5 @@ name: integration-tests -on: [push, pull_request] +on: [ push, pull_request ] env: RUST_BACKTRACE: full @@ -594,6 +594,7 @@ jobs: env: LLVM_VERSION: "16" SCCACHE_GHA_ENABLED: "on" + SCCACHE_SERVER_UDS: "\\x00sccache.socket" steps: - uses: actions/checkout@v4 @@ -715,6 +716,7 @@ jobs: env: SCCACHE_GHA_ENABLED: "on" + SCCACHE_SERVER_UDS: "/home/runner/sccache.socket" steps: - uses: actions/checkout@v4 @@ -867,17 +869,30 @@ jobs: ${SCCACHE_PATH} --show-stats | grep -e "Cache hits\s*[1-9]" - rust-test-coverage: + # The test cargo "cargo build -Zprofile" + rust-test-Z-profile: runs-on: ubuntu-latest needs: build env: RUSTC_WRAPPER: /home/runner/.cargo/bin/sccache 
CARGO_INCREMENTAL: "0" - RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort" + RUSTFLAGS: "-Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort -Zprofile" RUSTDOCFLAGS: "-Cpanic=abort" + # The last nightly rust that still support "-Zprofile" + # + # See https://github.com/rust-lang/rust/pull/131829 + RUST_TEST_TOOLCHAIN: nightly-2024-11-01 steps: + - name: Clone repository + uses: actions/checkout@v4 + + - name: Install rust + uses: ./.github/actions/rust-toolchain + with: + toolchain: ${{ env.RUST_TEST_TOOLCHAIN }} + - uses: actions/download-artifact@v4 with: name: integration-tests @@ -885,27 +900,20 @@ jobs: - name: Chmod for binary run: chmod +x ${SCCACHE_PATH} - - name: Prepare - run: | - rustup toolchain install nightly - cargo new coverage-test - cd coverage-test - echo "serde = { version = \"1.0\", features = [\"derive\"] }" >> Cargo.toml - - name: "Coverage test #1" - working-directory: ./coverage-test - run: cargo clean && cargo +nightly test + run: cargo +${{ env.RUST_TEST_TOOLCHAIN }} clean && cargo +${{ env.RUST_TEST_TOOLCHAIN }} build - name: Output - run: ${SCCACHE_PATH} --show-stats + run: | + ${SCCACHE_PATH} --show-stats - name: "Coverage test #2" - working-directory: ./coverage-test - run: cargo clean && cargo +nightly test + run: cargo +${{ env.RUST_TEST_TOOLCHAIN }} clean && cargo +${{ env.RUST_TEST_TOOLCHAIN }} build - name: Output run: | ${SCCACHE_PATH} --show-stats + ${SCCACHE_PATH} --show-stats | grep -e "Cache hits\s*[1-9]" zstd-compression-level: @@ -958,3 +966,44 @@ jobs: cargo clean && cargo build - name: lv10-stats-use run: ${SCCACHE_PATH} --show-stats + + xcode: + runs-on: macos-latest + env: + SCCACHE_PATH: target/debug/sccache + steps: + - name: Clone repository + uses: actions/checkout@v4 + + - name: Install rust + uses: ./.github/actions/rust-toolchain + with: + toolchain: "stable" + + - name: Build 
sccache + run: | + cargo build + + - name: Start server + run: ${SCCACHE_PATH} --start-server + + - name: Test compile xcode + working-directory: tests/xcode + run: | + xcodebuild -version + xcodebuild -xcconfig sccache.xcconfig + + - name: Output + run: | + ${SCCACHE_PATH} --show-stats + + - name: Test compile xcode cached + working-directory: tests/xcode + run: | + xcodebuild clean + xcodebuild -xcconfig sccache.xcconfig + + - name: Output + run: | + ${SCCACHE_PATH} --show-stats + ${SCCACHE_PATH} --show-stats | grep -e "Cache hits\s*[1-9]" diff --git a/Cargo.lock b/Cargo.lock index fc255b955..8a79ec0ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -117,9 +117,9 @@ checksum = "d67af77d68a931ecd5cbd8a3b5987d63a1d1d1278f7f6a60ae33db485cdebb69" [[package]] name = "arc-swap" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayref" @@ -156,9 +156,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -179,13 +179,12 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backon" -version = "0.4.4" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d67782c3f868daa71d3533538e98a8e13713231969def7536e8039606fc46bf0" +checksum = "e4fa97bb310c33c811334143cf64c5bb2b7b3c06e453db6b095d7061eff8f113" dependencies = [ "fastrand", - "futures-core", - "pin-project", + "gloo-timers", "tokio", ] @@ -343,9 +342,9 @@ dependencies = [ [[package]] name = "cc" -version = 
"1.0.83" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" dependencies = [ "jobserver", "libc", @@ -949,6 +948,18 @@ version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "gzp" version = "0.11.3" @@ -1016,9 +1027,9 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -1146,7 +1157,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", "tower-service", "tracing", @@ -1183,11 +1194,11 @@ dependencies = [ "http 1.1.0", "hyper 1.1.0", "hyper-util", - "rustls 0.23.10", - "rustls-native-certs 0.7.0", + "rustls", + "rustls-native-certs", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls", "tower-service", "webpki-roots", ] @@ -1234,7 +1245,7 @@ dependencies = [ "http-body 1.0.0", "hyper 1.1.0", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", "tower", "tower-service", @@ -1328,9 +1339,9 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] @@ -1426,9 +1437,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "match_cfg" @@ -1488,13 +1499,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.11" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ + "hermit-abi", "libc", "wasi", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1575,11 +1587,10 @@ checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -1609,11 +1620,10 @@ checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] @@ -1630,9 +1640,9 @@ dependencies = [ [[package]] name = 
"num-traits" -version = "0.2.17" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -1682,9 +1692,9 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opendal" -version = "0.47.3" +version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac4826fe3d5482a49b92955b0f6b06ce45b46ec84484176588209bfbf996870" +checksum = "213222b6c86949314d8f51acb26d8241e7c8dd0879b016a79471d49f21ee592f" dependencies = [ "anyhow", "async-trait", @@ -1702,7 +1712,7 @@ dependencies = [ "md-5", "once_cell", "percent-encoding", - "quick-xml", + "quick-xml 0.36.1", "redis", "reqsign", "reqwest 0.12.5", @@ -1843,18 +1853,18 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", @@ -1976,9 +1986,19 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quick-xml" -version = "0.31.0" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"86e446ed58cef1bbfe847bc2fda0e2e4ea9f0e57b90c507d4781292590d72a4e" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quick-xml" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc" dependencies = [ "memchr", "serde", @@ -1994,8 +2014,8 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash", - "rustls 0.23.10", + "rustc-hash 1.1.0", + "rustls", "thiserror", "tokio", "tracing", @@ -2003,15 +2023,15 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.3" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" dependencies = [ "bytes", "rand", "ring", - "rustc-hash", - "rustls 0.23.10", + "rustc-hash 2.0.0", + "rustls", "slab", "thiserror", "tinyvec", @@ -2026,7 +2046,7 @@ checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" dependencies = [ "libc", "once_cell", - "socket2 0.5.5", + "socket2", "tracing", "windows-sys 0.52.0", ] @@ -2072,9 +2092,9 @@ dependencies = [ [[package]] name = "redis" -version = "0.23.3" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" +checksum = "81cccf17a692ce51b86564334614d72dcae1def0fd5ecebc9f02956da74352b5" dependencies = [ "arc-swap", "async-trait", @@ -2085,17 +2105,20 @@ dependencies = [ "futures-util", "itoa", "log", + "num-bigint", "percent-encoding", "pin-project-lite", "rand", - "rustls 0.21.11", - "rustls-native-certs 0.6.3", + "rustls", + "rustls-native-certs", + "rustls-pemfile", + "rustls-pki-types", "ryu", "sha1_smol", - "socket2 0.4.10", + "socket2", 
"tokio", - "tokio-retry", - "tokio-rustls 0.24.1", + "tokio-retry2", + "tokio-rustls", "tokio-util", "url", ] @@ -2151,9 +2174,9 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "reqsign" -version = "0.15.2" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70fe66d4cd0b5ed9b1abbfe639bf6baeaaf509f7da2d51b31111ba945be59286" +checksum = "03dd4ba7c3901dd43e6b8c7446a760d45bc1ea4301002e1a6fa48f97c3a796fa" dependencies = [ "anyhow", "async-trait", @@ -2169,7 +2192,7 @@ dependencies = [ "log", "once_cell", "percent-encoding", - "quick-xml", + "quick-xml 0.35.0", "rand", "reqwest 0.12.5", "rsa", @@ -2248,9 +2271,9 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.10", - "rustls-native-certs 0.7.0", - "rustls-pemfile 2.1.2", + "rustls", + "rustls-native-certs", + "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", @@ -2259,7 +2282,7 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.0", + "tokio-rustls", "tokio-util", "tower-service", "url", @@ -2360,6 +2383,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc_version" version = "0.4.0" @@ -2382,18 +2411,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "rustls" -version = "0.21.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" -dependencies = [ - "log", - "ring", - "rustls-webpki 0.101.7", - "sct", -] - [[package]] name = "rustls" version = "0.23.10" @@ -2403,23 +2420,11 @@ dependencies = [ 
"once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.102.4", + "rustls-webpki", "subtle", "zeroize", ] -[[package]] -name = "rustls-native-certs" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" -dependencies = [ - "openssl-probe", - "rustls-pemfile 1.0.4", - "schannel", - "security-framework", -] - [[package]] name = "rustls-native-certs" version = "0.7.0" @@ -2427,21 +2432,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.2", + "rustls-pemfile", "rustls-pki-types", "schannel", "security-framework", ] -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - [[package]] name = "rustls-pemfile" version = "2.1.2" @@ -2458,16 +2454,6 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "rustls-webpki" version = "0.102.4" @@ -2531,7 +2517,7 @@ dependencies = [ [[package]] name = "sccache" -version = "0.8.1" +version = "0.9.1" dependencies = [ "anyhow", "ar", @@ -2558,7 +2544,6 @@ dependencies = [ "http-body-util", "hyper 1.1.0", "hyper-util", - "is-terminal", "itertools", "jobserver", "jsonwebtoken", @@ -2588,6 +2573,7 @@ dependencies = [ "serde_json", "serial_test", "sha2", + "shlex", "strip-ansi-escapes", "syslog", "tar", @@ -2605,7 
+2591,7 @@ dependencies = [ "version-compare", "walkdir", "which", - "winapi", + "windows-sys 0.52.0", "zip", "zstd", ] @@ -2636,16 +2622,6 @@ dependencies = [ "sha2", ] -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "sdd" version = "0.2.0" @@ -2798,6 +2774,12 @@ dependencies = [ "digest", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -2844,16 +2826,6 @@ version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.5" @@ -3216,28 +3188,27 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.37.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", @@ -3255,33 +3226,23 @@ dependencies = [ ] [[package]] -name = "tokio-retry" -version = "0.3.0" +name = "tokio-retry2" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +checksum = "903934dba1c4c2f2e9cb460ef10b5695e0b0ecad3bf9ee7c8675e540c5e8b2d1" dependencies = [ "pin-project", "rand", "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.11", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.10", + "rustls", "rustls-pki-types", "tokio", ] @@ -3690,15 +3651,14 @@ dependencies = [ [[package]] name = "which" -version = "6.0.0" +version = "6.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c" +checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" dependencies = [ "either", "home", - "once_cell", "rustix", - "windows-sys 0.52.0", + "winsafe", ] [[package]] @@ -3719,11 +3679,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + 
"windows-sys 0.52.0", ] [[package]] @@ -3902,6 +3862,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + [[package]] name = "xattr" version = "1.2.0" diff --git a/Cargo.toml b/Cargo.toml index 47f1090ab..d29cdbbb1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,7 @@ edition = "2021" name = "sccache" rust-version = "1.75.0" -version = "0.8.1" +version = "0.9.1" categories = ["command-line-utilities", "development-tools::build-utils"] description = "Sccache is a ccache-like tool. It is used as a compiler wrapper and avoids compilation when possible. Sccache has the capability to utilize caching in remote storage environments, including various cloud storage options, or alternatively, in local storage." @@ -53,22 +53,24 @@ hyper-util = { version = "0.1.3", optional = true, features = [ "tokio", "server", ] } -is-terminal = "0.4.12" +itertools = "0.12" jobserver = "0.1" jwt = { package = "jsonwebtoken", version = "9", optional = true } libc = "0.2.153" linked-hash-map = "0.5" log = "0.4" memchr = "2" +memmap2 = "0.9.4" mime = "0.3" num_cpus = "1.16" number_prefix = "0.4" +object = "0.32" once_cell = "1.19" -opendal = { version = "0.47.3", optional = true, default-features = false } +opendal = { version = "0.50.1", optional = true, default-features = false } openssl = { version = "0.10.64", optional = true } rand = "0.8.4" regex = "1.10.3" -reqsign = { version = "0.15.2", optional = true } +reqsign = { version = "0.16.0", optional = true } reqwest = { version = "0.12", features = [ "json", "blocking", @@ -82,6 +84,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = { version = "0.10.8", optional = true } +shlex = "1.3.0" strip-ansi-escapes = "0.2" tar = "0.4.40" tempfile = "3" @@ -106,7 +109,6 @@ zip = { version = "0.6", 
default-features = false } zstd = "0.13" # dist-server only -memmap2 = "0.9.4" nix = { version = "0.28.0", optional = true, features = [ "mount", "user", @@ -114,7 +116,6 @@ nix = { version = "0.28.0", optional = true, features = [ "signal", "process", ] } -object = "0.32" rouille = { version = "3.6", optional = true, default-features = false, features = [ "ssl", ] } @@ -140,16 +141,15 @@ daemonize = "0.5" optional = true version = "0.1.15" -[target.'cfg(windows)'.dependencies.winapi] +[target.'cfg(windows)'.dependencies.windows-sys] features = [ - "fileapi", - "handleapi", - "stringapiset", - "winnls", - "processenv", - "std", + "Win32_Foundation", + "Win32_Globalization", + "Win32_Storage_FileSystem", + "Win32_System_Threading", + "Win32_System_Console", ] -version = "0.3" +version = "0.52" [features] all = [ @@ -163,16 +163,16 @@ all = [ "webdav", "oss", ] -azure = ["opendal/services-azblob", "reqsign"] +azure = ["opendal/services-azblob", "reqsign", "reqwest"] default = ["all"] -gcs = ["opendal/services-gcs", "reqsign", "url", "reqwest/blocking"] -gha = ["opendal/services-ghac"] +gcs = ["opendal/services-gcs", "reqsign", "url", "reqwest"] +gha = ["opendal/services-ghac", "reqwest"] memcached = ["opendal/services-memcached"] native-zlib = [] -oss = ["opendal/services-oss", "reqsign"] +oss = ["opendal/services-oss", "reqsign", "reqwest"] redis = ["url", "opendal/services-redis"] s3 = ["opendal/services-s3", "reqsign", "reqwest"] -webdav = ["opendal/services-webdav"] +webdav = ["opendal/services-webdav", "reqwest"] # Enable features that will build a vendored version of openssl and # statically linked with it, instead of linking against the system-wide openssl # dynamically or statically. 
diff --git a/README.md b/README.md index 8fa5a6e6c..c582dc725 100644 --- a/README.md +++ b/README.md @@ -116,6 +116,13 @@ If you don't [specify otherwise](#storage-options), sccache will use a local dis sccache works using a client-server model, where the server runs locally on the same machine as the client. The client-server model allows the server to be more efficient by keeping some state in memory. The sccache command will spawn a server process if one is not already running, or you can run `sccache --start-server` to start the background server process without performing any compilation. +By default sccache server will listen on `127.0.0.1:4226`, you can specify environment variable `SCCACHE_SERVER_PORT` to use a different port or `SCCACHE_SERVER_UDS` to listen on unix domain socket. Abstract unix socket is also supported as long as the path is escaped following the [format](https://doc.rust-lang.org/std/ascii/fn.escape_default.html). For example: + +``` +% env SCCACHE_SERVER_UDS=$HOME/sccache.sock sccache --start-server # unix socket +% env SCCACHE_SERVER_UDS=\\x00sccache.sock sccache --start-server # abstract unix socket +``` + You can run `sccache --stop-server` to terminate the server. It will also terminate after (by default) 10 minutes of inactivity. Running `sccache --show-stats` will print a summary of cache statistics. @@ -284,6 +291,10 @@ Known Caveats * Symbolic links to sccache won't work. Use hardlinks: `ln sccache /usr/local/bin/cc` +### User Agent + +* Requests sent to your storage option of choice will have a user agent header indicating the current sccache version, e.g. `sccache/0.8.2`. 
+ Storage Options --------------- diff --git a/docs/Caching.md b/docs/Caching.md index 31a0f4c71..619605c30 100644 --- a/docs/Caching.md +++ b/docs/Caching.md @@ -18,7 +18,8 @@ In parallel, we also take into account in the hash: * digests of all the shared libraries in rustc's $sysroot/lib * A shared, caching reader for rlib dependencies (for dist-client) * Parsed arguments from the rustc invocation - See https://github.com/mozilla/sccache/blob/8567bbe2ba493153e76177c1f9a6f98cc7ba419f/src/compiler/rust.rs#L122 for the full list + +See https://github.com/mozilla/sccache/blob/8567bbe2ba493153e76177c1f9a6f98cc7ba419f/src/compiler/rust.rs#L122 for the full list ### C/C++ compiler @@ -42,6 +43,7 @@ We also take into account in the hash: * Whether the compilation is generating profiling or coverage data * Color mode * Environment variables + See https://github.com/mozilla/sccache/blob/8567bbe2ba493153e76177c1f9a6f98cc7ba419f/src/compiler/c.rs#L84 ### C/C++ preprocessor diff --git a/docs/DistributedFreeBSD.md b/docs/DistributedFreeBSD.md index adc696558..d23614100 100644 --- a/docs/DistributedFreeBSD.md +++ b/docs/DistributedFreeBSD.md @@ -25,7 +25,7 @@ It's up to the user to create the reference pot that serves as a template to clone from when instantiating image and build containers, e.g.: ```sh -pot create -p sccache-template -N alias -i "lo0|127.0.0.2" -t single -b 13.2 +pot create -p sccache-template -N alias -i "lo0|127.0.0.2" -t single -b 14.1 pot set-cmd -p sccache-template -c /usr/bin/true pot set-attr -p sccache-template -A no-rc-script -V YES pot snapshot -p sccache-template diff --git a/docs/GHA.md b/docs/GHA.md index 6b62e88a5..c92c627ad 100644 --- a/docs/GHA.md +++ b/docs/GHA.md @@ -8,7 +8,7 @@ This cache type will need tokens like `ACTIONS_CACHE_URL` and `ACTIONS_RUNTIME_T ```yaml - name: Configure sccache - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: script: | core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL 
|| ''); diff --git a/docs/Rust.md b/docs/Rust.md index 9b805d7b1..ef266e726 100644 --- a/docs/Rust.md +++ b/docs/Rust.md @@ -7,6 +7,5 @@ sccache includes support for caching Rust compilation. This includes many caveat * Compilation from stdin is not supported, a source file must be provided. * Values from `env!` require Rust >= 1.46 to be tracked in caching. * Procedural macros that read files from the filesystem may not be cached properly -* Target specs aren't hashed (e.g. custom target specs) If you are using Rust 1.18 or later, you can ask cargo to wrap all compilation with sccache by setting `RUSTC_WRAPPER=sccache` in your build environment. diff --git a/docs/Webdav.md b/docs/Webdav.md index 526777c5f..22767dc59 100644 --- a/docs/Webdav.md +++ b/docs/Webdav.md @@ -1,17 +1,18 @@ # WebDAV -Set `SCCACHE_WEBDAV_ENDPOINT` to a webdav service endpoint to store cache in a webdav service. Set `SCCACHE_WEBDAV_KEY_PREFIX` to specify the key prefix of cache. - -The webdav cache is compatible with: +Users can configure sccache to cache incremental build artifacts in a remote WebDAV service. +The following services all expose a WebDAV interface and can be used as a backend: - [Ccache HTTP storage backend](https://ccache.dev/manual/4.7.4.html#_http_storage_backend) - [Bazel Remote Caching](https://bazel.build/remote/caching). - [Gradle Build Cache](https://docs.gradle.org/current/userguide/build_cache.html) -Users can set `SCCACHE_WEBDAV_ENDPOINT` to those services directly. +Set `SCCACHE_WEBDAV_ENDPOINT` to an appropriate webdav service endpoint to enable remote caching. +Set `SCCACHE_WEBDAV_KEY_PREFIX` to specify the key prefix of cache. ## Credentials Sccache is able to load credentials from the following sources: + - Set `SCCACHE_WEBDAV_USERNAME`/`SCCACHE_WEBDAV_PASSWORD` to specify the username/password pair for basic authentication. - Set `SCCACHE_WEBDAV_TOKEN` to specify the token value for bearer token authentication. 
diff --git a/docs/Xcode.md b/docs/Xcode.md new file mode 100644 index 000000000..92de2dac8 --- /dev/null +++ b/docs/Xcode.md @@ -0,0 +1,98 @@ +# Using `sccache` with Xcode + +It is possible to use `sccache` with Xcode with some setup. + +### Running the daemon +Before building, you need to run the daemon outside of Xcode. This needs to be done because if `sccache` invocation happens to implicitly start the server daemon, the Xcode build will hang on the `sccache` invocation, waiting for the process to idle timeout. + +You can do this in another terminal windows by calling +```sh +SCCACHE_LOG=info SCCACHE_START_SERVER=1 SCCACHE_NO_DAEMON=1 sccache +``` + +Or by setting it up in a `launchd` configuration, perhaps as `~/Library/LaunchAgents/sccache.plist` (note the paths in the plist): +```xml + + + + + Label + sccache.server + ProgramArguments + + /path/to/sccache + + EnvironmentVariables + + SCCACHE_START_SERVER + 1 + SCCACHE_NO_DAEMON + 1 + SCCACHE_IDLE_TIMEOUT + 0 + SCCACHE_LOG + info + + + StandardOutPath + /tmp/sccache.log + StandardErrorPath + /tmp/sccache.log + + + +``` + +### Setting it up for `xcodebuild` + +Xcode seems to support barely documented `C_COMPILER_LAUNCHER` attribute, for +having a custom launcher program. + +Then you can invoke `xcodebuild` like so +```sh +xcodebuild C_COMPILER_LAUNCHER=sccache + CLANG_ENABLE_MODULES=NO + COMPILER_INDEX_STORE_ENABLE=NO + CLANG_USE_RESPONSE_FILE=NO +``` +Where the additional arguments are for disabling some features that `sccache` can't cache currently. 
+ +These build settings can also be put in a xcconfig file, like `sccache.xcconfig` +``` +C_COMPILER_LAUNCHER=sccache +CLANG_ENABLE_MODULES=NO +COMPILER_INDEX_STORE_ENABLE=NO +CLANG_USE_RESPONSE_FILE=NO +``` +Which can then be invoked with +```sh +xcodebuild -xcconfig sccache.xcconfig +``` + + +### Setting it up for `cmake` Xcode generator +While `cmake` has the convenient `CMAKE__COMPILER_LAUNCHER` for prepending tools like `sccache`, it is not supported for the Xcode generator. + +But you can configuring it directly with something like +```cmake + +# This bit before the first `project()`, as the COMPILER_LAUNCHER variables are read in then +if(DEFINED CCACHE) + find_program(CCACHE_EXE ${CCACHE} REQUIRED) + if(NOT CMAKE_GENERATOR STREQUAL "Xcode") + # Support for other generators should work with these + set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_EXE}") + set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_EXE}") + else() + # And this should work for Xcode generator + set(CMAKE_XCODE_ATTRIBUTE_C_COMPILER_LAUNCHER ${CCACHE_EXE}) + set(CMAKE_XCODE_ATTRIBUTE_CLANG_ENABLE_MODULES "NO") + set(CMAKE_XCODE_ATTRIBUTE_COMPILER_INDEX_STORE_ENABLE "NO") + set(CMAKE_XCODE_ATTRIBUTE_CLANG_USE_RESPONSE_FILE "NO") + endif() +endif() +``` +Then configuring with `-DCCACHE=sccache` should work on all generators. + + + diff --git a/scripts/freebsd-ci-test.sh b/scripts/freebsd-ci-test.sh index 716a606ff..a670c95df 100755 --- a/scripts/freebsd-ci-test.sh +++ b/scripts/freebsd-ci-test.sh @@ -24,8 +24,8 @@ # $HOME/.potcache # # mkdir $HOME/.potcache -# fetch -o $HOME/.potcache/13.2-RELEASE_base.txz \ -# https://ftp.freebsd.org/pub/FreeBSD/releases/amd64/13.2-RELEASE/base.txz +# fetch -o $HOME/.potcache/14.1-RELEASE_base.txz \ +# https://ftp.freebsd.org/pub/FreeBSD/releases/amd64/14.1-RELEASE/base.txz # # This script can be run from a github action. 
When run locally, make # sure to install the required packages: diff --git a/src/bin/sccache-dist/build.rs b/src/bin/sccache-dist/build.rs index 815674661..33f9f6d3c 100644 --- a/src/bin/sccache-dist/build.rs +++ b/src/bin/sccache-dist/build.rs @@ -231,7 +231,7 @@ impl OverlayBuilder { for (tc, _) in entries { warn!("Removing old un-compressed toolchain: {:?}", tc); assert!(toolchain_dir_map.remove(tc).is_some()); - fs::remove_dir_all(&self.dir.join("toolchains").join(&tc.archive_id)) + fs::remove_dir_all(self.dir.join("toolchains").join(&tc.archive_id)) .context("Failed to remove old toolchain directory")?; } } diff --git a/src/cache/azure.rs b/src/cache/azure.rs index 6faca3db1..9b8003225 100644 --- a/src/cache/azure.rs +++ b/src/cache/azure.rs @@ -20,13 +20,16 @@ use opendal::services::Azblob; use crate::errors::*; +use super::http_client::set_user_agent; + pub struct AzureBlobCache; impl AzureBlobCache { pub fn build(connection_string: &str, container: &str, key_prefix: &str) -> Result { - let mut builder = Azblob::from_connection_string(connection_string)?; - builder.container(container); - builder.root(key_prefix); + let builder = Azblob::from_connection_string(connection_string)? + .container(container) + .root(key_prefix) + .http_client(set_user_agent()); let op = Operator::new(builder)? .layer(LoggingLayer::default()) diff --git a/src/cache/cache.rs b/src/cache/cache.rs index 6c3887847..7072d4e9e 100644 --- a/src/cache/cache.rs +++ b/src/cache/cache.rs @@ -44,6 +44,7 @@ use crate::config::Config; use crate::config::{self, CacheType}; use async_trait::async_trait; use fs_err as fs; + use serde::{Deserialize, Serialize}; use std::fmt; use std::io::{self, Cursor, Read, Seek, Write}; @@ -101,6 +102,8 @@ pub enum Cache { Hit(CacheRead), /// Result was not found in cache. Miss, + /// Do not cache the results of the compilation. + None, /// Cache entry should be ignored, force compilation. 
Recache, } @@ -110,6 +113,7 @@ impl fmt::Debug for Cache { match *self { Cache::Hit(_) => write!(f, "Cache::Hit(...)"), Cache::Miss => write!(f, "Cache::Miss"), + Cache::None => write!(f, "Cache::None"), Cache::Recache => write!(f, "Cache::Recache"), } } diff --git a/src/cache/gcs.rs b/src/cache/gcs.rs index 79fbbd4ce..6adcc570a 100644 --- a/src/cache/gcs.rs +++ b/src/cache/gcs.rs @@ -22,6 +22,8 @@ use reqwest::Client; use serde::Deserialize; use url::Url; +use super::http_client::set_user_agent; + fn rw_to_scope(mode: CacheMode) -> &'static str { match mode { CacheMode::ReadOnly => "https://www.googleapis.com/auth/devstorage.read_only", @@ -42,24 +44,25 @@ impl GCSCache { rw_mode: CacheMode, credential_url: Option<&str>, ) -> Result { - let mut builder = Gcs::default(); - builder.bucket(bucket); - builder.root(key_prefix); - builder.scope(rw_to_scope(rw_mode)); + let mut builder = Gcs::default() + .bucket(bucket) + .root(key_prefix) + .scope(rw_to_scope(rw_mode)) + .http_client(set_user_agent()); if let Some(service_account) = service_account { - builder.service_account(service_account); + builder = builder.service_account(service_account); } if let Some(path) = cred_path { - builder.credential_path(path); + builder = builder.credential_path(path); } if let Some(cred_url) = credential_url { let _ = Url::parse(cred_url) .map_err(|err| anyhow!("gcs credential url is invalid: {err:?}"))?; - builder.customed_token_loader(Box::new(TaskClusterTokenLoader { + builder = builder.customized_token_loader(Box::new(TaskClusterTokenLoader { scope: rw_to_scope(rw_mode).to_string(), url: cred_url.to_string(), })); diff --git a/src/cache/gha.rs b/src/cache/gha.rs index 8d8373430..1053b1c60 100644 --- a/src/cache/gha.rs +++ b/src/cache/gha.rs @@ -19,24 +19,27 @@ use opendal::Operator; use crate::errors::*; use crate::VERSION; +use super::http_client::set_user_agent; + /// A cache that stores entries in GHA Cache Services. 
pub struct GHACache; impl GHACache { pub fn build(version: &str) -> Result { - let mut builder = Ghac::default(); - // This is the prefix of gha cache. - // From user side, cache key will be like `sccache/f/c/b/fcbxxx` - // - // User customization is theoretically supported, but I decided - // to see the community feedback first. - builder.root("/sccache"); + let mut builder = Ghac::default() + // This is the prefix of gha cache. + // From user side, cache key will be like `sccache/f/c/b/fcbxxx` + // + // User customization is theoretically supported, but I decided + // to see the community feedback first. + .root("/sccache") + .http_client(set_user_agent()); - if version.is_empty() { - builder.version(&format!("sccache-v{VERSION}")); + builder = if version.is_empty() { + builder.version(&format!("sccache-v{VERSION}")) } else { - builder.version(&format!("sccache-v{VERSION}-{version}")); - } + builder.version(&format!("sccache-v{VERSION}-{version}")) + }; let op = Operator::new(builder)? 
.layer(LoggingLayer::default()) diff --git a/src/cache/http_client.rs b/src/cache/http_client.rs new file mode 100644 index 000000000..35b689f50 --- /dev/null +++ b/src/cache/http_client.rs @@ -0,0 +1,9 @@ +use opendal::raw::HttpClient; +use reqwest::ClientBuilder; + +/// Set the user agent (helps with monitoring on the server side) +pub fn set_user_agent() -> HttpClient { + let user_agent = format!("{}/{}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); + let client = ClientBuilder::new().user_agent(user_agent).build().unwrap(); + HttpClient::with(client) +} diff --git a/src/cache/memcached.rs b/src/cache/memcached.rs index b5735fc90..f01ca48bc 100644 --- a/src/cache/memcached.rs +++ b/src/cache/memcached.rs @@ -32,18 +32,18 @@ impl MemcachedCache { key_prefix: &str, expiration: u32, ) -> Result { - let mut builder = Memcached::default(); - builder.endpoint(url); + let mut builder = Memcached::default().endpoint(url); if let Some(username) = username { - builder.username(username); + builder = builder.username(username); } if let Some(password) = password { - builder.password(password); + builder = builder.password(password); } - builder.root(key_prefix); - builder.default_ttl(Duration::from_secs(expiration.into())); + builder = builder + .root(key_prefix) + .default_ttl(Duration::from_secs(expiration.into())); let op = Operator::new(builder)? 
.layer(LoggingLayer::default()) diff --git a/src/cache/mod.rs b/src/cache/mod.rs index 8ed327fb1..a55c0c9ba 100644 --- a/src/cache/mod.rs +++ b/src/cache/mod.rs @@ -33,4 +33,14 @@ pub mod s3; #[cfg(feature = "webdav")] pub mod webdav; +#[cfg(any( + feature = "azure", + feature = "gcs", + feature = "gha", + feature = "s3", + feature = "webdav", + feature = "oss" +))] +pub(crate) mod http_client; + pub use crate::cache::cache::*; diff --git a/src/cache/oss.rs b/src/cache/oss.rs index e98e8ccc2..7b4f16945 100644 --- a/src/cache/oss.rs +++ b/src/cache/oss.rs @@ -16,6 +16,8 @@ use opendal::Operator; use crate::errors::*; +use super::http_client::set_user_agent; + pub struct OSSCache; // Implement the Object Storage Service for Alibaba cloud @@ -26,18 +28,19 @@ impl OSSCache { endpoint: Option<&str>, no_credentials: bool, ) -> Result { - let mut builder = Oss::default(); - builder.bucket(bucket); - builder.root(key_prefix); + let mut builder = Oss::default() + .bucket(bucket) + .root(key_prefix) + .http_client(set_user_agent()); if let Some(endpoint) = endpoint { - builder.endpoint(endpoint); + builder = builder.endpoint(endpoint); } if no_credentials { // Allow anonymous access to OSS so that OpenDAL will not // throw error when no credentials are provided. - builder.allow_anonymous(); + builder = builder.allow_anonymous(); } let op = Operator::new(builder)? 
diff --git a/src/cache/redis.rs b/src/cache/redis.rs index 23c60b638..215bdc7ab 100644 --- a/src/cache/redis.rs +++ b/src/cache/redis.rs @@ -29,20 +29,20 @@ impl RedisCache { pub fn build_from_url(url: &str, key_prefix: &str, ttl: u64) -> Result { let parsed = Url::parse(url)?; - let mut builder = Redis::default(); - builder.endpoint(parsed.as_str()); - builder.username(parsed.username()); - builder.password(parsed.password().unwrap_or_default()); - builder.root(key_prefix); + let mut builder = Redis::default() + .endpoint(parsed.as_str()) + .username(parsed.username()) + .password(parsed.password().unwrap_or_default()) + .root(key_prefix); if ttl != 0 { - builder.default_ttl(Duration::from_secs(ttl)); + builder = builder.default_ttl(Duration::from_secs(ttl)); } let options: HashMap<_, _> = parsed .query_pairs() .map(|(k, v)| (k.to_string(), v.to_string())) .collect(); - builder.db(options + builder = builder.db(options .get("db") .map(|v| v.parse().unwrap_or_default()) .unwrap_or_default()); @@ -62,8 +62,7 @@ impl RedisCache { key_prefix: &str, ttl: u64, ) -> Result { - let mut builder = Redis::default(); - builder.endpoint(endpoint); + let builder = Redis::default().endpoint(endpoint); Self::build_common(builder, username, password, db, key_prefix, ttl) } @@ -77,8 +76,7 @@ impl RedisCache { key_prefix: &str, ttl: u64, ) -> Result { - let mut builder = Redis::default(); - builder.cluster_endpoints(endpoints); + let builder = Redis::default().cluster_endpoints(endpoints); Self::build_common(builder, username, password, db, key_prefix, ttl) } @@ -91,13 +89,14 @@ impl RedisCache { key_prefix: &str, ttl: u64, ) -> Result { - builder.username(username.unwrap_or_default()); - builder.password(password.unwrap_or_default()); - builder.root(key_prefix); + builder = builder + .username(username.unwrap_or_default()) + .password(password.unwrap_or_default()) + .db(db.into()) + .root(key_prefix); if ttl != 0 { - builder.default_ttl(Duration::from_secs(ttl)); + builder = 
builder.default_ttl(Duration::from_secs(ttl)); } - builder.db(db.into()); let op = Operator::new(builder)? .layer(LoggingLayer::default()) diff --git a/src/cache/s3.rs b/src/cache/s3.rs index f721177a8..1c7f1ddd9 100644 --- a/src/cache/s3.rs +++ b/src/cache/s3.rs @@ -11,13 +11,13 @@ // limitations under the License. use opendal::layers::LoggingLayer; -use opendal::raw::HttpClient; use opendal::services::S3; use opendal::Operator; -use reqwest::ClientBuilder; use crate::errors::*; +use super::http_client::set_user_agent; + pub struct S3Cache; impl S3Cache { @@ -33,10 +33,10 @@ impl S3Cache { access_key_id: Option<&str>, secret_access_key: Option<&str>, ) -> Result { - let mut builder = S3::default(); - builder.http_client(set_user_agent()); - builder.bucket(bucket); - builder.root(key_prefix); + let mut builder = S3::default() + .http_client(set_user_agent()) + .bucket(bucket) + .root(key_prefix); match (access_key_id, secret_access_key) { (Some(access_key_id), Some(secret_access_key)) => { @@ -50,27 +50,28 @@ impl S3Cache { } if let Some(region) = region { - builder.region(region); + builder = builder.region(region); } if no_credentials { - builder.disable_config_load(); - // Disable EC2 metadata to avoid OpenDAL trying to load - // credentials from EC2 metadata. - // - // A.k.a, don't try to visit `http://169.254.169.254` - builder.disable_ec2_metadata(); - // Allow anonymous access to S3 so that OpenDAL will not - // throw error when no credentials are provided. - builder.allow_anonymous(); + builder = builder + .disable_config_load() + // Disable EC2 metadata to avoid OpenDAL trying to load + // credentials from EC2 metadata. + // + // A.k.a, don't try to visit `http://169.254.169.254` + .disable_ec2_metadata() + // Allow anonymous access to S3 so that OpenDAL will not + // throw error when no credentials are provided. 
+ .allow_anonymous(); } if let Some(endpoint) = endpoint { - builder.endpoint(&endpoint_resolver(endpoint, use_ssl)?); + builder = builder.endpoint(&endpoint_resolver(endpoint, use_ssl)?); } if server_side_encryption.unwrap_or_default() { - builder.server_side_encryption_with_s3_key(); + builder = builder.server_side_encryption_with_s3_key(); } let op = Operator::new(builder)? @@ -80,13 +81,6 @@ impl S3Cache { } } -/// Set the user agent (helps with monitoring on the server side) -fn set_user_agent() -> HttpClient { - let user_agent = format!("{}/{}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); - let client_builder = ClientBuilder::new().user_agent(user_agent); - HttpClient::build(client_builder).unwrap() -} - /// Resolve given endpoint along with use_ssl settings. fn endpoint_resolver(endpoint: &str, use_ssl: Option) -> Result { let endpoint_uri: http::Uri = endpoint diff --git a/src/cache/webdav.rs b/src/cache/webdav.rs index f9c0ea56f..1abf211bd 100644 --- a/src/cache/webdav.rs +++ b/src/cache/webdav.rs @@ -15,6 +15,8 @@ use opendal::layers::LoggingLayer; use opendal::services::Webdav; use opendal::Operator; +use super::http_client::set_user_agent; + /// A cache that stores entries in a Webdav. pub struct WebdavCache; @@ -27,18 +29,13 @@ impl WebdavCache { password: Option<&str>, token: Option<&str>, ) -> Result { - let mut builder = Webdav::default(); - builder.endpoint(endpoint); - builder.root(key_prefix); - if let Some(username) = username { - builder.username(username); - } - if let Some(password) = password { - builder.password(password); - } - if let Some(token) = token { - builder.token(token); - } + let builder = Webdav::default() + .endpoint(endpoint) + .root(key_prefix) + .username(username.unwrap_or_default()) + .password(password.unwrap_or_default()) + .token(token.unwrap_or_default()) + .http_client(set_user_agent()); let op = Operator::new(builder)? 
.layer(LoggingLayer::default()) diff --git a/src/client.rs b/src/client.rs index 51a6a1fe3..f13ec2be7 100644 --- a/src/client.rs +++ b/src/client.rs @@ -13,28 +13,28 @@ // limitations under the License. use crate::errors::*; +use crate::net::Connection; use crate::protocol::{Request, Response}; use crate::util; use byteorder::{BigEndian, ByteOrder}; use retry::{delay::Fixed, retry}; use std::io::{self, BufReader, BufWriter, Read}; -use std::net::TcpStream; /// A connection to an sccache server. pub struct ServerConnection { /// A reader for the socket connected to the server. - reader: BufReader, + reader: BufReader>, /// A writer for the socket connected to the server. - writer: BufWriter, + writer: BufWriter>, } impl ServerConnection { /// Create a new connection using `stream`. - pub fn new(stream: TcpStream) -> io::Result { - let writer = stream.try_clone()?; + pub fn new(conn: Box) -> io::Result { + let write_conn = conn.try_clone()?; Ok(ServerConnection { - reader: BufReader::new(stream), - writer: BufWriter::new(writer), + reader: BufReader::new(conn), + writer: BufWriter::new(write_conn), }) } @@ -62,24 +62,24 @@ impl ServerConnection { } } -/// Establish a TCP connection to an sccache server listening on `port`. -pub fn connect_to_server(port: u16) -> io::Result { - trace!("connect_to_server({})", port); - let stream = TcpStream::connect(("127.0.0.1", port))?; - ServerConnection::new(stream) +/// Establish a TCP connection to an sccache server listening on `addr`. +pub fn connect_to_server(addr: &crate::net::SocketAddr) -> io::Result { + trace!("connect_to_server({addr})"); + let conn = crate::net::connect(addr)?; + ServerConnection::new(conn) } -/// Attempt to establish a TCP connection to an sccache server listening on `port`. +/// Attempt to establish a TCP connection to an sccache server listening on `addr`. /// /// If the connection fails, retry a few times. 
-pub fn connect_with_retry(port: u16) -> io::Result { - trace!("connect_with_retry({})", port); +pub fn connect_with_retry(addr: &crate::net::SocketAddr) -> io::Result { + trace!("connect_with_retry({addr})"); // TODOs: // * Pass the server Child in here, so we can stop retrying // if the process exited. // * Send a pipe handle to the server process so it can notify // us once it starts the server instead of us polling. - match retry(Fixed::from_millis(500).take(10), || connect_to_server(port)) { + match retry(Fixed::from_millis(500).take(10), || connect_to_server(addr)) { Ok(conn) => Ok(conn), Err(e) => Err(io::Error::new( io::ErrorKind::TimedOut, diff --git a/src/commands.rs b/src/commands.rs index ab740582e..4de22e758 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -25,11 +25,10 @@ use crate::util::daemonize; use byteorder::{BigEndian, ByteOrder}; use fs::{File, OpenOptions}; use fs_err as fs; -use is_terminal::IsTerminal; use log::Level::Trace; use std::env; use std::ffi::{OsStr, OsString}; -use std::io::{self, Write}; +use std::io::{self, IsTerminal, Write}; #[cfg(unix)] use std::os::unix::process::ExitStatusExt; use std::path::Path; @@ -50,11 +49,18 @@ pub const DEFAULT_PORT: u16 = 4226; const SERVER_STARTUP_TIMEOUT: Duration = Duration::from_millis(10000); /// Get the port on which the server should listen. 
-fn get_port() -> u16 { - env::var("SCCACHE_SERVER_PORT") +fn get_addr() -> crate::net::SocketAddr { + #[cfg(unix)] + if let Ok(addr) = env::var("SCCACHE_SERVER_UDS") { + if let Ok(uds) = crate::net::SocketAddr::parse_uds(&addr) { + return uds; + } + } + let port = env::var("SCCACHE_SERVER_PORT") .ok() .and_then(|s| s.parse().ok()) - .unwrap_or(DEFAULT_PORT) + .unwrap_or(DEFAULT_PORT); + crate::net::SocketAddr::with_port(port) } /// Check if ignoring all response errors @@ -133,11 +139,10 @@ fn redirect_stderr(f: File) { #[cfg(windows)] fn redirect_stderr(f: File) { use std::os::windows::io::IntoRawHandle; - use winapi::um::processenv::SetStdHandle; - use winapi::um::winbase::STD_ERROR_HANDLE; + use windows_sys::Win32::System::Console::{SetStdHandle, STD_ERROR_HANDLE}; // Ignore errors here. unsafe { - SetStdHandle(STD_ERROR_HANDLE, f.into_raw_handle()); + SetStdHandle(STD_ERROR_HANDLE, f.into_raw_handle() as _); } } @@ -176,11 +181,10 @@ fn run_server_process(startup_timeout: Option) -> Result) -> Result() as DWORD; + si.cb = mem::size_of::() as _; if unsafe { CreateProcessW( exe.as_mut_ptr(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), - FALSE, + 0, CREATE_UNICODE_ENVIRONMENT | CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW, - envp.as_mut_ptr() as LPVOID, + envp.as_mut_ptr().cast(), workdir.as_ptr(), - &mut si, + &si, &mut pi, - ) == TRUE + ) != 0 } { unsafe { CloseHandle(pi.hProcess); @@ -295,28 +299,27 @@ fn run_server_process(startup_timeout: Option) -> Result, ) -> Result { - trace!("connect_or_start_server({})", port); - match connect_to_server(port) { + trace!("connect_or_start_server({addr})"); + match connect_to_server(addr) { Ok(server) => Ok(server), Err(ref e) - if e.kind() == io::ErrorKind::ConnectionRefused - || e.kind() == io::ErrorKind::TimedOut => + if (e.kind() == io::ErrorKind::ConnectionRefused + || e.kind() == io::ErrorKind::TimedOut) + || (e.kind() == io::ErrorKind::NotFound && addr.is_unix_path()) => { // If the connection was refused we 
probably need to start // the server. match run_server_process(startup_timeout)? { - ServerStartup::Ok { port: actualport } => { - if port != actualport { + ServerStartup::Ok { addr: actual_addr } => { + if addr.to_string() != actual_addr { // bail as the next connect_with_retry will fail bail!( - "sccache: Listening on port {} instead of {}", - actualport, - port + "sccache: Listening on address {actual_addr} instead of {addr}" ); } } @@ -326,7 +329,7 @@ fn connect_or_start_server( ServerStartup::TimedOut => bail!("Timed out waiting for server startup. Maybe the remote service is unreachable?\nRun with SCCACHE_LOG=debug SCCACHE_NO_DAEMON=1 to get more information"), ServerStartup::Err { reason } => bail!("Server startup failed: {}\nRun with SCCACHE_LOG=debug SCCACHE_NO_DAEMON=1 to get more information", reason), } - let server = connect_with_retry(port)?; + let server = connect_with_retry(addr)?; Ok(server) } Err(e) => Err(e.into()), @@ -616,7 +619,7 @@ pub fn run_command(cmd: Command) -> Result { match cmd { Command::ShowStats(fmt, advanced) => { trace!("Command::ShowStats({:?})", fmt); - let stats = match connect_to_server(get_port()) { + let stats = match connect_to_server(&get_addr()) { Ok(srv) => request_stats(srv).context("failed to get stats from server")?, // If there is no server, spawning a new server would start with zero stats // anyways, so we can just return (mostly) empty stats directly. 
@@ -660,7 +663,7 @@ pub fn run_command(cmd: Command) -> Result { // We aren't asking for a log file daemonize()?; } - server::start_server(config, get_port())?; + server::start_server(config, &get_addr())?; } Command::StartServer => { trace!("Command::StartServer"); @@ -668,10 +671,8 @@ pub fn run_command(cmd: Command) -> Result { let startup = run_server_process(startup_timeout).context("failed to start server process")?; match startup { - ServerStartup::Ok { port } => { - if port != DEFAULT_PORT { - println!("sccache: Listening on port {}", port); - } + ServerStartup::Ok { addr } => { + println!("sccache: Listening on address {addr}"); } ServerStartup::TimedOut => bail!("Timed out waiting for server startup"), ServerStartup::AddrInUse => bail!("Server startup failed: Address in use"), @@ -681,13 +682,13 @@ pub fn run_command(cmd: Command) -> Result { Command::StopServer => { trace!("Command::StopServer"); println!("Stopping sccache server..."); - let server = connect_to_server(get_port()).context("couldn't connect to server")?; + let server = connect_to_server(&get_addr()).context("couldn't connect to server")?; let stats = request_shutdown(server)?; stats.print(false); } Command::ZeroStats => { trace!("Command::ZeroStats"); - let conn = connect_or_start_server(get_port(), startup_timeout)?; + let conn = connect_or_start_server(&get_addr(), startup_timeout)?; request_zero_stats(conn).context("couldn't zero stats on server")?; eprintln!("Statistics zeroed."); } @@ -749,7 +750,7 @@ pub fn run_command(cmd: Command) -> Result { ), Command::DistStatus => { trace!("Command::DistStatus"); - let srv = connect_or_start_server(get_port(), startup_timeout)?; + let srv = connect_or_start_server(&get_addr(), startup_timeout)?; let status = request_dist_status(srv).context("failed to get dist-status from server")?; serde_json::to_writer(&mut io::stdout(), &status)?; @@ -760,7 +761,7 @@ pub fn run_command(cmd: Command) -> Result { trace!("Command::PackageToolchain({})", 
executable.display()); let runtime = Runtime::new()?; - let jobserver = unsafe { Client::new() }; + let jobserver = Client::new(); let creator = ProcessCommandCreator::new(&jobserver); let args: Vec<_> = env::args_os().collect(); let env: Vec<_> = env::vars_os().collect(); @@ -786,8 +787,8 @@ pub fn run_command(cmd: Command) -> Result { env_vars, } => { trace!("Command::Compile {{ {:?}, {:?}, {:?} }}", exe, cmdline, cwd); - let jobserver = unsafe { Client::new() }; - let conn = connect_or_start_server(get_port(), startup_timeout)?; + let jobserver = Client::new(); + let conn = connect_or_start_server(&get_addr(), startup_timeout)?; let mut runtime = Runtime::new()?; let res = do_compile( ProcessCommandCreator::new(&jobserver), diff --git a/src/compiler/c.rs b/src/compiler/c.rs index 2badc6f71..c4f1cfc85 100644 --- a/src/compiler/c.rs +++ b/src/compiler/c.rs @@ -105,6 +105,8 @@ pub struct ParsedArguments { pub arch_args: Vec, /// Commandline arguments for the preprocessor or the compiler that don't affect the computed hash. pub unhashed_args: Vec, + /// Extra unhashed files that need to be sent along with dist compiles. + pub extra_dist_files: Vec, /// Extra files that need to have their contents hashed. pub extra_hash_files: Vec, /// Whether or not the `-showIncludes` argument is passed on MSVC @@ -151,8 +153,12 @@ pub enum CCompilerKind { Diab, /// Microsoft Visual C++ Msvc, - /// NVIDIA cuda compiler + /// NVIDIA CUDA compiler Nvcc, + /// NVIDIA CUDA optimizer and PTX generator + Cicc, + /// NVIDIA CUDA PTX assembler + Ptxas, /// NVIDIA hpc c, c++ compiler Nvhpc, /// Tasking VX @@ -173,6 +179,7 @@ pub trait CCompilerImpl: Clone + fmt::Debug + Send + Sync + 'static { &self, arguments: &[OsString], cwd: &Path, + env_vars: &[(OsString, OsString)], ) -> CompilerArguments; /// Run the C preprocessor with the specified set of arguments. 
#[allow(clippy::too_many_arguments)] @@ -191,7 +198,7 @@ pub trait CCompilerImpl: Clone + fmt::Debug + Send + Sync + 'static { T: CommandCreatorSync; /// Generate a command that can be used to invoke the C compiler to perform /// the compilation. - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -199,7 +206,13 @@ pub trait CCompilerImpl: Clone + fmt::Debug + Send + Sync + 'static { cwd: &Path, env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)>; + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync; } impl CCompiler @@ -296,7 +309,7 @@ impl Compiler for CCompiler { cwd: &Path, env_vars: &[(OsString, OsString)], ) -> CompilerArguments + 'static>> { - match self.compiler.parse_arguments(arguments, cwd) { + match self.compiler.parse_arguments(arguments, cwd, env_vars) { CompilerArguments::Ok(mut args) => { // Handle SCCACHE_EXTRAFILES for (k, v) in env_vars.iter() { @@ -356,7 +369,7 @@ where rewrite_includes_only: bool, storage: Arc, cache_control: CacheControl, - ) -> Result { + ) -> Result> { let start_of_compilation = std::time::SystemTime::now(); let CCompilerHasher { parsed_args, @@ -390,11 +403,38 @@ where arg ); } + + let use_preprocessor_cache_mode = { + let can_use_preprocessor_cache_mode = !may_dist + && preprocessor_cache_mode_config.use_preprocessor_cache_mode + && !too_hard_for_preprocessor_cache_mode; + + let mut use_preprocessor_cache_mode = can_use_preprocessor_cache_mode; + + // Allow overrides from the env + for (key, val) in env_vars.iter() { + if key == "SCCACHE_DIRECT" { + if let Some(val) = val.to_str() { + use_preprocessor_cache_mode = match val.to_lowercase().as_str() { + "false" | "off" | "0" => false, + _ => can_use_preprocessor_cache_mode, + }; + } + break; + } + } + + if can_use_preprocessor_cache_mode && !use_preprocessor_cache_mode { + debug!( + 
"parse_arguments: Disabling preprocessor cache because SCCACHE_DIRECT=false" + ); + } + + use_preprocessor_cache_mode + }; + // Disable preprocessor cache when doing distributed compilation - let mut preprocessor_key = if !may_dist - && preprocessor_cache_mode_config.use_preprocessor_cache_mode - && !too_hard_for_preprocessor_cache_mode - { + let mut preprocessor_key = if use_preprocessor_cache_mode { preprocessor_cache_entry_hash_key( &executable_digest, parsed_args.language, @@ -482,7 +522,7 @@ where &env_vars, may_dist, rewrite_includes_only, - preprocessor_cache_mode_config.use_preprocessor_cache_mode, + use_preprocessor_cache_mode, ) .await; let out_pretty = parsed_args.output_pretty().into_owned(); @@ -1107,12 +1147,16 @@ fn include_is_too_new( false } -impl Compilation for CCompilation { +impl Compilation for CCompilation { fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { + ) -> Result<( + Box>, + Option, + Cacheable, + )> { let CCompilation { ref parsed_args, ref executable, @@ -1121,6 +1165,7 @@ impl Compilation for CCompilation { ref env_vars, .. 
} = *self; + compiler.generate_compile_commands( path_transformer, executable, @@ -1151,6 +1196,7 @@ impl Compilation for CCompilation { input_path, preprocessed_input, path_transformer, + extra_dist_files: parsed_args.extra_dist_files, extra_hash_files: parsed_args.extra_hash_files, }); let toolchain_packager = Box::new(CToolchainPackager { @@ -1180,6 +1226,7 @@ struct CInputsPackager { input_path: PathBuf, path_transformer: dist::PathTransformer, preprocessed_input: Vec, + extra_dist_files: Vec, extra_hash_files: Vec, } @@ -1190,6 +1237,7 @@ impl pkg::InputsPackager for CInputsPackager { input_path, mut path_transformer, preprocessed_input, + extra_dist_files, extra_hash_files, } = *self; @@ -1207,8 +1255,8 @@ impl pkg::InputsPackager for CInputsPackager { builder.append(&file_header, preprocessed_input.as_slice())?; } - for input_path in extra_hash_files { - let input_path = pkg::simplify_path(&input_path)?; + for input_path in extra_hash_files.iter().chain(extra_dist_files.iter()) { + let input_path = pkg::simplify_path(input_path)?; if !super::CAN_DIST_DYLIBS && input_path @@ -1263,7 +1311,7 @@ impl pkg::ToolchainPackager for CToolchainPackager { // files by path. let named_file = |kind: &str, name: &str| -> Option { let mut output = process::Command::new(&self.executable) - .arg(&format!("-print-{}-name={}", kind, name)) + .arg(format!("-print-{}-name={}", kind, name)) .output() .ok()?; debug!( @@ -1337,6 +1385,10 @@ impl pkg::ToolchainPackager for CToolchainPackager { add_named_file(&mut package_builder, "liblto_plugin.so")?; } + CCompilerKind::Cicc => {} + + CCompilerKind::Ptxas => {} + CCompilerKind::Nvcc => { // Various programs called by the nvcc front end. 
// presumes the underlying host compiler is consistent @@ -1448,6 +1500,42 @@ mod test { ); } + #[test] + fn test_header_differs() { + let args = ovec!["a", "b", "c"]; + const PREPROCESSED: &[u8] = b"hello world"; + assert_neq!( + hash_key("abcd", Language::C, &args, &[], &[], PREPROCESSED, false), + hash_key( + "abcd", + Language::CHeader, + &args, + &[], + &[], + PREPROCESSED, + false + ) + ); + } + + #[test] + fn test_plusplus_header_differs() { + let args = ovec!["a", "b", "c"]; + const PREPROCESSED: &[u8] = b"hello world"; + assert_neq!( + hash_key("abcd", Language::Cxx, &args, &[], &[], PREPROCESSED, true), + hash_key( + "abcd", + Language::CxxHeader, + &args, + &[], + &[], + PREPROCESSED, + true + ) + ); + } + #[test] fn test_hash_key_executable_contents_differs() { let args = ovec!["a", "b", "c"]; diff --git a/src/compiler/cicc.rs b/src/compiler/cicc.rs new file mode 100644 index 000000000..0e144d7e5 --- /dev/null +++ b/src/compiler/cicc.rs @@ -0,0 +1,347 @@ +// Copyright 2016 Mozilla Foundation +// SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#![allow(unused_imports, dead_code, unused_variables)] + +use crate::compiler::args::*; +use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; +use crate::compiler::{ + CCompileCommand, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + SingleCompileCommand, +}; +use crate::{counted_array, dist}; + +use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; + +use async_trait::async_trait; + +use std::collections::HashMap; +use std::ffi::OsString; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process; + +use crate::errors::*; + +/// A unit struct on which to implement `CCompilerImpl`. +#[derive(Clone, Debug)] +pub struct Cicc { + pub version: Option, +} + +#[async_trait] +impl CCompilerImpl for Cicc { + fn kind(&self) -> CCompilerKind { + CCompilerKind::Cicc + } + fn plusplus(&self) -> bool { + true + } + fn version(&self) -> Option { + self.version.clone() + } + fn parse_arguments( + &self, + arguments: &[OsString], + cwd: &Path, + _env_vars: &[(OsString, OsString)], + ) -> CompilerArguments { + parse_arguments(arguments, cwd, Language::Ptx, &ARGS[..]) + } + #[allow(clippy::too_many_arguments)] + async fn preprocess( + &self, + _creator: &T, + _executable: &Path, + parsed_args: &ParsedArguments, + cwd: &Path, + _env_vars: &[(OsString, OsString)], + _may_dist: bool, + _rewrite_includes_only: bool, + _preprocessor_cache_mode: bool, + ) -> Result + where + T: CommandCreatorSync, + { + preprocess(cwd, parsed_args).await + } + fn generate_compile_commands( + &self, + path_transformer: &mut dist::PathTransformer, + executable: &Path, + parsed_args: &ParsedArguments, + cwd: &Path, + env_vars: &[(OsString, OsString)], + _rewrite_includes_only: bool, + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars).map( + |(command, dist_command, cacheable)| { + 
(CCompileCommand::new(command), dist_command, cacheable) + }, + ) + } +} + +pub fn parse_arguments( + arguments: &[OsString], + cwd: &Path, + language: Language, + arg_info: S, +) -> CompilerArguments +where + S: SearchableArgInfo, +{ + let mut args = arguments.to_vec(); + let input_loc = arguments.len() - 3; + let input = args.splice(input_loc..input_loc + 1, []).next().unwrap(); + + let mut take_next = false; + let mut outputs = HashMap::new(); + let mut extra_dist_files = vec![]; + let mut gen_module_id_file = false; + let mut module_id_file_name = Option::::None; + + let mut common_args = vec![]; + let mut unhashed_args = vec![]; + + for arg in ArgsIter::new(args.iter().cloned(), arg_info) { + match arg { + Ok(arg) => { + let args = match arg.get_data() { + Some(PassThrough(_)) => { + take_next = false; + &mut common_args + } + Some(Output(o)) => { + take_next = false; + let path = cwd.join(o); + outputs.insert( + "obj", + ArtifactDescriptor { + path, + optional: false, + }, + ); + continue; + } + Some(UnhashedGenModuleIdFileFlag) => { + take_next = false; + gen_module_id_file = true; + &mut unhashed_args + } + Some(UnhashedModuleIdFileName(o)) => { + take_next = false; + module_id_file_name = Some(cwd.join(o)); + &mut unhashed_args + } + Some(UnhashedOutput(o)) => { + take_next = false; + let path = cwd.join(o); + if let Some(flag) = arg.flag_str() { + outputs.insert( + flag, + ArtifactDescriptor { + path, + optional: false, + }, + ); + } + &mut unhashed_args + } + Some(UnhashedFlag) => { + take_next = false; + &mut unhashed_args + } + None => match arg { + Argument::Raw(ref p) => { + if take_next { + take_next = false; + &mut common_args + } else { + continue; + } + } + Argument::UnknownFlag(ref p) => { + let s = p.to_string_lossy(); + take_next = s.starts_with('-'); + &mut common_args + } + _ => unreachable!(), + }, + }; + args.extend(arg.iter_os_strings()); + } + _ => continue, + }; + } + + if let Some(module_id_path) = module_id_file_name { + if 
gen_module_id_file { + outputs.insert( + "--module_id_file_name", + ArtifactDescriptor { + path: module_id_path, + optional: true, + }, + ); + } else { + extra_dist_files.push(module_id_path); + } + } + + CompilerArguments::Ok(ParsedArguments { + input: input.into(), + outputs, + double_dash_input: false, + language, + compilation_flag: OsString::new(), + depfile: None, + dependency_args: vec![], + preprocessor_args: vec![], + common_args, + arch_args: vec![], + unhashed_args, + extra_dist_files, + extra_hash_files: vec![], + msvc_show_includes: false, + profile_generate: false, + color_mode: ColorMode::Off, + suppress_rewrite_includes_only: false, + too_hard_for_preprocessor_cache_mode: None, + }) +} + +pub async fn preprocess(cwd: &Path, parsed_args: &ParsedArguments) -> Result { + // cicc and ptxas expect input to be an absolute path + let input = if parsed_args.input.is_absolute() { + parsed_args.input.clone() + } else { + cwd.join(&parsed_args.input) + }; + std::fs::read(input) + .map_err(anyhow::Error::new) + .map(|s| process::Output { + status: process::ExitStatus::default(), + stdout: s, + stderr: vec![], + }) +} + +pub fn generate_compile_commands( + path_transformer: &mut dist::PathTransformer, + executable: &Path, + parsed_args: &ParsedArguments, + cwd: &Path, + env_vars: &[(OsString, OsString)], +) -> Result<( + SingleCompileCommand, + Option, + Cacheable, +)> { + // Unused arguments + #[cfg(not(feature = "dist-client"))] + { + let _ = path_transformer; + } + + trace!("compile"); + + let lang_str = &parsed_args.language.as_str(); + let out_file = match parsed_args.outputs.get("obj") { + Some(obj) => &obj.path, + None => return Err(anyhow!("Missing {:?} file output", lang_str)), + }; + + let mut arguments: Vec = vec![]; + arguments.extend_from_slice(&parsed_args.common_args); + arguments.extend_from_slice(&parsed_args.unhashed_args); + arguments.extend(vec![ + (&parsed_args.input).into(), + "-o".into(), + out_file.into(), + ]); + + if 
log_enabled!(log::Level::Trace) { + trace!( + "[{}]: {} command: {:?}", + out_file.file_name().unwrap().to_string_lossy(), + executable.file_name().unwrap().to_string_lossy(), + [ + &[format!("cd {} &&", cwd.to_string_lossy()).to_string()], + &[executable.to_str().unwrap_or_default().to_string()][..], + &dist::osstrings_to_strings(&arguments).unwrap_or_default()[..] + ] + .concat() + .join(" ") + ); + } + + let command = SingleCompileCommand { + executable: executable.to_owned(), + arguments, + env_vars: env_vars.to_owned(), + cwd: cwd.to_owned(), + }; + + #[cfg(not(feature = "dist-client"))] + let dist_command = None; + #[cfg(feature = "dist-client")] + let dist_command = (|| { + let mut arguments: Vec = vec![]; + arguments.extend(dist::osstrings_to_strings(&parsed_args.common_args)?); + arguments.extend(dist::osstrings_to_strings(&parsed_args.unhashed_args)?); + arguments.extend(vec![ + path_transformer.as_dist(&parsed_args.input)?, + "-o".into(), + path_transformer.as_dist(out_file)?, + ]); + Some(dist::CompileCommand { + executable: path_transformer.as_dist(executable.canonicalize().unwrap().as_path())?, + arguments, + env_vars: dist::osstring_tuples_to_strings(env_vars)?, + cwd: path_transformer.as_dist_abs(cwd)?, + }) + })(); + + Ok((command, dist_command, Cacheable::Yes)) +} + +ArgData! 
{ pub + Output(PathBuf), + PassThrough(OsString), + UnhashedFlag, + UnhashedGenModuleIdFileFlag, + UnhashedModuleIdFileName(PathBuf), + UnhashedOutput(PathBuf), +} + +use self::ArgData::*; + +counted_array!(pub static ARGS: [ArgInfo; _] = [ + take_arg!("--gen_c_file_name", PathBuf, Separated, UnhashedOutput), + take_arg!("--gen_device_file_name", PathBuf, Separated, UnhashedOutput), + flag!("--gen_module_id_file", UnhashedGenModuleIdFileFlag), + take_arg!("--include_file_name", OsString, Separated, PassThrough), + take_arg!("--module_id_file_name", PathBuf, Separated, UnhashedModuleIdFileName), + take_arg!("--stub_file_name", PathBuf, Separated, UnhashedOutput), + take_arg!("-o", PathBuf, Separated, Output), +]); diff --git a/src/compiler/clang.rs b/src/compiler/clang.rs index 370754645..ae5ffd3dd 100644 --- a/src/compiler/clang.rs +++ b/src/compiler/clang.rs @@ -18,7 +18,7 @@ use crate::compiler::args::*; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; use crate::compiler::gcc::ArgData::*; use crate::compiler::{ - gcc, write_temp_file, Cacheable, CompileCommand, CompilerArguments, Language, + gcc, write_temp_file, CCompileCommand, Cacheable, CompileCommand, CompilerArguments, Language, }; use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; use crate::util::{run_input_output, OsStrExt}; @@ -94,6 +94,7 @@ impl CCompilerImpl for Clang { &self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { gcc::parse_arguments( arguments, @@ -140,11 +141,12 @@ impl CCompilerImpl for Clang { self.kind(), rewrite_includes_only, ignorable_whitespace_flags, + language_to_clang_arg, ) .await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -152,7 +154,14 @@ impl CCompilerImpl for Clang { cwd: &Path, env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, - ) -> 
Result<(CompileCommand, Option, Cacheable)> { + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { gcc::generate_compile_commands( path_transformer, executable, @@ -161,7 +170,29 @@ impl CCompilerImpl for Clang { env_vars, self.kind(), rewrite_includes_only, + language_to_clang_arg, ) + .map(|(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }) + } +} + +pub fn language_to_clang_arg(lang: Language) -> Option<&'static str> { + match lang { + Language::C => Some("c"), + Language::CHeader => Some("c-header"), + Language::Cxx => Some("c++"), + Language::CxxHeader => Some("c++-header"), + Language::ObjectiveC => Some("objective-c"), + Language::ObjectiveCxx => Some("objective-c++"), + Language::ObjectiveCxxHeader => Some("objective-c++-header"), + Language::Cuda => Some("cuda"), + Language::Ptx => None, + Language::Cubin => None, + Language::Rust => None, // Let the compiler decide + Language::Hip => Some("hip"), + Language::GenericHeader => None, // Let the compiler decide } } @@ -176,12 +207,15 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ take_arg!("-MF", PathBuf, CanBeSeparated, DepArgumentPath), take_arg!("-MQ", OsString, CanBeSeparated, DepTarget), take_arg!("-MT", OsString, CanBeSeparated, DepTarget), + flag!("-Wno-unknown-cuda-version", PassThroughFlag), + flag!("-Wno-unused-parameter", PassThroughFlag), take_arg!("-Xclang", OsString, Separated, XClang), take_arg!("-add-plugin", OsString, Separated, PassThrough), take_arg!("-debug-info-kind", OsString, Concatenated('='), PassThrough), take_arg!("-dependency-file", PathBuf, Separated, DepArgumentPath), flag!("-emit-pch", PassThroughFlag), flag!("-fcolor-diagnostics", DiagnosticsColorFlag), + flag!("-fcuda-allow-variadic-functions", PassThroughFlag), flag!("-fcxx-modules", TooHardFlag), take_arg!("-fdebug-compilation-dir", OsString, Separated, PassThrough), take_arg!("-fembed-offload-object", PathBuf, Concatenated('='), 
ExtraHashFile), @@ -237,6 +271,8 @@ mod test { use crate::compiler::gcc; use crate::compiler::*; use crate::mock_command::*; + use crate::server; + use crate::test::mock_storage::MockStorage; use crate::test::utils::*; use std::collections::HashMap; use std::future::Future; @@ -249,7 +285,7 @@ mod test { is_appleclang: false, version: None, } - .parse_arguments(&arguments, &std::env::current_dir().unwrap()) + .parse_arguments(&arguments, &std::env::current_dir().unwrap(), &[]) } macro_rules! parses { @@ -998,4 +1034,63 @@ mod test { ]) ); } + + #[test] + fn test_compile_clang_cuda_does_not_dist_compile() { + let creator = new_creator(); + let f = TestFixture::new(); + let parsed_args = ParsedArguments { + input: "foo.cu".into(), + double_dash_input: false, + language: Language::Cuda, + compilation_flag: "-c".into(), + depfile: None, + outputs: vec![( + "obj", + ArtifactDescriptor { + path: "foo.cu.o".into(), + optional: false, + }, + )] + .into_iter() + .collect(), + dependency_args: vec![], + preprocessor_args: vec![], + common_args: vec![], + arch_args: vec![], + unhashed_args: vec![], + extra_dist_files: vec![], + extra_hash_files: vec![], + msvc_show_includes: false, + profile_generate: false, + color_mode: ColorMode::Auto, + suppress_rewrite_includes_only: false, + too_hard_for_preprocessor_cache_mode: None, + }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); + let compiler = &f.bins[0]; + // Compiler invocation. 
+ next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); + let mut path_transformer = dist::PathTransformer::new(); + let (command, dist_command, cacheable) = gcc::generate_compile_commands( + &mut path_transformer, + compiler, + &parsed_args, + f.tempdir.path(), + &[], + CCompilerKind::Clang, + false, + language_to_clang_arg, + ) + .unwrap(); + // ClangCUDA cannot be dist-compiled + assert!(dist_command.is_none()); + let _ = command.execute(&service, &creator).wait(); + assert_eq!(Cacheable::Yes, cacheable); + // Ensure that we ran all processes. + assert_eq!(0, creator.lock().unwrap().children.len()); + } } diff --git a/src/compiler/compiler.rs b/src/compiler/compiler.rs index a37d20775..152f9e972 100644 --- a/src/compiler/compiler.rs +++ b/src/compiler/compiler.rs @@ -16,6 +16,7 @@ use crate::cache::{Cache, CacheWrite, DecompressionFailure, FileObjectSource, Storage}; use crate::compiler::args::*; use crate::compiler::c::{CCompiler, CCompilerKind}; +use crate::compiler::cicc::Cicc; use crate::compiler::clang::Clang; use crate::compiler::diab::Diab; use crate::compiler::gcc::Gcc; @@ -24,6 +25,7 @@ use crate::compiler::msvc::Msvc; use crate::compiler::nvcc::Nvcc; use crate::compiler::nvcc::NvccHostCompiler; use crate::compiler::nvhpc::Nvhpc; +use crate::compiler::ptxas::Ptxas; use crate::compiler::rust::{Rust, RustupProxy}; use crate::compiler::tasking_vx::TaskingVX; #[cfg(feature = "dist-client")] @@ -31,6 +33,7 @@ use crate::dist::pkg; #[cfg(feature = "dist-client")] use crate::lru_disk_cache; use crate::mock_command::{exit_status, CommandChild, CommandCreatorSync, RunCommand}; +use crate::server; use crate::util::{fmt_duration_as_secs, run_input_output}; use crate::{counted_array, dist}; use async_trait::async_trait; @@ -71,24 +74,130 @@ pub const CAN_DIST_DYLIBS: bool = true; ))] pub const CAN_DIST_DYLIBS: bool = false; -#[derive(Clone, Debug)] -pub struct CompileCommand { +#[async_trait] +pub trait CompileCommand: Send + Sync + 'static +where + 
T: CommandCreatorSync, +{ + async fn execute( + &self, + service: &server::SccacheService, + creator: &T, + ) -> Result; + + fn get_executable(&self) -> PathBuf; + fn get_arguments(&self) -> Vec; + fn get_env_vars(&self) -> Vec<(OsString, OsString)>; + fn get_cwd(&self) -> PathBuf; +} + +#[derive(Debug)] +pub struct CCompileCommand +where + I: CompileCommandImpl, +{ + cmd: I, +} + +impl CCompileCommand +where + I: CompileCommandImpl, +{ + #[allow(clippy::new_ret_no_self)] + pub fn new(cmd: I) -> Box> + where + T: CommandCreatorSync, + { + Box::new(CCompileCommand { cmd }) as Box> + } +} + +#[async_trait] +impl CompileCommand for CCompileCommand +where + T: CommandCreatorSync, + I: CompileCommandImpl, +{ + fn get_executable(&self) -> PathBuf { + self.cmd.get_executable() + } + fn get_arguments(&self) -> Vec { + self.cmd.get_arguments() + } + fn get_env_vars(&self) -> Vec<(OsString, OsString)> { + self.cmd.get_env_vars() + } + fn get_cwd(&self) -> PathBuf { + self.cmd.get_cwd() + } + + async fn execute( + &self, + service: &server::SccacheService, + creator: &T, + ) -> Result { + self.cmd.execute(service, creator).await + } +} + +#[async_trait] +pub trait CompileCommandImpl: Send + Sync + 'static { + fn get_executable(&self) -> PathBuf; + fn get_arguments(&self) -> Vec; + fn get_env_vars(&self) -> Vec<(OsString, OsString)>; + fn get_cwd(&self) -> PathBuf; + + async fn execute( + &self, + service: &server::SccacheService, + creator: &T, + ) -> Result + where + T: CommandCreatorSync; +} + +#[derive(Debug)] +pub struct SingleCompileCommand { pub executable: PathBuf, pub arguments: Vec, pub env_vars: Vec<(OsString, OsString)>, pub cwd: PathBuf, } -impl CompileCommand { - pub async fn execute(self, creator: &T) -> Result +#[async_trait] +impl CompileCommandImpl for SingleCompileCommand { + fn get_executable(&self) -> PathBuf { + self.executable.clone() + } + fn get_arguments(&self) -> Vec { + self.arguments.clone() + } + fn get_env_vars(&self) -> Vec<(OsString, OsString)> 
{ + self.env_vars.clone() + } + fn get_cwd(&self) -> PathBuf { + self.cwd.clone() + } + + async fn execute( + &self, + _: &server::SccacheService, + creator: &T, + ) -> Result where T: CommandCreatorSync, { - let mut cmd = creator.clone().new_command_sync(self.executable); - cmd.args(&self.arguments) + let SingleCompileCommand { + executable, + arguments, + env_vars, + cwd, + } = self; + let mut cmd = creator.clone().new_command_sync(executable); + cmd.args(arguments) .env_clear() - .envs(self.env_vars) - .current_dir(self.cwd); + .envs(env_vars.to_vec()) + .current_dir(cwd); run_input_output(cmd, None).await } } @@ -111,7 +220,10 @@ pub enum Language { CxxHeader, ObjectiveC, ObjectiveCxx, + ObjectiveCxxHeader, Cuda, + Ptx, + Cubin, Rust, Hip, } @@ -134,6 +246,8 @@ impl Language { Some("M") | Some("mm") => Some(Language::ObjectiveCxx), // TODO mii Some("cu") => Some(Language::Cuda), + Some("ptx") => Some(Language::Ptx), + Some("cubin") => Some(Language::Cubin), // TODO cy Some("rs") => Some(Language::Rust), Some("hip") => Some(Language::Hip), @@ -146,12 +260,16 @@ impl Language { pub fn as_str(self) -> &'static str { match self { - Language::C | Language::CHeader => "c", - Language::Cxx | Language::CxxHeader => "c++", + Language::C => "c", + Language::CHeader => "cHeader", + Language::Cxx => "c++", + Language::CxxHeader => "c++Header", Language::GenericHeader => "c/c++", Language::ObjectiveC => "objc", - Language::ObjectiveCxx => "objc++", + Language::ObjectiveCxx | Language::ObjectiveCxxHeader => "objc++", Language::Cuda => "cuda", + Language::Ptx => "ptx", + Language::Cubin => "cubin", Language::Rust => "rust", Language::Hip => "hip", } @@ -167,8 +285,11 @@ impl CompilerKind { | Language::CxxHeader | Language::GenericHeader | Language::ObjectiveC - | Language::ObjectiveCxx => "C/C++", + | Language::ObjectiveCxx + | Language::ObjectiveCxxHeader => "C/C++", Language::Cuda => "CUDA", + Language::Ptx => "PTX", + Language::Cubin => "CUBIN", Language::Rust => "Rust", 
Language::Hip => "HIP", } @@ -181,8 +302,10 @@ impl CompilerKind { CompilerKind::C(CCompilerKind::Diab) => textual_lang + " [diab]", CompilerKind::C(CCompilerKind::Gcc) => textual_lang + " [gcc]", CompilerKind::C(CCompilerKind::Msvc) => textual_lang + " [msvc]", - CompilerKind::C(CCompilerKind::Nvhpc) => textual_lang + " [nvhpc]", CompilerKind::C(CCompilerKind::Nvcc) => textual_lang + " [nvcc]", + CompilerKind::C(CCompilerKind::Cicc) => textual_lang + " [cicc]", + CompilerKind::C(CCompilerKind::Ptxas) => textual_lang + " [ptxas]", + CompilerKind::C(CCompilerKind::Nvhpc) => textual_lang + " [nvhpc]", CompilerKind::C(CCompilerKind::TaskingVX) => textual_lang + " [taskingvx]", CompilerKind::Rust => textual_lang, } @@ -274,7 +397,7 @@ where rewrite_includes_only: bool, storage: Arc, cache_control: CacheControl, - ) -> Result; + ) -> Result>; /// Return the state of any `--color` option passed to the compiler. fn color_mode(&self) -> ColorMode; @@ -284,6 +407,7 @@ where #[allow(clippy::too_many_arguments)] async fn get_cached_or_compile( self: Box, + service: &server::SccacheService, dist_client: Option>, creator: T, storage: Arc, @@ -335,7 +459,9 @@ where // If `ForceRecache` is enabled, we won't check the cache. 
let start = Instant::now(); let cache_status = async { - if cache_control == CacheControl::ForceRecache { + if cache_control == CacheControl::ForceNoCache { + Ok(Cache::None) + } else if cache_control == CacheControl::ForceRecache { Ok(Cache::Recache) } else { storage.get(&key).await @@ -395,6 +521,14 @@ where ); Ok(CacheLookupResult::Miss(MissType::Normal)) } + (Ok(Ok(Cache::None)), duration) => { + debug!( + "[{}]: Cache none in {}", + out_pretty, + fmt_duration_as_secs(&duration) + ); + Ok(CacheLookupResult::Miss(MissType::ForcedNoCache)) + } (Ok(Ok(Cache::Recache)), duration) => { debug!( "[{}]: Cache recache in {}", @@ -431,6 +565,7 @@ where let start = Instant::now(); let (cacheable, dist_type, compiler_result) = dist_or_local_compile( + service, dist_client, creator, cwd, @@ -446,7 +581,22 @@ where out_pretty, fmt_duration_as_secs(&duration_compilation) ); - return Ok((CompileResult::CompileFailed, compiler_result)); + return Ok(( + CompileResult::CompileFailed(dist_type, duration_compilation), + compiler_result, + )); + } + if miss_type == MissType::ForcedNoCache { + // Do not cache + debug!( + "[{}]: Compiled in {}, but not caching", + out_pretty, + fmt_duration_as_secs(&duration_compilation) + ); + return Ok(( + CompileResult::NotCached(dist_type, duration_compilation), + compiler_result, + )); } if cacheable != Cacheable::Yes { // Not cacheable @@ -455,7 +605,10 @@ where out_pretty, fmt_duration_as_secs(&duration_compilation) ); - return Ok((CompileResult::NotCacheable, compiler_result)); + return Ok(( + CompileResult::NotCacheable(dist_type, duration_compilation), + compiler_result, + )); } debug!( "[{}]: Compiled in {}, storing in cache", @@ -514,10 +667,11 @@ where #[cfg(not(feature = "dist-client"))] async fn dist_or_local_compile( + service: &server::SccacheService, _dist_client: Option>, creator: T, _cwd: PathBuf, - compilation: Box, + compilation: Box>, _weak_toolchain_key: String, out_pretty: String, ) -> Result<(Cacheable, DistType, 
process::Output)> @@ -531,17 +685,18 @@ where debug!("[{}]: Compiling locally", out_pretty); compile_cmd - .execute(&creator) + .execute(&service, &creator) .await .map(move |o| (cacheable, DistType::NoDist, o)) } #[cfg(feature = "dist-client")] async fn dist_or_local_compile( + service: &server::SccacheService, dist_client: Option>, creator: T, cwd: PathBuf, - compilation: Box, + compilation: Box>, weak_toolchain_key: String, out_pretty: String, ) -> Result<(Cacheable, DistType, process::Output)> @@ -559,12 +714,12 @@ where .generate_compile_commands(&mut path_transformer, rewrite_includes_only) .context("Failed to generate compile commands")?; - let dist_client = match dist_client { + let dist_client = match dist_compile_cmd.clone().and(dist_client) { Some(dc) => dc, None => { debug!("[{}]: Compiling locally", out_pretty); return compile_cmd - .execute(&creator) + .execute(service, &creator) .await .map(move |o| (cacheable, DistType::NoDist, o)); } @@ -573,8 +728,8 @@ where debug!("[{}]: Attempting distributed compilation", out_pretty); let out_pretty2 = out_pretty.clone(); - let local_executable = compile_cmd.executable.clone(); - let local_executable2 = compile_cmd.executable.clone(); + let local_executable = compile_cmd.get_executable(); + let local_executable2 = compile_cmd.get_executable(); let do_dist_compile = async move { let mut dist_compile_cmd = @@ -659,7 +814,7 @@ where dist::RunJobResult::Complete(jc) => jc, dist::RunJobResult::JobNotFound => bail!("Job {} not found on server", job_id), }; - info!( + debug!( "fetched {:?}", jc.outputs .iter() @@ -735,7 +890,7 @@ where ); compile_cmd - .execute(&creator) + .execute(service, &creator) .await .map(|o| (DistType::Error, o)) } @@ -751,14 +906,21 @@ impl Clone for Box> { } /// An interface to a compiler for actually invoking compilation. 
-pub trait Compilation: Send { +pub trait Compilation: Send +where + T: CommandCreatorSync, +{ /// Given information about a compiler command, generate a command that can /// execute the compiler. fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)>; + ) -> Result<( + Box>, + Option, + Cacheable, + )>; /// Create a function that will create the inputs used to perform a distributed compilation #[cfg(feature = "dist-client")] @@ -800,11 +962,14 @@ impl OutputsRewriter for NoopOutputsRewriter { } /// Result of generating a hash from a compiler command. -pub struct HashResult { +pub struct HashResult +where + T: CommandCreatorSync, +{ /// The hash key of the inputs. pub key: String, /// An object to use for the actual compilation, if necessary. - pub compilation: Box, + pub compilation: Box + 'static>, /// A weak key that may be used to identify the toolchain pub weak_toolchain_key: String, } @@ -854,6 +1019,8 @@ pub enum DistType { pub enum MissType { /// The compilation was not found in the cache, nothing more. Normal, + /// Do not cache the results of the compilation. + ForcedNoCache, /// Cache lookup was overridden, recompilation was forced. ForcedRecache, /// Cache took too long to respond. @@ -884,10 +1051,12 @@ pub enum CompileResult { Duration, // Compilation time Pin> + Send>>, ), + /// Not in cache and do not cache the results of the compilation. + NotCached(DistType, Duration), /// Not in cache, but the compilation result was determined to be not cacheable. - NotCacheable, + NotCacheable(DistType, Duration), /// Not in cache, but compilation failed. - CompileFailed, + CompileFailed(DistType, Duration), } /// The state of `--color` options passed to a compiler. 
@@ -908,8 +1077,15 @@ impl fmt::Debug for CompileResult { CompileResult::CacheMiss(ref m, ref dt, ref d, _) => { write!(f, "CompileResult::CacheMiss({:?}, {:?}, {:?}, _)", d, m, dt) } - CompileResult::NotCacheable => write!(f, "CompileResult::NotCacheable"), - CompileResult::CompileFailed => write!(f, "CompileResult::CompileFailed"), + CompileResult::NotCached(ref dt, ref d) => { + write!(f, "CompileResult::NotCached({:?}, {:?}_", dt, d) + } + CompileResult::NotCacheable(ref dt, ref d) => { + write!(f, "CompileResult::NotCacheable({:?}, {:?}_", dt, d) + } + CompileResult::CompileFailed(ref dt, ref d) => { + write!(f, "CompileResult::CompileFailed({:?}, {:?})", dt, d) + } } } } @@ -923,8 +1099,11 @@ impl PartialEq for CompileResult { (CompileResult::CacheMiss(m, dt, _, _), CompileResult::CacheMiss(n, dt2, _, _)) => { m == n && dt == dt2 } - (&CompileResult::NotCacheable, &CompileResult::NotCacheable) => true, - (&CompileResult::CompileFailed, &CompileResult::CompileFailed) => true, + (CompileResult::NotCached(dt, _), CompileResult::NotCached(dt2, _)) => dt == dt2, + (CompileResult::NotCacheable(dt, _), CompileResult::NotCacheable(dt2, _)) => dt == dt2, + (CompileResult::CompileFailed(dt, _), CompileResult::CompileFailed(dt2, _)) => { + dt == dt2 + } _ => false, } } @@ -942,6 +1121,8 @@ pub enum Cacheable { pub enum CacheControl { /// Default caching behavior. Default, + /// Do not cache the results of the compilation. + ForceNoCache, /// Ignore existing cache entries, force recompilation. 
ForceRecache, } @@ -984,6 +1165,28 @@ fn is_rustc_like>(p: P) -> bool { ) } +/// Returns true if the given path looks like cicc +fn is_nvidia_cicc>(p: P) -> bool { + matches!( + p.as_ref() + .file_stem() + .map(|s| s.to_string_lossy().to_lowercase()) + .as_deref(), + Some("cicc") + ) +} + +/// Returns true if the given path looks like ptxas +fn is_nvidia_ptxas>(p: P) -> bool { + matches!( + p.as_ref() + .file_stem() + .map(|s| s.to_string_lossy().to_lowercase()) + .as_deref(), + Some("ptxas") + ) +} + /// Returns true if the given path looks like a c compiler program /// /// This does not check c compilers, it only report programs that are definitely not rustc @@ -1045,6 +1248,30 @@ where let rustc_executable = if let Some(ref rustc_executable) = maybe_rustc_executable { rustc_executable + } else if is_nvidia_cicc(executable) { + debug!("Found cicc"); + return CCompiler::new( + Cicc { + // TODO: Use nvcc --version + version: Some(String::new()), + }, + executable.to_owned(), + &pool, + ) + .await + .map(|c| (Box::new(c) as Box>, None)); + } else if is_nvidia_ptxas(executable) { + debug!("Found ptxas"); + return CCompiler::new( + Ptxas { + // TODO: Use nvcc --version + version: Some(String::new()), + }, + executable.to_owned(), + &pool, + ) + .await + .map(|c| (Box::new(c) as Box>, None)); } else if is_known_c_compiler(executable) { let cc = detect_c_compiler(creator, executable, args, env.to_vec(), pool).await; return cc.map(|c| (c, None)); @@ -1234,10 +1461,13 @@ where let test = b" #if defined(__NVCC__) && defined(__NVCOMPILER) compiler_id=nvcc-nvhpc +compiler_version=__CUDACC_VER_MAJOR__.__CUDACC_VER_MINOR__.__CUDACC_VER_BUILD__ #elif defined(__NVCC__) && defined(_MSC_VER) compiler_id=nvcc-msvc +compiler_version=__CUDACC_VER_MAJOR__.__CUDACC_VER_MINOR__.__CUDACC_VER_BUILD__ #elif defined(__NVCC__) compiler_id=nvcc +compiler_version=__CUDACC_VER_MAJOR__.__CUDACC_VER_MINOR__.__CUDACC_VER_BUILD__ #elif defined(_MSC_VER) && !defined(__clang__) compiler_id=msvc #elif 
defined(_MSC_VER) && defined(_MT) @@ -1389,10 +1619,17 @@ compiler_version=__VERSION__ "nvcc" => NvccHostCompiler::Gcc, &_ => NvccHostCompiler::Gcc, }; + let host_compiler_version = lines + .next() + // In case the compiler didn't expand the macro. + .filter(|&line| line != "__VERSION__") + .map(str::to_owned); + return CCompiler::new( Nvcc { host_compiler, - version: version.clone(), + version, + host_compiler_version, }, executable, &pool, @@ -1460,7 +1697,6 @@ mod test { use std::io::{Cursor, Write}; use std::sync::Arc; use std::time::Duration; - use std::u64; use test_case::test_case; use tokio::runtime::Runtime; @@ -1980,6 +2216,8 @@ LLVM version: 6.0", // Write a dummy input file so the preprocessor cache mode can work std::fs::write(f.tempdir.path().join("foo.c"), "whatever").unwrap(); let storage = Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage.clone(), pool.clone()); + // Pretend to be GCC. next_command( &creator, @@ -2028,6 +2266,7 @@ LLVM version: 6.0", .block_on(async { hasher .get_cached_or_compile( + &service, None, creator.clone(), storage.clone(), @@ -2064,6 +2303,7 @@ LLVM version: 6.0", .block_on(async { hasher2 .get_cached_or_compile( + &service, None, creator, storage, @@ -2134,11 +2374,17 @@ LLVM version: 6.0", const COMPILER_STDERR: &[u8] = b"compiler stderr"; let obj = f.tempdir.path().join("foo.o"); // Dist client will do the compilation - let dist_client = Some(test_dist::OneshotClient::new( + let dist_client = test_dist::OneshotClient::new( 0, COMPILER_STDOUT.to_owned(), COMPILER_STDERR.to_owned(), - )); + ); + let service = server::SccacheService::mock_with_dist_client( + dist_client.clone(), + storage.clone(), + pool.clone(), + ); + let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { @@ -2150,7 +2396,8 @@ LLVM version: 6.0", .block_on(async { hasher .get_cached_or_compile( - dist_client.clone(), + 
&service, + Some(dist_client.clone()), creator.clone(), storage.clone(), arguments.clone(), @@ -2186,7 +2433,8 @@ LLVM version: 6.0", .block_on(async { hasher2 .get_cached_or_compile( - dist_client.clone(), + &service, + Some(dist_client.clone()), creator, storage, arguments, @@ -2219,6 +2467,8 @@ LLVM version: 6.0", let pool = runtime.handle().clone(); let storage = MockStorage::new(None, preprocessor_cache_mode); let storage: Arc = Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage.clone(), pool.clone()); + // Write a dummy input file so the preprocessor cache mode can work std::fs::write(f.tempdir.path().join("foo.c"), "whatever").unwrap(); // Pretend to be GCC. @@ -2268,6 +2518,7 @@ LLVM version: 6.0", storage.next_get(Err(anyhow!("Some Error"))); let (cached, res) = runtime .block_on(hasher.get_cached_or_compile( + &service, None, creator, storage, @@ -2309,6 +2560,7 @@ LLVM version: 6.0", let storage_delay = Duration::from_millis(2); let storage = MockStorage::new(Some(storage_delay), preprocessor_cache_mode); let storage: Arc = Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage.clone(), pool.clone()); // Pretend to be GCC. next_command( &creator, @@ -2358,6 +2610,7 @@ LLVM version: 6.0", storage.next_get(Ok(Cache::Hit(entry))); let (cached, _res) = runtime .block_on(hasher.get_cached_or_compile( + &service, None, creator, storage, @@ -2396,6 +2649,7 @@ LLVM version: 6.0", CacheMode::ReadWrite, ); let storage = Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage.clone(), pool.clone()); // Write a dummy input file so the preprocessor cache mode can work std::fs::write(f.tempdir.path().join("foo.c"), "whatever").unwrap(); // Pretend to be GCC. 
@@ -2450,6 +2704,7 @@ LLVM version: 6.0", .block_on(async { hasher .get_cached_or_compile( + &service, None, creator.clone(), storage.clone(), @@ -2478,6 +2733,7 @@ LLVM version: 6.0", fs::remove_file(&obj).unwrap(); let (cached, res) = hasher2 .get_cached_or_compile( + &service, None, creator, storage, @@ -2523,6 +2779,8 @@ LLVM version: 6.0", CacheMode::ReadWrite, ); let storage = Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage.clone(), pool.clone()); + // Pretend to be GCC. Also inject a fake object file that the subsequent // preprocessor failure should remove. let obj = f.tempdir.path().join("foo.o"); @@ -2568,6 +2826,7 @@ LLVM version: 6.0", .block_on(async { hasher .get_cached_or_compile( + &service, None, creator, storage, @@ -2667,12 +2926,19 @@ LLVM version: 6.0", }; // All these dist clients will fail, but should still result in successful compiles for dist_client in dist_clients { + let service = server::SccacheService::mock_with_dist_client( + dist_client.clone(), + storage.clone(), + pool.clone(), + ); + if obj.is_file() { fs::remove_file(&obj).unwrap(); } let hasher = hasher.clone(); let (cached, res) = hasher .get_cached_or_compile( + &service, Some(dist_client.clone()), creator.clone(), storage.clone(), diff --git a/src/compiler/diab.rs b/src/compiler/diab.rs index 093253cb3..60291b87f 100644 --- a/src/compiler/diab.rs +++ b/src/compiler/diab.rs @@ -18,7 +18,10 @@ use crate::compiler::args::{ NormalizedDisposition, PathTransformerFn, SearchableArgInfo, }; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; -use crate::compiler::{Cacheable, ColorMode, CompileCommand, CompilerArguments, Language}; +use crate::compiler::{ + CCompileCommand, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + SingleCompileCommand, +}; use crate::errors::*; use crate::mock_command::{CommandCreatorSync, RunCommand}; use crate::util::{run_input_output, OsStrExt}; @@ -53,6 +56,7 
@@ impl CCompilerImpl for Diab { &self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { parse_arguments(arguments, cwd, &ARGS[..]) } @@ -75,7 +79,7 @@ impl CCompilerImpl for Diab { preprocess(creator, executable, parsed_args, cwd, env_vars, may_dist).await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -83,8 +87,19 @@ impl CCompilerImpl for Diab { cwd: &Path, env_vars: &[(OsString, OsString)], _rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { - generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars) + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars).map( + |(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }, + ) } } @@ -295,6 +310,7 @@ where common_args, arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -338,7 +354,11 @@ pub fn generate_compile_commands( parsed_args: &ParsedArguments, cwd: &Path, env_vars: &[(OsString, OsString)], -) -> Result<(CompileCommand, Option, Cacheable)> { +) -> Result<( + SingleCompileCommand, + Option, + Cacheable, +)> { trace!("compile"); let out_file = match parsed_args.outputs.get("obj") { @@ -355,7 +375,7 @@ pub fn generate_compile_commands( arguments.extend_from_slice(&parsed_args.preprocessor_args); arguments.extend_from_slice(&parsed_args.unhashed_args); arguments.extend_from_slice(&parsed_args.common_args); - let command = CompileCommand { + let command = SingleCompileCommand { executable: executable.to_owned(), arguments, env_vars: env_vars.to_owned(), @@ -444,6 +464,8 @@ mod test { use crate::compiler::c::ArtifactDescriptor; use crate::compiler::*; use 
crate::mock_command::*; + use crate::server; + use crate::test::mock_storage::MockStorage; use crate::test::utils::*; use fs::File; use std::io::Write; @@ -763,6 +785,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -770,6 +793,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -782,7 +809,7 @@ mod test { &[], ) .unwrap(); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::Yes, cacheable); // Ensure that we ran all processes. 
assert_eq!(0, creator.lock().unwrap().children.len()); diff --git a/src/compiler/gcc.rs b/src/compiler/gcc.rs index f987e0ce0..bb8ce3a3f 100644 --- a/src/compiler/gcc.rs +++ b/src/compiler/gcc.rs @@ -14,7 +14,10 @@ use crate::compiler::args::*; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; -use crate::compiler::{clang, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language}; +use crate::compiler::{ + clang, CCompileCommand, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + SingleCompileCommand, +}; use crate::mock_command::{CommandCreatorSync, RunCommand}; use crate::util::{run_input_output, OsStrExt}; use crate::{counted_array, dist}; @@ -24,7 +27,7 @@ use fs_err as fs; use log::Level::Trace; use std::collections::HashMap; use std::env; -use std::ffi::OsString; +use std::ffi::{OsStr, OsString}; use std::io::Read; use std::path::{Path, PathBuf}; use std::process; @@ -53,6 +56,7 @@ impl CCompilerImpl for Gcc { &self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { parse_arguments(arguments, cwd, &ARGS[..], self.gplusplus, self.kind()) } @@ -87,11 +91,12 @@ impl CCompilerImpl for Gcc { self.kind(), rewrite_includes_only, ignorable_whitespace_flags, + language_to_gcc_arg, ) .await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -99,7 +104,14 @@ impl CCompilerImpl for Gcc { cwd: &Path, env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { generate_compile_commands( path_transformer, executable, @@ -108,7 +120,11 @@ impl CCompilerImpl for Gcc { env_vars, self.kind(), rewrite_includes_only, + language_to_gcc_arg, ) + .map(|(command, dist_command, cacheable)| { + (CCompileCommand::new(command), 
dist_command, cacheable) + }) } } @@ -127,6 +143,7 @@ ArgData! { pub PreprocessorArgument(OsString), PreprocessorArgumentPath(PathBuf), // Used for arguments that shouldn't affect the computed hash + UnhashedFlag, Unhashed(OsString), DoCompilation, Output(PathBuf), @@ -148,6 +165,7 @@ ArgData! { pub Arch(OsString), PedanticFlag, Standard(OsString), + SerializeDiagnostics(PathBuf), } use self::ArgData::*; @@ -160,7 +178,7 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ flag!("--coverage", Coverage), take_arg!("--param", OsString, Separated, PassThrough), flag!("--save-temps", TooHardFlag), - take_arg!("--serialize-diagnostics", PathBuf, Separated, PassThroughPath), + take_arg!("--serialize-diagnostics", PathBuf, Separated, SerializeDiagnostics), take_arg!("--sysroot", PathBuf, Separated, PassThroughPath), take_arg!("-A", OsString, Separated, PassThrough), take_arg!("-B", PathBuf, CanBeSeparated, PassThroughPath), @@ -210,11 +228,13 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ take_arg!("-imacros", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-imultilib", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-include", PathBuf, CanBeSeparated, PreprocessorArgumentPath), + take_arg!("-index-store-path", OsString, Separated, TooHard), take_arg!("-install_name", OsString, Separated, PassThrough), take_arg!("-iprefix", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-iquote", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-isysroot", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-isystem", PathBuf, CanBeSeparated, PreprocessorArgumentPath), + take_arg!("-ivfsstatcache", PathBuf, CanBeSeparated, PassThroughPath), take_arg!("-iwithprefix", PathBuf, CanBeSeparated, PreprocessorArgumentPath), take_arg!("-iwithprefixbefore", PathBuf, CanBeSeparated, PreprocessorArgumentPath), flag!("-nostdinc", PreprocessorArgumentFlag), @@ -270,6 +290,7 @@ where let mut language_extensions = true; // by 
default, GCC allows extensions let mut split_dwarf = false; let mut need_explicit_dep_target = false; + let mut dep_path = None; enum DepArgumentRequirePath { NotNeeded, Missing, @@ -283,6 +304,7 @@ where let mut xclangs: Vec = vec![]; let mut color_mode = ColorMode::Auto; let mut seen_arch = None; + let mut serialize_diagnostics = None; let dont_cache_multiarch = env::var("SCCACHE_CACHE_MULTIARCH").is_err(); // Custom iterator to expand `@` arguments which stand for reading a file @@ -360,8 +382,12 @@ where dep_flag = OsString::from(arg.flag_str().expect("Dep target flag expected")); dep_target = Some(s.clone()); } - Some(DepArgumentPath(_)) => { - need_explicit_dep_argument_path = DepArgumentRequirePath::Provided + Some(DepArgumentPath(path)) => { + need_explicit_dep_argument_path = DepArgumentRequirePath::Provided; + dep_path = Some(path.clone()); + } + Some(SerializeDiagnostics(path)) => { + serialize_diagnostics = Some(path.clone()); } Some(ExtraHashFile(_)) | Some(PassThroughFlag) @@ -370,6 +396,7 @@ where | Some(PreprocessorArgumentPath(_)) | Some(PassThrough(_)) | Some(PassThroughPath(_)) + | Some(UnhashedFlag) | Some(Unhashed(_)) => {} Some(Language(lang)) => { language = match lang.to_string_lossy().as_ref() { @@ -379,6 +406,7 @@ where "c++-header" => Some(Language::CxxHeader), "objective-c" => Some(Language::ObjectiveC), "objective-c++" => Some(Language::ObjectiveCxx), + "objective-c++-header" => Some(Language::ObjectiveCxxHeader), "cu" => Some(Language::Cuda), "rs" => Some(Language::Rust), "cuda" => Some(Language::Cuda), @@ -429,7 +457,7 @@ where | Some(PassThroughFlag) | Some(PassThrough(_)) | Some(PassThroughPath(_)) => &mut common_args, - Some(Unhashed(_)) => &mut unhashed_args, + Some(UnhashedFlag) | Some(Unhashed(_)) => &mut unhashed_args, Some(Arch(_)) => &mut arch_args, Some(ExtraHashFile(path)) => { extra_hash_files.push(cwd.join(path)); @@ -446,8 +474,12 @@ where &mut preprocessor_args } Some(DepArgumentPath(_)) | Some(NeedDepTarget) => &mut 
dependency_args, - Some(DoCompilation) | Some(Language(_)) | Some(Output(_)) | Some(XClang(_)) - | Some(DepTarget(_)) => continue, + Some(DoCompilation) + | Some(Language(_)) + | Some(Output(_)) + | Some(XClang(_)) + | Some(DepTarget(_)) + | Some(SerializeDiagnostics(_)) => continue, Some(TooHardFlag) | Some(TooHard(_)) => unreachable!(), None => match arg { Argument::Raw(_) => continue, @@ -506,8 +538,9 @@ where | Some(Arch(_)) | Some(PassThrough(_)) | Some(PassThroughFlag) - | Some(PassThroughPath(_)) => &mut common_args, - Some(Unhashed(_)) => &mut unhashed_args, + | Some(PassThroughPath(_)) + | Some(SerializeDiagnostics(_)) => &mut common_args, + Some(UnhashedFlag) | Some(Unhashed(_)) => &mut unhashed_args, Some(ExtraHashFile(path)) => { extra_hash_files.push(cwd.join(path)); &mut common_args @@ -575,6 +608,9 @@ where }; if split_dwarf { let dwo = output.with_extension("dwo"); + common_args.push(OsString::from( + "-D_gsplit_dwarf_path=".to_owned() + dwo.to_str().unwrap(), + )); // -gsplit-dwarf doesn't guarantee .dwo file if no -g is specified outputs.insert( "dwo", @@ -607,6 +643,27 @@ where dependency_args.push(OsString::from("-MF")); dependency_args.push(Path::new(&output).with_extension("d").into_os_string()); } + + if let Some(path) = dep_path { + outputs.insert( + "d", + ArtifactDescriptor { + path: path.clone(), + optional: false, + }, + ); + } + + if let Some(path) = serialize_diagnostics { + outputs.insert( + "dia", + ArtifactDescriptor { + path: path.clone(), + optional: false, + }, + ); + } + outputs.insert( "obj", ArtifactDescriptor { @@ -627,6 +684,7 @@ where common_args, arch_args, unhashed_args, + extra_dist_files: vec![], extra_hash_files, msvc_show_includes: false, profile_generate, @@ -636,7 +694,7 @@ where }) } -fn language_to_gcc_arg(lang: Language) -> Option<&'static str> { +pub fn language_to_gcc_arg(lang: Language) -> Option<&'static str> { match lang { Language::C => Some("c"), Language::CHeader => Some("c-header"), @@ -644,7 +702,10 @@ 
fn language_to_gcc_arg(lang: Language) -> Option<&'static str> { Language::CxxHeader => Some("c++-header"), Language::ObjectiveC => Some("objective-c"), Language::ObjectiveCxx => Some("objective-c++"), + Language::ObjectiveCxxHeader => Some("objective-c++-header"), Language::Cuda => Some("cu"), + Language::Ptx => None, + Language::Cubin => None, Language::Rust => None, // Let the compiler decide Language::Hip => Some("hip"), Language::GenericHeader => None, // Let the compiler decide @@ -652,7 +713,7 @@ fn language_to_gcc_arg(lang: Language) -> Option<&'static str> { } #[allow(clippy::too_many_arguments)] -fn preprocess_cmd( +fn preprocess_cmd( cmd: &mut T, parsed_args: &ParsedArguments, cwd: &Path, @@ -661,10 +722,12 @@ fn preprocess_cmd( kind: CCompilerKind, rewrite_includes_only: bool, ignorable_whitespace_flags: Vec, + language_to_arg: F, ) where + F: Fn(Language) -> Option<&'static str>, T: RunCommand, { - let language = language_to_gcc_arg(parsed_args.language); + let language = language_to_arg(parsed_args.language); if let Some(lang) = &language { cmd.arg("-x").arg(lang); } @@ -697,7 +760,6 @@ fn preprocess_cmd( // Explicitly rewrite the -arch args to be preprocessor defines of the form // __arch__ so that they affect the preprocessor output but don't cause // clang to error. 
- debug!("arch args before rewrite: {:?}", parsed_args.arch_args); let rewritten_arch_args = parsed_args .arch_args .iter() @@ -715,6 +777,9 @@ fn preprocess_cmd( if unique_rewritten.len() <= 1 { // don't use rewritten arch args if there is only one arch arch_args_to_use = &parsed_args.arch_args; + } else { + debug!("-arch args before rewrite: {:?}", parsed_args.arch_args); + debug!("-arch args after rewrite: {:?}", arch_args_to_use); } cmd.args(&parsed_args.preprocessor_args) @@ -728,11 +793,10 @@ fn preprocess_cmd( .env_clear() .envs(env_vars.to_vec()) .current_dir(cwd); - debug!("cmd after -arch rewrite: {:?}", cmd); } #[allow(clippy::too_many_arguments)] -pub async fn preprocess( +pub async fn preprocess( creator: &T, executable: &Path, parsed_args: &ParsedArguments, @@ -742,8 +806,10 @@ pub async fn preprocess( kind: CCompilerKind, rewrite_includes_only: bool, ignorable_whitespace_flags: Vec, + language_to_arg: F, ) -> Result where + F: Fn(Language) -> Option<&'static str>, T: CommandCreatorSync, { trace!("preprocess"); @@ -757,6 +823,7 @@ where kind, rewrite_includes_only, ignorable_whitespace_flags, + language_to_arg, ); if log_enabled!(Trace) { trace!("preprocess: {:?}", cmd); @@ -764,7 +831,8 @@ where run_input_output(cmd, None).await } -pub fn generate_compile_commands( +#[allow(clippy::too_many_arguments)] +pub fn generate_compile_commands( path_transformer: &mut dist::PathTransformer, executable: &Path, parsed_args: &ParsedArguments, @@ -772,7 +840,15 @@ pub fn generate_compile_commands( env_vars: &[(OsString, OsString)], kind: CCompilerKind, rewrite_includes_only: bool, -) -> Result<(CompileCommand, Option, Cacheable)> { + language_to_arg: F, +) -> Result<( + SingleCompileCommand, + Option, + Cacheable, +)> +where + F: Fn(Language) -> Option<&'static str>, +{ // Unused arguments #[cfg(not(feature = "dist-client"))] { @@ -781,8 +857,6 @@ pub fn generate_compile_commands( let _ = rewrite_includes_only; } - trace!("compile"); - let out_file = match 
parsed_args.outputs.get("obj") { Some(obj) => &obj.path, None => return Err(anyhow!("Missing object file output")), @@ -790,7 +864,7 @@ pub fn generate_compile_commands( // Pass the language explicitly as we might have gotten it from the // command line. - let language = language_to_gcc_arg(parsed_args.language); + let language = language_to_arg(parsed_args.language); let mut arguments: Vec = vec![]; if let Some(lang) = &language { arguments.extend(vec!["-x".into(), lang.into()]) @@ -801,6 +875,7 @@ pub fn generate_compile_commands( out_file.into(), ]); arguments.extend_from_slice(&parsed_args.preprocessor_args); + arguments.extend_from_slice(&parsed_args.dependency_args); arguments.extend_from_slice(&parsed_args.unhashed_args); arguments.extend_from_slice(&parsed_args.common_args); arguments.extend_from_slice(&parsed_args.arch_args); @@ -808,7 +883,18 @@ pub fn generate_compile_commands( arguments.push("--".into()); } arguments.push(parsed_args.input.clone().into()); - let command = CompileCommand { + + trace!( + "compile: {} {}", + executable.to_string_lossy(), + arguments.join(OsStr::new(" ")).to_string_lossy() + ); + + #[cfg(feature = "dist-client")] + let has_verbose_flag = arguments.contains(&OsString::from("-v")) + || arguments.contains(&OsString::from("--verbose")); + + let command = SingleCompileCommand { executable: executable.to_owned(), arguments, env_vars: env_vars.to_owned(), @@ -818,56 +904,64 @@ pub fn generate_compile_commands( #[cfg(not(feature = "dist-client"))] let dist_command = None; #[cfg(feature = "dist-client")] - let dist_command = (|| { - // https://gcc.gnu.org/onlinedocs/gcc-4.9.0/gcc/Overall-Options.html - let mut language: Option = - language_to_gcc_arg(parsed_args.language).map(|lang| lang.into()); - if !rewrite_includes_only { - match parsed_args.language { - Language::C => language = Some("cpp-output".into()), - Language::GenericHeader | Language::CHeader | Language::CxxHeader => {} - _ => language.as_mut()?.push_str("-cpp-output"), 
+ // 1. Compilations with -v|--verbose must be run locally, since the verbose + // output is parsed by tools like CMake and must reflect the local toolchain + // 2. ClangCUDA cannot be dist-compiled because Clang has separate host and + // device preprocessor outputs and cannot compile preprocessed CUDA files. + let dist_command = if has_verbose_flag || parsed_args.language == Language::Cuda { + None + } else { + (|| { + // https://gcc.gnu.org/onlinedocs/gcc-4.9.0/gcc/Overall-Options.html + let mut language: Option = + language_to_arg(parsed_args.language).map(|lang| lang.into()); + if !rewrite_includes_only { + match parsed_args.language { + Language::C => language = Some("cpp-output".into()), + Language::GenericHeader | Language::CHeader | Language::CxxHeader => {} + _ => language.as_mut()?.push_str("-cpp-output"), + } } - } - let mut arguments: Vec = vec![]; - // Language needs to be before input - if let Some(lang) = &language { - arguments.extend(vec!["-x".into(), lang.into()]) - } - arguments.extend(vec![ - parsed_args.compilation_flag.clone().into_string().ok()?, - path_transformer.as_dist(&parsed_args.input)?, - "-o".into(), - path_transformer.as_dist(out_file)?, - ]); - if let CCompilerKind::Gcc = kind { - // From https://gcc.gnu.org/onlinedocs/gcc/Preprocessor-Options.html: - // - // -fdirectives-only - // - // [...] - // - // With -fpreprocessed, predefinition of command line and most - // builtin macros is disabled. Macros such as __LINE__, which - // are contextually dependent, are handled normally. This - // enables compilation of files previously preprocessed with -E - // -fdirectives-only. 
- // - // Which is exactly what we do :-) - if rewrite_includes_only && !parsed_args.suppress_rewrite_includes_only { - arguments.push("-fdirectives-only".into()); + let mut arguments: Vec = vec![]; + // Language needs to be before input + if let Some(lang) = &language { + arguments.extend(vec!["-x".into(), lang.into()]) } - arguments.push("-fpreprocessed".into()); - } - arguments.extend(dist::osstrings_to_strings(&parsed_args.common_args)?); - Some(dist::CompileCommand { - executable: path_transformer.as_dist(executable)?, - arguments, - env_vars: dist::osstring_tuples_to_strings(env_vars)?, - cwd: path_transformer.as_dist_abs(cwd)?, - }) - })(); + arguments.extend(vec![ + parsed_args.compilation_flag.clone().into_string().ok()?, + path_transformer.as_dist(&parsed_args.input)?, + "-o".into(), + path_transformer.as_dist(out_file)?, + ]); + if let CCompilerKind::Gcc = kind { + // From https://gcc.gnu.org/onlinedocs/gcc/Preprocessor-Options.html: + // + // -fdirectives-only + // + // [...] + // + // With -fpreprocessed, predefinition of command line and most + // builtin macros is disabled. Macros such as __LINE__, which + // are contextually dependent, are handled normally. This + // enables compilation of files previously preprocessed with -E + // -fdirectives-only. 
+ // + // Which is exactly what we do :-) + if rewrite_includes_only && !parsed_args.suppress_rewrite_includes_only { + arguments.push("-fdirectives-only".into()); + } + arguments.push("-fpreprocessed".into()); + } + arguments.extend(dist::osstrings_to_strings(&parsed_args.common_args)?); + Some(dist::CompileCommand { + executable: path_transformer.as_dist(executable)?, + arguments, + env_vars: dist::osstring_tuples_to_strings(env_vars)?, + cwd: path_transformer.as_dist_abs(cwd)?, + }) + })() + }; Ok((command, dist_command, Cacheable::Yes)) } @@ -949,6 +1043,8 @@ mod test { use super::*; use crate::compiler::*; use crate::mock_command::*; + use crate::server; + use crate::test::mock_storage::MockStorage; use crate::test::utils::*; use temp_env::{with_var, with_var_unset}; @@ -1075,6 +1171,9 @@ mod test { CompilerArguments::Ok(args) => args, o => panic!("Got unexpected parse result: {:?}", o), }; + let mut common_and_arch_args = common_args.clone(); + common_and_arch_args.extend(common_args.to_vec()); + debug!("common_and_arch_args: {:?}", common_and_arch_args); assert_eq!(Some("foo.cpp"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!( @@ -1095,7 +1194,10 @@ mod test { ) ); assert!(preprocessor_args.is_empty()); - assert_eq!(ovec!["-gsplit-dwarf"], common_args); + assert!( + common_args.contains(&"-gsplit-dwarf".into()) + && common_args.contains(&"-D_gsplit_dwarf_path=foo.dwo".into()) + ); assert!(!msvc_show_includes); } @@ -1332,7 +1434,7 @@ mod test { "foo.c", "-fabc", "-MF", - "file", + "foo.o.d", "-o", "foo.o", "-MQ", @@ -1362,9 +1464,16 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); - assert_eq!(ovec!["-MF", "file"], dependency_args); + assert_eq!(ovec!["-MF", "foo.o.d"], dependency_args); assert_eq!(ovec!["-nostdinc"], preprocessor_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); @@ -1432,7 +1541,7 @@ mod 
test { #[test] fn test_parse_arguments_explicit_dep_target() { let args = - stringvec!["-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "file", "-o", "foo.o"]; + stringvec!["-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "foo.o.d", "-o", "foo.o"]; let ParsedArguments { input, language, @@ -1455,9 +1564,16 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); - assert_eq!(ovec!["-MF", "file"], dependency_args); + assert_eq!(ovec!["-MF", "foo.o.d"], dependency_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -1465,7 +1581,7 @@ mod test { #[test] fn test_parse_arguments_explicit_dep_target_needed() { let args = stringvec![ - "-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "file", "-o", "foo.o", "-MD" + "-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "foo.o.d", "-o", "foo.o", "-MD" ]; let ParsedArguments { input, @@ -1490,10 +1606,17 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( - ovec!["-MF", "file", "-MD", "-MT", "depfile"], + ovec!["-MF", "foo.o.d", "-MD", "-MT", "depfile"], dependency_args ); assert!(preprocessor_args.is_empty()); @@ -1504,7 +1627,7 @@ mod test { #[test] fn test_parse_arguments_explicit_mq_dep_target_needed() { let args = stringvec![ - "-c", "foo.c", "-MQ", "depfile", "-fabc", "-MF", "file", "-o", "foo.o", "-MD" + "-c", "foo.c", "-MQ", "depfile", "-fabc", "-MF", "foo.o.d", "-o", "foo.o", "-MD" ]; let ParsedArguments { input, @@ -1529,10 +1652,17 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( - ovec!["-MF", "file", "-MD", "-MQ", "depfile"], + ovec!["-MF", "foo.o.d", "-MD", "-MQ", "depfile"], dependency_args ); assert!(preprocessor_args.is_empty()); @@ -1589,6 +1719,7 @@ mod test { CCompilerKind::Gcc, 
true, vec![], + language_to_gcc_arg, ); // make sure the architectures were rewritten to prepocessor defines let expected_args = ovec![ @@ -1624,6 +1755,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); // make sure the architectures were rewritten to prepocessor defines let expected_args = ovec![ @@ -1658,6 +1790,7 @@ mod test { CCompilerKind::Clang, true, vec![], + language_to_gcc_arg, ); let expected_args = ovec!["-x", "c", "-E", "-frewrite-includes", "--", "foo.c"]; assert_eq!(cmd.args, expected_args); @@ -1683,6 +1816,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); // disable with extensions enabled assert!(!cmd.args.contains(&"-fdirectives-only".into())); @@ -1708,6 +1842,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); // no reason to disable it with no extensions enabled assert!(cmd.args.contains(&"-fdirectives-only".into())); @@ -1733,6 +1868,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); // disable with extensions enabled assert!(!cmd.args.contains(&"-fdirectives-only".into())); @@ -1740,7 +1876,7 @@ mod test { #[test] fn test_parse_arguments_dep_target_needed() { - let args = stringvec!["-c", "foo.c", "-fabc", "-MF", "file", "-o", "foo.o", "-MD"]; + let args = stringvec!["-c", "foo.c", "-fabc", "-MF", "foo.o.d", "-o", "foo.o", "-MD"]; let ParsedArguments { input, language, @@ -1763,9 +1899,19 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); - assert_eq!(ovec!["-MF", "file", "-MD", "-MT", "foo.o"], dependency_args); + assert_eq!( + ovec!["-MF", "foo.o.d", "-MD", "-MT", "foo.o"], + dependency_args + ); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -1884,6 +2030,24 @@ mod test { ); } + #[test] + fn test_parse_index_store_path() { + assert_eq!( + CompilerArguments::CannotCache("-index-store-path", None), + parse_arguments_( + 
stringvec![ + "-c", + "foo.c", + "-index-store-path", + "index.store", + "-o", + "foo.o" + ], + false + ) + ); + } + #[test] fn test_parse_arguments_multiarch_cache_disabled() { with_var_unset("SCCACHE_CACHE_MULTIARCH", || { @@ -2026,6 +2190,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -2033,6 +2198,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -2045,13 +2214,132 @@ mod test { &[], CCompilerKind::Gcc, false, + language_to_gcc_arg, ) .unwrap(); #[cfg(feature = "dist-client")] assert!(dist_command.is_some()); #[cfg(not(feature = "dist-client"))] assert!(dist_command.is_none()); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); + assert_eq!(Cacheable::Yes, cacheable); + // Ensure that we ran all processes. 
+ assert_eq!(0, creator.lock().unwrap().children.len()); + } + + #[test] + fn test_compile_simple_verbose_short() { + let creator = new_creator(); + let f = TestFixture::new(); + let parsed_args = ParsedArguments { + input: "foo.c".into(), + double_dash_input: false, + language: Language::C, + compilation_flag: "-c".into(), + depfile: None, + outputs: vec![( + "obj", + ArtifactDescriptor { + path: "foo.o".into(), + optional: false, + }, + )] + .into_iter() + .collect(), + dependency_args: vec![], + preprocessor_args: vec![], + common_args: vec!["-v".into()], + arch_args: vec![], + unhashed_args: vec![], + extra_dist_files: vec![], + extra_hash_files: vec![], + msvc_show_includes: false, + profile_generate: false, + color_mode: ColorMode::Auto, + suppress_rewrite_includes_only: false, + too_hard_for_preprocessor_cache_mode: None, + }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); + let compiler = &f.bins[0]; + // Compiler invocation. + next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); + let mut path_transformer = dist::PathTransformer::new(); + let (command, dist_command, cacheable) = generate_compile_commands( + &mut path_transformer, + compiler, + &parsed_args, + f.tempdir.path(), + &[], + CCompilerKind::Gcc, + false, + language_to_gcc_arg, + ) + .unwrap(); + // -v should never generate a dist_command + assert!(dist_command.is_none()); + let _ = command.execute(&service, &creator).wait(); + assert_eq!(Cacheable::Yes, cacheable); + // Ensure that we ran all processes. 
+ assert_eq!(0, creator.lock().unwrap().children.len()); + } + + #[test] + fn test_compile_simple_verbose_long() { + let creator = new_creator(); + let f = TestFixture::new(); + let parsed_args = ParsedArguments { + input: "foo.c".into(), + double_dash_input: false, + language: Language::C, + compilation_flag: "-c".into(), + depfile: None, + outputs: vec![( + "obj", + ArtifactDescriptor { + path: "foo.o".into(), + optional: false, + }, + )] + .into_iter() + .collect(), + dependency_args: vec![], + preprocessor_args: vec![], + common_args: vec!["--verbose".into()], + arch_args: vec![], + unhashed_args: vec![], + extra_dist_files: vec![], + extra_hash_files: vec![], + msvc_show_includes: false, + profile_generate: false, + color_mode: ColorMode::Auto, + suppress_rewrite_includes_only: false, + too_hard_for_preprocessor_cache_mode: None, + }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); + let compiler = &f.bins[0]; + // Compiler invocation. + next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); + let mut path_transformer = dist::PathTransformer::new(); + let (command, dist_command, cacheable) = generate_compile_commands( + &mut path_transformer, + compiler, + &parsed_args, + f.tempdir.path(), + &[], + CCompilerKind::Gcc, + false, + language_to_gcc_arg, + ) + .unwrap(); + // --verbose should never generate a dist_command + assert!(dist_command.is_none()); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::Yes, cacheable); // Ensure that we ran all processes. 
assert_eq!(0, creator.lock().unwrap().children.len()); @@ -2075,10 +2363,11 @@ mod test { &[], CCompilerKind::Clang, false, + language_to_gcc_arg, ) .unwrap(); let expected_args = ovec!["-x", "c", "-c", "-o", "foo.o", "--", "foo.c"]; - assert_eq!(command.arguments, expected_args); + assert_eq!(command.get_arguments(), expected_args); } #[test] @@ -2135,6 +2424,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); assert!(cmd.args.contains(&"-x".into()) && cmd.args.contains(&"c++-header".into())); } @@ -2159,6 +2449,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); assert!(cmd.args.contains(&"-x".into()) && cmd.args.contains(&"c++-header".into())); } @@ -2183,6 +2474,7 @@ mod test { CCompilerKind::Gcc, true, vec![], + language_to_gcc_arg, ); assert!(!cmd.args.contains(&"-x".into())); } diff --git a/src/compiler/mod.rs b/src/compiler/mod.rs index 9c1fc471d..0b434c5a0 100644 --- a/src/compiler/mod.rs +++ b/src/compiler/mod.rs @@ -15,6 +15,7 @@ #[macro_use] mod args; mod c; +mod cicc; mod clang; #[macro_use] #[allow(clippy::module_inception)] @@ -25,10 +26,12 @@ mod msvc; mod nvcc; mod nvhpc; mod preprocessor_cache; +mod ptxas; mod rust; mod tasking_vx; #[macro_use] mod counted_array; +pub use crate::compiler::c::CCompilerKind; pub use crate::compiler::compiler::*; pub use crate::compiler::preprocessor_cache::PreprocessorCacheEntry; diff --git a/src/compiler/msvc.rs b/src/compiler/msvc.rs index 4ec7bf155..19eae82a1 100644 --- a/src/compiler/msvc.rs +++ b/src/compiler/msvc.rs @@ -15,7 +15,8 @@ use crate::compiler::args::*; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; use crate::compiler::{ - clang, gcc, write_temp_file, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + clang, gcc, write_temp_file, CCompileCommand, Cacheable, ColorMode, CompileCommand, + CompilerArguments, Language, SingleCompileCommand, }; use crate::mock_command::{CommandCreatorSync, 
RunCommand}; use crate::util::{encode_path, run_input_output, OsStrExt}; @@ -58,6 +59,7 @@ impl CCompilerImpl for Msvc { &self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { parse_arguments(arguments, cwd, self.is_clang) } @@ -91,7 +93,7 @@ impl CCompilerImpl for Msvc { .await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -99,8 +101,19 @@ impl CCompilerImpl for Msvc { cwd: &Path, env_vars: &[(OsString, OsString)], _rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { - generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars) + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars).map( + |(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }, + ) } } @@ -112,8 +125,10 @@ fn from_local_codepage(multi_byte_str: &[u8]) -> io::Result { #[cfg(windows)] pub fn from_local_codepage(multi_byte_str: &[u8]) -> io::Result { - let codepage = winapi::um::winnls::CP_OEMCP; - let flags = winapi::um::winnls::MB_ERR_INVALID_CHARS; + use windows_sys::Win32::Globalization::{MultiByteToWideChar, CP_OEMCP, MB_ERR_INVALID_CHARS}; + + let codepage = CP_OEMCP; + let flags = MB_ERR_INVALID_CHARS; // Empty string if multi_byte_str.is_empty() { @@ -121,7 +136,7 @@ pub fn from_local_codepage(multi_byte_str: &[u8]) -> io::Result { } unsafe { // Get length of UTF-16 string - let len = winapi::um::stringapiset::MultiByteToWideChar( + let len = MultiByteToWideChar( codepage, flags, multi_byte_str.as_ptr() as _, @@ -132,7 +147,7 @@ pub fn from_local_codepage(multi_byte_str: &[u8]) -> io::Result { if len > 0 { // Convert to UTF-16 let mut wstr: Vec = Vec::with_capacity(len as usize); - let len = 
winapi::um::stringapiset::MultiByteToWideChar( + let len = MultiByteToWideChar( codepage, flags, multi_byte_str.as_ptr() as _, @@ -664,8 +679,9 @@ pub fn parse_arguments( | Some(PassThrough(_)) | Some(PassThroughPath(_)) | Some(PedanticFlag) - | Some(Standard(_)) => &mut common_args, - Some(Unhashed(_)) => &mut unhashed_args, + | Some(Standard(_)) + | Some(SerializeDiagnostics(_)) => &mut common_args, + Some(UnhashedFlag) | Some(Unhashed(_)) => &mut unhashed_args, Some(ProfileGenerate) => { profile_generate = true; @@ -817,6 +833,7 @@ pub fn parse_arguments( common_args, arch_args: vec![], unhashed_args, + extra_dist_files: vec![], extra_hash_files, msvc_show_includes: show_includes, profile_generate, @@ -832,10 +849,10 @@ fn normpath(path: &str) -> String { use std::os::windows::ffi::OsStringExt; use std::os::windows::io::AsRawHandle; use std::ptr; - use winapi::um::fileapi::GetFinalPathNameByHandleW; + use windows_sys::Win32::Storage::FileSystem::GetFinalPathNameByHandleW; File::open(path) .and_then(|f| { - let handle = f.as_raw_handle(); + let handle = f.as_raw_handle() as _; let size = unsafe { GetFinalPathNameByHandleW(handle, ptr::null_mut(), 0, 0) }; if size == 0 { return Err(io::Error::last_os_error()); @@ -906,7 +923,8 @@ pub fn preprocess_cmd( } // Windows SDK generates C4668 during preprocessing, but compiles fine. 
// Read for more info: https://github.com/mozilla/sccache/issues/1725 - cmd.arg("/wd4668"); + // And here: https://github.com/mozilla/sccache/issues/2250 + cmd.arg("/WX-"); } if rewrite_includes_only && is_clang { @@ -1023,7 +1041,11 @@ fn generate_compile_commands( parsed_args: &ParsedArguments, cwd: &Path, env_vars: &[(OsString, OsString)], -) -> Result<(CompileCommand, Option, Cacheable)> { +) -> Result<( + SingleCompileCommand, + Option, + Cacheable, +)> { #[cfg(not(feature = "dist-client"))] let _ = path_transformer; @@ -1059,7 +1081,7 @@ fn generate_compile_commands( arguments.push("--".into()); } arguments.push(parsed_args.input.clone().into()); - let command = CompileCommand { + let command = SingleCompileCommand { executable: executable.to_owned(), arguments, env_vars: env_vars.to_owned(), @@ -1356,6 +1378,8 @@ mod test { use super::*; use crate::compiler::*; use crate::mock_command::*; + use crate::server; + use crate::test::mock_storage::MockStorage; use crate::test::utils::*; fn parse_arguments(arguments: Vec) -> CompilerArguments { @@ -2440,6 +2464,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -2447,6 +2472,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. 
next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -2463,7 +2492,7 @@ mod test { assert!(dist_command.is_some()); #[cfg(not(feature = "dist-client"))] assert!(dist_command.is_none()); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::Yes, cacheable); // Ensure that we ran all processes. assert_eq!(0, creator.lock().unwrap().children.len()); @@ -2525,6 +2554,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -2532,6 +2562,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -2548,7 +2582,7 @@ mod test { assert!(dist_command.is_some()); #[cfg(not(feature = "dist-client"))] assert!(dist_command.is_none()); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::No, cacheable); // Ensure that we ran all processes. 
assert_eq!(0, creator.lock().unwrap().children.len()); @@ -2582,7 +2616,7 @@ mod test { #[cfg(windows)] fn local_oem_codepage_conversions() { use crate::util::wide_char_to_multi_byte; - use winapi::um::winnls::GetOEMCP; + use windows_sys::Win32::Globalization::GetOEMCP; let current_oemcp = unsafe { GetOEMCP() }; // We don't control the local OEM codepage so test only if it is one of: diff --git a/src/compiler/nvcc.rs b/src/compiler/nvcc.rs index 3915acea7..bc24d0e59 100644 --- a/src/compiler/nvcc.rs +++ b/src/compiler/nvcc.rs @@ -19,20 +19,30 @@ use crate::compiler::args::*; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; use crate::compiler::gcc::ArgData::*; use crate::compiler::{ - gcc, write_temp_file, Cacheable, CompileCommand, CompilerArguments, Language, + self, gcc, get_compiler_info, write_temp_file, CCompileCommand, Cacheable, CompileCommand, + CompileCommandImpl, CompilerArguments, Language, +}; +use crate::mock_command::{ + exit_status, CommandChild, CommandCreator, CommandCreatorSync, ExitStatusValue, RunCommand, }; -use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; use crate::util::{run_input_output, OsStrExt}; -use crate::{counted_array, dist}; +use crate::{counted_array, dist, protocol, server}; use async_trait::async_trait; use fs::File; use fs_err as fs; +use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt}; +use itertools::Itertools; use log::Level::Trace; -use std::ffi::OsString; -use std::future::Future; -use std::io::{self, Write}; +use regex::Regex; +use std::collections::HashMap; +use std::ffi::{OsStr, OsString}; +use std::future::{Future, IntoFuture}; +use std::io::{self, BufRead, Read, Write}; +#[cfg(unix)] +use std::os::unix::process::ExitStatusExt; use std::path::{Path, PathBuf}; use std::process; +use which::which_in; use crate::errors::*; @@ -47,6 +57,7 @@ pub enum NvccHostCompiler { #[derive(Clone, Debug)] pub struct Nvcc { pub host_compiler: 
NvccHostCompiler, + pub host_compiler_version: Option, pub version: Option, } @@ -59,15 +70,54 @@ impl CCompilerImpl for Nvcc { false } fn version(&self) -> Option { - self.version.clone() + let nvcc_ver = self.version.clone().unwrap_or_default(); + let host_ver = self.host_compiler_version.clone().unwrap_or_default(); + let both_ver = [nvcc_ver, host_ver] + .iter() + .filter(|x| !x.is_empty()) + .join("-"); + if both_ver.is_empty() { + None + } else { + Some(both_ver) + } } fn parse_arguments( &self, arguments: &[OsString], cwd: &Path, + env_vars: &[(OsString, OsString)], ) -> CompilerArguments { + let mut arguments = arguments.to_vec(); + + if let Some(flags) = env_vars + .iter() + .find(|(k, _)| k == "NVCC_PREPEND_FLAGS") + .and_then(|(_, p)| p.to_str()) + { + arguments = shlex::split(flags) + .unwrap_or_default() + .iter() + .map(|s| s.clone().into_arg_os_string()) + .chain(arguments.iter().cloned()) + .collect::>(); + } + + if let Some(flags) = env_vars + .iter() + .find(|(k, _)| k == "NVCC_APPEND_FLAGS") + .and_then(|(_, p)| p.to_str()) + { + arguments.extend( + shlex::split(flags) + .unwrap_or_default() + .iter() + .map(|s| s.clone().into_arg_os_string()), + ); + } + let parsed_args = gcc::parse_arguments( - arguments, + &arguments, cwd, (&gcc::ARGS[..], &ARGS[..]), false, @@ -75,13 +125,27 @@ impl CCompilerImpl for Nvcc { ); match parsed_args { - CompilerArguments::Ok(pargs) => { - if pargs.compilation_flag != "-c" { - let mut new_args = pargs.clone(); - new_args.common_args.push(pargs.compilation_flag); - return CompilerArguments::Ok(new_args); + CompilerArguments::Ok(mut parsed_args) => { + match parsed_args.compilation_flag.to_str() { + Some("") => { /* no compile flag is valid */ } + Some(flag) => { + // Add the compilation flag to `parsed_args.common_args` so + // it's considered when computing the hash. 
+ // + // Consider the following cases: + // $ sccache nvcc x.cu -o x.bin + // $ sccache nvcc x.cu -o x.cu.o -c + // $ sccache nvcc x.cu -o x.ptx -ptx + // $ sccache nvcc x.cu -o x.cubin -cubin + // + // The preprocessor output for all four are identical, so + // without including the compilation flag in the hasher's + // inputs, the same hash would be generated for all four. + parsed_args.common_args.push(flag.into()); + } + _ => unreachable!(), } - CompilerArguments::Ok(pargs) + CompilerArguments::Ok(parsed_args) } CompilerArguments::CannotCache(_, _) | CompilerArguments::NotCompilation => parsed_args, } @@ -102,6 +166,12 @@ impl CCompilerImpl for Nvcc { where T: CommandCreatorSync, { + let env_vars = env_vars + .iter() + .filter(|(k, _)| k != "NVCC_PREPEND_FLAGS" && k != "NVCC_APPEND_FLAGS") + .cloned() + .collect::>(); + let language = match parsed_args.language { Language::C => Ok("c"), Language::Cxx => Ok("c++"), @@ -113,84 +183,95 @@ impl CCompilerImpl for Nvcc { let initialize_cmd_and_args = || { let mut command = creator.clone().new_command_sync(executable); - command.args(&parsed_args.preprocessor_args); - command.args(&parsed_args.common_args); - //We need to add "-rdc=true" if we are compiling with `-dc` - //So that the preprocessor has the correct implicit defines - if parsed_args.compilation_flag == "-dc" { - command.arg("-rdc=true"); - } - command.arg("-x").arg(language).arg(&parsed_args.input); - command - }; - - let dep_before_preprocessor = || { - //NVCC doesn't support generating both the dependency information - //and the preprocessor output at the same time. 
So if we have - //need for both we need separate compiler invocations - let mut dep_cmd = initialize_cmd_and_args(); - let mut transformed_deps = vec![]; - for item in parsed_args.dependency_args.iter() { - if item == "-MD" { - transformed_deps.push(OsString::from("-M")); - } else if item == "-MMD" { - transformed_deps.push(OsString::from("-MM")); - } else { - transformed_deps.push(item.clone()); - } - } - dep_cmd - .args(&transformed_deps) + .current_dir(cwd) .env_clear() - .envs(env_vars.to_vec()) - .current_dir(cwd); + .envs(env_vars.clone()) + .args(&parsed_args.preprocessor_args) + .args(&parsed_args.common_args) + .arg("-x") + .arg(language) + .arg(&parsed_args.input); + command + }; + let dependencies_command = || { + // NVCC doesn't support generating both the dependency information + // and the preprocessor output at the same time. So if we have + // need for both, we need separate compiler invocations + let mut dependency_cmd = initialize_cmd_and_args(); + dependency_cmd.args( + &parsed_args + .dependency_args + .iter() + .map(|arg| match arg.to_str().unwrap_or_default() { + "-MD" | "--generate-dependencies-with-compile" => "-M", + "-MMD" | "--generate-nonsystem-dependencies-with-compile" => "-MM", + arg => arg, + }) + // protect against duplicate -M and -MM flags after transform + .unique() + .collect::>(), + ); if log_enabled!(Trace) { - trace!("dep-gen command: {:?}", dep_cmd); + let output_file_name = &parsed_args + .outputs + .get("obj") + .context("Missing object file output") + .unwrap() + .path + .file_name() + .unwrap(); + + trace!( + "[{}]: dependencies command: {:?}", + output_file_name.to_string_lossy(), + dependency_cmd + ); } - dep_cmd + dependency_cmd }; - trace!("preprocess"); - let mut cmd = initialize_cmd_and_args(); + let preprocessor_command = || { + let mut preprocess_cmd = initialize_cmd_and_args(); + // NVCC only supports `-E` when it comes after preprocessor and common flags. 
+ preprocess_cmd.arg("-E"); + preprocess_cmd.arg(match self.host_compiler { + // nvc/nvc++ don't support eliding line numbers + NvccHostCompiler::Nvhpc => "", + // msvc requires the `-EP` flag to elide line numbers + NvccHostCompiler::Msvc => "-Xcompiler=-EP", + // other host compilers are presumed to match `gcc` behavior + NvccHostCompiler::Gcc => "-Xcompiler=-P", + }); + if log_enabled!(Trace) { + let output_file_name = &parsed_args + .outputs + .get("obj") + .context("Missing object file output") + .unwrap() + .path + .file_name() + .unwrap(); - //NVCC only supports `-E` when it comes after preprocessor - //and common flags. - // - // nvc/nvc++ don't support no line numbers to console - // msvc requires the `-EP` flag to output no line numbers to console - // other host compilers are presumed to match `gcc` behavior - let no_line_num_flag = match self.host_compiler { - NvccHostCompiler::Nvhpc => "", - NvccHostCompiler::Msvc => "-Xcompiler=-EP", - NvccHostCompiler::Gcc => "-Xcompiler=-P", + trace!( + "[{}]: preprocessor command: {:?}", + output_file_name.to_string_lossy(), + preprocess_cmd + ); + } + preprocess_cmd }; - cmd.arg("-E") - .arg(no_line_num_flag) - .env_clear() - .envs(env_vars.to_vec()) - .current_dir(cwd); - if log_enabled!(Trace) { - trace!("preprocess: {:?}", cmd); - } - //Need to chain the dependency generation and the preprocessor - //to emulate a `proper` front end + // Chain dependency generation and the preprocessor command to emulate a `proper` front end if !parsed_args.dependency_args.is_empty() { - let first = run_input_output(dep_before_preprocessor(), None); - let second = run_input_output(cmd, None); - // TODO: If we need to chain these to emulate a frontend, shouldn't - // we explicitly wait on the first one before starting the second one? 
- // (rather than via which drives these concurrently) - let (_f, s) = futures::future::try_join(first, second).await?; - Ok(s) - } else { - run_input_output(cmd, None).await + run_input_output(dependencies_command(), None).await?; } + + run_input_output(preprocessor_command(), None).await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -198,16 +279,1086 @@ impl CCompilerImpl for Nvcc { cwd: &Path, env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { - gcc::generate_compile_commands( - path_transformer, + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + generate_compile_commands(parsed_args, executable, cwd, env_vars, &self.host_compiler).map( + |(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }, + ) + } +} + +pub fn generate_compile_commands( + parsed_args: &ParsedArguments, + executable: &Path, + cwd: &Path, + env_vars: &[(OsString, OsString)], + host_compiler: &NvccHostCompiler, +) -> Result<(NvccCompileCommand, Option, Cacheable)> { + let mut unhashed_args = parsed_args.unhashed_args.clone(); + + let keep_dir = { + let mut keep = false; + let mut keep_dir = None; + // Remove all occurrences of `-keep` and `-keep-dir`, but save the keep dir for copying to later + loop { + if let Some(idx) = unhashed_args + .iter() + .position(|x| x == "-keep-dir" || x == "--keep-dir") + { + let dir = PathBuf::from(unhashed_args[idx + 1].as_os_str()); + let dir = if dir.is_absolute() { + dir + } else { + cwd.join(dir) + }; + unhashed_args.splice(idx..(idx + 2), []); + keep_dir = Some(dir); + continue; + } else if let Some(idx) = unhashed_args.iter().position(|x| { + x == "-keep" || x == "--keep" || x == "-save-temps" || x == "--save-temps" + }) { + keep = true; + unhashed_args.splice(idx..(idx + 1), []); + if keep_dir.is_none() { + 
keep_dir = Some(cwd.to_path_buf()) + } + continue; + } + break; + } + // Match nvcc behavior where intermediate files are kept if: + // * Only `-keep` is specified (files copied to cwd) + // * Both `-keep -keep-dir=` are specified (files copied to ) + // nvcc does _not_ keep intermediate files if `-keep-dir=` is specified without `-keep` + keep.then_some(()).and(keep_dir) + }; + + let num_parallel = { + let mut num_parallel = 1; + // Remove all occurrences of `-t=` or `--threads` because it's incompatible with --dryrun + // Prefer the last occurrence of `-t=` or `--threads` to match nvcc behavior + loop { + if let Some(idx) = unhashed_args.iter().position(|x| x.starts_with("-t")) { + let arg = unhashed_args.get(idx); + if let Some(arg) = arg.and_then(|arg| arg.to_str()) { + let range = if arg.contains('=') { + 3..arg.len() + } else { + 2..arg.len() + }; + if let Ok(arg) = arg[range].parse::() { + num_parallel = arg; + } + } + unhashed_args.splice(idx..(idx + 1), []); + continue; + } + if let Some(idx) = unhashed_args.iter().position(|x| x == "--threads") { + let arg = unhashed_args.get(idx + 1); + if let Some(arg) = arg.and_then(|arg| arg.to_str()) { + if let Ok(arg) = arg.parse::() { + num_parallel = arg; + } + } + unhashed_args.splice(idx..(idx + 2), []); + continue; + } + break; + } + num_parallel + }; + + let env_vars = env_vars + .iter() + .filter(|(k, _)| k != "NVCC_PREPEND_FLAGS" && k != "NVCC_APPEND_FLAGS") + .cloned() + .collect::>(); + + let temp_dir = tempfile::Builder::new() + .prefix("sccache_nvcc") + .tempdir() + .unwrap() + .into_path(); + + let mut arguments = vec![]; + + if let Some(lang) = gcc::language_to_gcc_arg(parsed_args.language) { + arguments.extend(vec!["-x".into(), lang.into()]) + } + + let output = &parsed_args + .outputs + .get("obj") + .context("Missing object file output") + .unwrap() + .path; + + arguments.extend(vec![ + "-o".into(), + // Canonicalize the output path if the compile flag indicates we won't + // produce an object file. 
Since we run cicc and ptxas in a temp dir, + // but we run the host compiler in `cwd` (the dir from which sccache was + // executed), cicc/ptxas `-o` argument should point at the real out path + // that's potentially relative to `cwd`. + match parsed_args.compilation_flag.to_str() { + Some("-c") | Some("--compile") // compile to object + | Some("-dc") | Some("--device-c") // compile to object with -rdc=true + | Some("-dw") | Some("--device-w") // compile to object with -rdc=false + => output.clone().into(), + _ => { + if output.is_absolute() { + output.clone().into() + } else { + cwd.join(output).into() + } + } + }, + ]); + + arguments.extend_from_slice(&parsed_args.preprocessor_args); + arguments.extend_from_slice(&unhashed_args); + arguments.extend_from_slice(&parsed_args.common_args); + arguments.extend_from_slice(&parsed_args.arch_args); + if parsed_args.double_dash_input { + arguments.push("--".into()); + } + + // Canonicalize here so the absolute path to the input is in the + // preprocessor output instead of paths relative to `cwd`. + // + // Since cicc's input is the post-processed source run through cudafe++'s + // transforms, its cache key is sensitive to the preprocessor output. The + // preprocessor embeds the name of the input file in comments, so without + // canonicalizing here, cicc will get cache misses on otherwise identical + // input that should produce a cache hit. + arguments.push( + (if parsed_args.input.is_absolute() { + parsed_args.input.clone() + } else { + cwd.join(&parsed_args.input).canonicalize().unwrap() + }) + .into(), + ); + + let command = NvccCompileCommand { + temp_dir, + keep_dir, + num_parallel, + executable: executable.to_owned(), + arguments, + env_vars, + cwd: cwd.to_owned(), + host_compiler: host_compiler.clone(), + // Only here so we can include it in logs + output_file_name: output.file_name().unwrap().to_owned(), + }; + + Ok(( + command, + None, + // Never assume the outer `nvcc` call is cacheable. 
We must decompose the nvcc call into + // its constituent subcommands with `--dryrun` and only cache the final build product. + // + // Always decomposing `nvcc --dryrun` is the only way to ensure caching nvcc invocations + // is fully sound, because the `nvcc -E` preprocessor output is not sufficient to detect + // all source code changes. + // + // Specifically, `nvcc -E` always defines __CUDA_ARCH__, which means changes to host-only + // code guarded by an `#ifndef __CUDA_ARCH__` will _not_ be captured in `nvcc -E` output. + Cacheable::No, + )) +} + +#[derive(Clone, Debug)] +pub struct NvccCompileCommand { + pub temp_dir: PathBuf, + pub keep_dir: Option, + pub num_parallel: usize, + pub executable: PathBuf, + pub arguments: Vec, + pub env_vars: Vec<(OsString, OsString)>, + pub cwd: PathBuf, + pub host_compiler: NvccHostCompiler, + pub output_file_name: OsString, +} + +#[async_trait] +impl CompileCommandImpl for NvccCompileCommand { + fn get_executable(&self) -> PathBuf { + self.executable.clone() + } + fn get_arguments(&self) -> Vec { + self.arguments.clone() + } + fn get_env_vars(&self) -> Vec<(OsString, OsString)> { + self.env_vars.clone() + } + fn get_cwd(&self) -> PathBuf { + self.cwd.clone() + } + + async fn execute( + &self, + service: &server::SccacheService, + creator: &T, + ) -> Result + where + T: CommandCreatorSync, + { + let NvccCompileCommand { + temp_dir, + keep_dir, + num_parallel, + executable, + arguments, + env_vars, + cwd, + host_compiler, + output_file_name, + } = self; + + let nvcc_subcommand_groups = group_nvcc_subcommands_by_compilation_stage( + creator, executable, - parsed_args, + arguments, cwd, + temp_dir.as_path(), + keep_dir.clone(), env_vars, - self.kind(), - rewrite_includes_only, + host_compiler, + output_file_name, ) + .await?; + + let maybe_keep_temps_then_clean = || { + // If the caller passed `-keep` or `-keep-dir`, copy the + // temp files to the requested location. 
We do this because we + // override `-keep` and `-keep-dir` in our `nvcc --dryrun` call. + let maybe_keep_temps = keep_dir.as_ref().and_then(|dst| { + fs::create_dir_all(dst) + .and_then(|_| fs::read_dir(temp_dir)) + .and_then(|files| { + files + .filter_map(|path| path.ok()) + .filter_map(|path| { + path.file_name() + .to_str() + .map(|file| (path.path(), file.to_owned())) + }) + .try_fold((), |res, (path, file)| fs::rename(path, dst.join(file))) + }) + .ok() + }); + + maybe_keep_temps + .map_or_else( + || fs::remove_dir_all(temp_dir).ok(), + |_| fs::remove_dir_all(temp_dir).ok(), + ) + .unwrap_or(()); + }; + + let mut output = process::Output { + status: process::ExitStatus::default(), + stdout: vec![], + stderr: vec![], + }; + + let n = nvcc_subcommand_groups.len(); + let cuda_front_end_range = if n > 0 { 0..1 } else { 0..0 }; + let final_assembly_range = if n > 1 { n - 1..n } else { 0..0 }; + let device_compile_range = if n > 2 { 1..n - 1 } else { 0..0 }; + + let num_parallel = device_compile_range.len().min(*num_parallel).max(1); + + for command_group_chunks in [ + nvcc_subcommand_groups[cuda_front_end_range].chunks(1), + // compile multiple device architectures in parallel when `nvcc -t=N` is specified + nvcc_subcommand_groups[device_compile_range].chunks(num_parallel), + nvcc_subcommand_groups[final_assembly_range].chunks(1), + ] { + for command_groups in command_group_chunks { + let results = futures::future::join_all(command_groups.iter().map(|commands| { + run_nvcc_subcommands_group(service, creator, cwd, commands, output_file_name) + })) + .await; + + for result in results { + output = aggregate_output(output, result.unwrap_or_else(error_to_output)); + } + + if !output.status.success() { + output.stdout.shrink_to_fit(); + output.stderr.shrink_to_fit(); + maybe_keep_temps_then_clean(); + return Err(ProcessError(output).into()); + } + } + } + + output.stdout.shrink_to_fit(); + output.stderr.shrink_to_fit(); + maybe_keep_temps_then_clean(); + Ok(output) + } 
+} + +#[derive(Clone, Debug)] +pub struct NvccGeneratedSubcommand { + pub exe: PathBuf, + pub args: Vec, + pub cwd: PathBuf, + pub env_vars: Vec<(OsString, OsString)>, + pub cacheable: Cacheable, +} + +#[allow(clippy::too_many_arguments)] +async fn group_nvcc_subcommands_by_compilation_stage( + creator: &T, + executable: &Path, + arguments: &[OsString], + cwd: &Path, + tmp: &Path, + keep_dir: Option, + env_vars: &[(OsString, OsString)], + host_compiler: &NvccHostCompiler, + output_file_name: &OsStr, +) -> Result>> +where + T: CommandCreatorSync, +{ + // Run `nvcc --dryrun` twice to ensure the commands are correct + // relative to the directory where they're run. + // + // All the "nvcc" commands (cudafe++, cicc, ptxas, nvlink, fatbinary) + // are run in the temp dir, so their arguments should be relative to + // the temp dir, e.g. `cudafe++ [...] "x.cpp4.ii"` + // + // All the host compiler invocations are run in the original `cwd` where + // sccache was invoked. Arguments will be relative to the cwd, except + // any arguments that reference nvcc-generated files should be absolute + // to the temp dir, e.g. `gcc -E [...] 
x.cu -o /tmp/dir/x.cpp4.ii` + + // Roughly equivalent to: + // ```shell + // cat <(nvcc --dryrun --keep \ + // | nl -n ln -s ' ' -w 1 \ + // | grep -P "^[0-9]+ (cicc|ptxas|cudafe|nvlink|fatbinary)") \ + // \ + // <(nvcc --dryrun --keep --keep-dir /tmp/dir \ + // | nl -n ln -s ' ' -w 1 \ + // | grep -P -v "^[0-9]+ (cicc|ptxas|cudafe|nvlink|fatbinary)") \ + // \ + // | sort -k 1n + // ``` + + let mut env_vars_1 = env_vars.to_vec(); + let mut env_vars_2 = env_vars.to_vec(); + + let is_nvcc_exe = + |exe: &str| matches!(exe, "cicc" | "ptxas" | "cudafe++" | "nvlink" | "fatbinary"); + + let (nvcc_commands, host_commands) = futures::future::try_join( + // Get the nvcc compile command lines with paths relative to `tmp` + select_nvcc_subcommands( + creator, + executable, + cwd, + &mut env_vars_1, + keep_dir.is_none(), + arguments, + is_nvcc_exe, + host_compiler, + output_file_name, + ), + // Get the host compile command lines with paths relative to `cwd` and absolute paths to `tmp` + select_nvcc_subcommands( + creator, + executable, + cwd, + &mut env_vars_2, + keep_dir.is_none(), + &[arguments, &["--keep-dir".into(), tmp.into()][..]].concat(), + |exe| !is_nvcc_exe(exe), + host_compiler, + output_file_name, + ), + ) + .await?; + + drop(env_vars_2); + let env_vars = env_vars_1; + + // Now zip the two lists of commands again by sorting on original line index. + // Transform to tuples that include the dir in which each command should run. + let all_commands = nvcc_commands + .iter() + // Run cudafe++, nvlink, cicc, ptxas, and fatbinary in `tmp` + .map(|(idx, exe, args)| (idx, tmp, exe, args)) + .chain( + host_commands + .iter() + // Run host preprocessing and compilation steps in `cwd` + .map(|(idx, exe, args)| (idx, cwd, exe, args)), + ) + .sorted_by(|a, b| Ord::cmp(&a.0, &b.0)); + + // Create groups of commands that should be run sequential relative to each other, + // but can optionally be run in parallel to other groups if the user requested via + // `nvcc --threads`. 
+ + let preprocessor_flag = match host_compiler { + NvccHostCompiler::Msvc => "-P", + _ => "-E", + } + .to_owned(); + + let gen_module_id_file_flag = "--gen_module_id_file".to_owned(); + let mut cuda_front_end_group = Vec::::new(); + let mut final_assembly_group = Vec::::new(); + let mut device_compile_groups = HashMap::>::new(); + + for (_, dir, exe, args) in all_commands { + let mut args = args.clone(); + + if let (env_vars, cacheable, Some(group)) = match exe.file_stem().and_then(|s| s.to_str()) { + // fatbinary and nvlink are not cacheable + Some("fatbinary") | Some("nvlink") => ( + env_vars.clone(), + Cacheable::No, + Some(&mut final_assembly_group), + ), + // cicc and ptxas are cacheable + Some("cicc") => { + let group = device_compile_groups.get_mut(&args[args.len() - 3]); + (env_vars.clone(), Cacheable::Yes, group) + } + Some("ptxas") => { + let group = device_compile_groups.values_mut().find(|cmds| { + if let Some(cicc) = cmds.last() { + if let Some(cicc_out) = cicc.args.last() { + return cicc_out == &args[args.len() - 3]; + } + } + false + }); + (env_vars.clone(), Cacheable::Yes, group) + } + // cudafe++ is not cacheable + Some("cudafe++") => { + // Fix for CTK < 12.0: + // Add `--gen_module_id_file` if the cudafe++ args include `--module_id_file_name` + if !args.contains(&gen_module_id_file_flag) { + if let Some(idx) = args.iter().position(|x| x == "--module_id_file_name") { + // Insert `--gen_module_id_file` just before `--module_id_file_name` to match nvcc behavior + args.splice(idx..idx, [gen_module_id_file_flag.clone()]); + } + } + ( + env_vars.clone(), + Cacheable::No, + Some(&mut cuda_front_end_group), + ) + } + _ => { + // All generated host compiler commands include one of these defines. + // If one of these isn't present, this command is either a new binary + // in the CTK that we don't know about, or a line like `rm x_dlink.reg.c` + // that nvcc generates in certain cases. 
+ if !args.iter().any(|arg| { + arg.starts_with("-D__CUDACC__") + || arg.starts_with("-D__NVCC__") + || arg.starts_with("-D__CUDA_ARCH__") + || arg.starts_with("-D__CUDA_ARCH_LIST__") + }) { + continue; + } + if args.contains(&preprocessor_flag) { + // Each preprocessor step represents the start of a new command group + if let Some(out_file) = if cfg!(target_os = "windows") { + args.iter() + .find(|x| x.starts_with("-Fi")) + .and_then(|x| x.strip_prefix("-Fi")) + } else { + args.iter() + .position(|x| x == "-o") + .and_then(|i| args.get(i + 1).map(|o| o.as_str())) + } + .map(PathBuf::from) + .and_then(|out_path| { + out_path + .file_name() + .and_then(|out_name| out_name.to_str()) + .map(|out_name| out_name.to_owned()) + }) + .and_then(|out_name| { + // If the output file ends with... + // * .cpp1.ii - cicc/ptxas input + // * .cpp4.ii - cudafe++ input + if out_name.ends_with(".cpp1.ii") { + Some(out_name.to_owned()) + } else { + None + } + }) { + let new_device_compile_group = vec![]; + device_compile_groups.insert(out_file.clone(), new_device_compile_group); + ( + env_vars.clone(), + Cacheable::No, + device_compile_groups.get_mut(&out_file), + ) + } else { + ( + env_vars.clone(), + Cacheable::No, + Some(&mut cuda_front_end_group), + ) + } + } else { + // Cache the host compiler calls, since we've marked the outer `nvcc` call + // as non-cacheable. This ensures `sccache nvcc ...` _always_ decomposes the + // nvcc call into its constituent subcommands with `--dryrun`, but only caches + // the final build product once. + // + // Always decomposing `nvcc --dryrun` is the only way to ensure caching nvcc invocations + // is fully sound, because the `nvcc -E` preprocessor output is not sufficient to detect + // all source code changes. + // + // Specifically, `nvcc -E` always defines __CUDA_ARCH__, which means changes to host-only + // code guarded by an `#ifndef __CUDA_ARCH__` will _not_ be captured in `nvcc -E` output. 
+ ( + env_vars + .iter() + .chain( + [ + // HACK: This compilation will look like a C/C++ compilation, + // but we want to report it in the stats as a CUDA compilation. + // The SccacheService API doesn't have a great way to specify this + // case, so we set a special envvar here that it can read when the + // compilation is finished. + ("__SCCACHE_THIS_IS_A_CUDA_COMPILATION__".into(), "".into()), + ] + .iter(), + ) + .cloned() + .collect::>(), + Cacheable::Yes, + Some(&mut final_assembly_group), + ) + } + } + } { + if log_enabled!(log::Level::Trace) { + trace!( + "[{}]: transformed nvcc command: \"{}\"", + output_file_name.to_string_lossy(), + [ + &[format!("cd {} &&", dir.to_string_lossy()).to_string()], + &[exe.to_str().unwrap_or_default().to_string()][..], + &args[..] + ] + .concat() + .join(" ") + ); + } + + group.push(NvccGeneratedSubcommand { + exe: exe.clone(), + args: args.clone(), + cwd: dir.into(), + env_vars, + cacheable, + }); + } + } + + let mut command_groups = vec![]; + + command_groups.push(cuda_front_end_group); + command_groups.extend(device_compile_groups.into_values()); + command_groups.push(final_assembly_group); + + Ok(command_groups) +} + +#[allow(clippy::too_many_arguments)] +async fn select_nvcc_subcommands( + creator: &T, + executable: &Path, + cwd: &Path, + env_vars: &mut Vec<(OsString, OsString)>, + remap_filenames: bool, + arguments: &[OsString], + select_subcommand: F, + host_compiler: &NvccHostCompiler, + output_file_name: &OsStr, +) -> Result)>> +where + F: Fn(&str) -> bool, + T: CommandCreatorSync, +{ + if log_enabled!(log::Level::Trace) { + trace!( + "[{}]: nvcc dryrun command: {:?}", + output_file_name.to_string_lossy(), + [ + &[executable.to_str().unwrap_or_default().to_string()][..], + &dist::osstrings_to_strings(arguments).unwrap_or_default()[..], + &["--dryrun".into(), "--keep".into()][..] 
+ ] + .concat() + .join(" ") + ); + } + + let mut nvcc_dryrun_cmd = creator.clone().new_command_sync(executable); + + nvcc_dryrun_cmd + .args(&[arguments, &["--dryrun".into(), "--keep".into()][..]].concat()) + .env_clear() + .current_dir(cwd) + .envs(env_vars.to_vec()); + + let nvcc_dryrun_output = run_input_output(nvcc_dryrun_cmd, None).await?; + + let mut ext_counts = HashMap::::new(); + let mut old_to_new = HashMap::::new(); + let is_valid_line_re = Regex::new(r"^#\$ (.*)$").unwrap(); + let is_envvar_line_re = Regex::new(r"^([_A-Z]+)=(.*)$").unwrap(); + + let mut dryrun_env_vars = Vec::<(OsString, OsString)>::new(); + let mut dryrun_env_vars_re_map = HashMap::::new(); + + let mut lines = Vec::<(usize, PathBuf, Vec)>::new(); + + #[cfg(unix)] + let reader = std::io::BufReader::new(&nvcc_dryrun_output.stderr[..]); + #[cfg(windows)] + let reader = std::io::BufReader::new(&nvcc_dryrun_output.stdout[..]); + + for pair in reader.lines().enumerate() { + let (idx, line) = pair; + // Select lines that match the `#$ ` prefix from nvcc --dryrun + let line = match select_valid_dryrun_lines(&is_valid_line_re, &line?) { + Ok(line) => line, + // Ignore lines that don't start with `#$ `. 
For some reason, nvcc + // on Windows prints the name of the input file without the prefix + Err(err) => continue, + }; + + let maybe_exe_and_args = fold_env_vars_or_split_into_exe_and_args( + &is_envvar_line_re, + &mut dryrun_env_vars, + &mut dryrun_env_vars_re_map, + cwd, + &line, + host_compiler, + )?; + + let (exe, mut args) = match maybe_exe_and_args { + Some(exe_and_args) => exe_and_args, + _ => continue, + }; + + // Remap nvcc's generated file names to deterministic names + if remap_filenames { + args = remap_generated_filenames(&args, &mut old_to_new, &mut ext_counts); + } + + match exe.file_stem().and_then(|s| s.to_str()) { + None => continue, + Some(exe_name) => { + if select_subcommand(exe_name) { + lines.push((idx, exe, args)); + } + } + } + } + + for pair in dryrun_env_vars { + env_vars.splice( + if let Some(idx) = env_vars.iter().position(|(k, _)| *k == pair.0) { + idx..idx + 1 + } else { + env_vars.len()..env_vars.len() + }, + [pair], + ); + } + + Ok(lines) +} + +fn select_valid_dryrun_lines(re: &Regex, line: &str) -> Result { + match re.captures(line) { + Some(caps) => { + let (_, [rest]) = caps.extract(); + Ok(rest.to_string()) + } + _ => Err(anyhow!("nvcc error: {:?}", line)), + } +} + +fn fold_env_vars_or_split_into_exe_and_args( + re: &Regex, + env_vars: &mut Vec<(OsString, OsString)>, + env_var_re_map: &mut HashMap, + cwd: &Path, + line: &str, + host_compiler: &NvccHostCompiler, +) -> Result)>> { + fn envvar_in_shell_format(var: &str) -> String { + if cfg!(target_os = "windows") { + format!("%{}%", var) // %CICC_PATH% + } else { + format!("${}", var) // $CICC_PATH + } + } + + fn envvar_in_shell_format_re(var: &str) -> Regex { + Regex::new( + &(if cfg!(target_os = "windows") { + regex::escape(&envvar_in_shell_format(var)) + } else { + regex::escape(&envvar_in_shell_format(var)) + r"[^\w]" + }), + ) + .unwrap() + } + + // Intercept the environment variable lines and add them to the env_vars list + if let Some(var) = re.captures(line) { + let (_, 
[var, val]) = var.extract(); + + env_var_re_map + .entry(var.to_owned()) + .or_insert_with_key(|var| envvar_in_shell_format_re(var)); + + env_vars.push((var.into(), val.into())); + + return Ok(None); + } + + // The rest of the lines are subcommands, so parse into a vec of [cmd, args..] + + let mut line = if cfg!(target_os = "windows") { + let line = line + .replace("\"\"", "\"") + .replace(r"\\?\", "") + .replace('\\', "/") + .replace(r"//?/", ""); + match host_compiler { + NvccHostCompiler::Msvc => line.replace(" -E ", " -P ").replace(" > ", " -Fi"), + _ => line, + } + } else { + line.to_owned() + }; + + // Expand envvars in nvcc subcommands, i.e. "$CICC_PATH/cicc ..." or "%CICC_PATH%/cicc" + if let Some(env_vars) = dist::osstring_tuples_to_strings(env_vars) { + for (var, val) in env_vars { + if let Some(re) = env_var_re_map.get(&var) { + if re.is_match(&line) { + line = line.replace(&envvar_in_shell_format(&var), &val); + } + } + } + } + + let args = match shlex::split(&line) { + Some(args) => args, + None => return Err(anyhow!("Could not parse shell line")), + }; + + let (exe, args) = match args.split_first() { + Some(exe_and_args) => exe_and_args, + None => return Err(anyhow!("Could not split shell line")), + }; + + let env_path = env_vars + .iter() + .find(|(k, _)| k == "PATH") + .map(|(_, p)| p.to_owned()) + .unwrap(); + + let exe = which_in(exe, env_path.into(), cwd)?; + + Ok(Some((exe.clone(), args.to_vec()))) +} + +fn remap_generated_filenames( + args: &[String], + old_to_new: &mut HashMap, + ext_counts: &mut HashMap, +) -> Vec { + args.iter() + .map(|arg| { + // Special case for MSVC's preprocess output file name flag + let arg_is_msvc_preprocessor_output = arg.starts_with("-Fi"); + + let arg = if arg_is_msvc_preprocessor_output { + arg.trim_start_matches("-Fi").to_owned() + } else { + arg.to_owned() + }; + + // If the argument doesn't start with `-` and is a file that + // ends in one of the below extensions, rename the file to an + // auto-incrementing 
stable name + let maybe_extension = (!arg.starts_with('-')) + .then(|| { + [ + ".cpp1.ii", + ".cpp4.ii", + ".cudafe1.c", + ".cudafe1.cpp", + ".cudafe1.stub.c", + ] + .iter() + .find(|ext| arg.ends_with(*ext)) + .copied() + }) + .unwrap_or(None); + + // If the argument is a file that ends in one of the above extensions: + // * If it's our first time seeing this file, create a unique name for it + // * If we've seen this file before, lookup its unique name in the hash map + // + // This ensures stable names are in cudafe++ output and #include directives, + // eliminating one source of false-positive cache misses. + let arg = match maybe_extension { + Some(extension) => { + old_to_new + .entry(arg) + .or_insert_with_key(|arg| { + // Initialize or update the number of files with a given extension: + // compute_70.cudafe1.stub.c -> x_0.cudafe1.stub.c + // compute_60.cudafe1.stub.c -> x_1.cudafe1.stub.c + // etc. + let count = ext_counts + .entry(extension.into()) + .and_modify(|c| *c += 1) + .or_insert(0) + .to_string(); + // Return `/tmp/dir/x_{count}.{ext}` as the new name, i.e. `/tmp/dir/x_0.cudafe1.stub.c` + PathBuf::from(arg) + .parent() + .unwrap_or(Path::new("")) + // Don't use the count as the first character of the file name, because the file name + // may be used as an identifier (via the __FILE__ macro) and identifiers with leading + // digits are not valid in C/C++, i.e. `x_0.cudafe1.cpp` instead of `0.cudafe1.cpp`. + .join("x_".to_owned() + &count + extension) + .to_string_lossy() + .to_string() + }) + .to_owned() + } + None => { + // If the argument isn't a file name with one of our extensions, + // it may _reference_ files we've renamed. Go through and replace + // all old names with their new stable names. + // + // Sort by string length descending so we don't accidentally replace + // `zzz.cudafe1.cpp` with the new name for `zzz.cudafe1.c`. 
+ // + // For example, if we have these renames: + // + // compute_70.cudafe1.cpp -> x_0.cudafe1.cpp + // compute_70.cudafe1.c -> x_2.cudafe1.c + // + // `compute_70.cudafe1.cpp` should be replaced with `x_0.cudafe1.cpp`, not `x_2.cudafe1.c` + // + let mut arg = arg.clone(); + for (old, new) in old_to_new + .iter() + .sorted_by(|a, b| b.0.len().cmp(&a.0.len())) + { + arg = arg.replace(old, new); + } + arg + } + }; + + if arg_is_msvc_preprocessor_output { + format!("-Fi{}", arg) + } else { + arg + } + }) + .collect::>() +} + +async fn run_nvcc_subcommands_group( + service: &server::SccacheService, + creator: &T, + cwd: &Path, + commands: &[NvccGeneratedSubcommand], + output_file_name: &OsStr, +) -> Result +where + T: CommandCreatorSync, +{ + let mut output = process::Output { + status: process::ExitStatus::default(), + stdout: vec![], + stderr: vec![], + }; + + for cmd in commands { + let NvccGeneratedSubcommand { + exe, + args, + cwd, + env_vars, + cacheable, + } = cmd; + + if log_enabled!(log::Level::Trace) { + trace!( + "[{}]: run_commands_sequential cwd={:?}, cmd=\"{}\"", + output_file_name.to_string_lossy(), + cwd, + [ + vec![exe.clone().into_os_string().into_string().unwrap()], + args.to_vec() + ] + .concat() + .join(" ") + ); + } + + let out = match cacheable { + Cacheable::No => { + let mut cmd = creator.clone().new_command_sync(exe); + + cmd.args(args) + .current_dir(cwd) + .env_clear() + .envs(env_vars.to_vec()); + + run_input_output(cmd, None) + .await + .unwrap_or_else(error_to_output) + } + Cacheable::Yes => { + let srvc = service.clone(); + let args = dist::strings_to_osstrings(args); + + match srvc + .compiler_info(exe.clone(), cwd.to_owned(), &args, env_vars) + .await + { + Err(err) => error_to_output(err), + Ok(compiler) => match compiler.parse_arguments(&args, cwd, env_vars) { + CompilerArguments::NotCompilation => Err(anyhow!("Not compilation")), + CompilerArguments::CannotCache(why, extra_info) => Err(extra_info + .map_or_else( + || 
anyhow!("Cannot cache({}): {:?} {:?}", why, exe, args), + |desc| { + anyhow!("Cannot cache({}, {}): {:?} {:?}", why, desc, exe, args) + }, + )), + CompilerArguments::Ok(hasher) => { + srvc.start_compile_task( + compiler, + hasher, + args, + cwd.to_owned(), + env_vars + .iter() + .chain([("SCCACHE_DIRECT".into(), "false".into())].iter()) + .cloned() + .collect::>(), + ) + .await + } + } + .map_or_else(error_to_output, |res| compile_result_to_output(exe, res)), + } + } + }; + + output = aggregate_output(output, out); + + if !output.status.success() { + break; + } + } + + Ok(output) +} + +fn aggregate_output(lhs: process::Output, rhs: process::Output) -> process::Output { + process::Output { + status: exit_status( + std::cmp::max(status_to_code(lhs.status), status_to_code(rhs.status)) + as ExitStatusValue, + ), + stdout: [lhs.stdout, rhs.stdout].concat(), + stderr: [lhs.stderr, rhs.stderr].concat(), + } +} + +fn error_to_output(err: Error) -> process::Output { + match err.downcast::() { + Ok(ProcessError(out)) => out, + Err(err) => process::Output { + status: exit_status(1 as ExitStatusValue), + stdout: vec![], + stderr: err.to_string().into_bytes(), + }, + } +} + +fn compile_result_to_output(exe: &Path, res: protocol::CompileFinished) -> process::Output { + if let Some(signal) = res.signal { + return process::Output { + status: exit_status(signal as ExitStatusValue), + stdout: res.stdout, + stderr: [ + format!( + "{} terminated (signal: {})", + exe.file_stem().unwrap().to_string_lossy(), + signal + ) + .as_bytes(), + &res.stderr, + ] + .concat(), + }; + } + process::Output { + status: exit_status(res.retcode.unwrap_or(0) as ExitStatusValue), + stdout: res.stdout, + stderr: res.stderr, + } +} + +#[cfg(unix)] +fn status_to_code(res: process::ExitStatus) -> ExitStatusValue { + if res.success() { + 0 as ExitStatusValue + } else { + res.signal().or(res.code()).unwrap_or(1) as ExitStatusValue + } +} + +#[cfg(windows)] +fn status_to_code(res: process::ExitStatus) -> 
ExitStatusValue { + if res.success() { + 0 as ExitStatusValue + } else { + res.code().unwrap_or(1) as ExitStatusValue } } @@ -219,14 +1370,21 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ take_arg!("--compiler-bindir", OsString, CanBeSeparated('='), PassThrough), take_arg!("--compiler-options", OsString, CanBeSeparated('='), PreprocessorArgument), flag!("--cubin", DoCompilation), + take_arg!("--default-stream", OsString, CanBeSeparated('='), PassThrough), + flag!("--device-c", DoCompilation), + flag!("--device-w", DoCompilation), flag!("--expt-extended-lambda", PreprocessorArgumentFlag), flag!("--expt-relaxed-constexpr", PreprocessorArgumentFlag), flag!("--extended-lambda", PreprocessorArgumentFlag), flag!("--fatbin", DoCompilation), take_arg!("--generate-code", OsString, CanBeSeparated('='), PassThrough), + flag!("--generate-dependencies-with-compile", NeedDepTarget), + flag!("--generate-nonsystem-dependencies-with-compile", NeedDepTarget), take_arg!("--gpu-architecture", OsString, CanBeSeparated('='), PassThrough), take_arg!("--gpu-code", OsString, CanBeSeparated('='), PassThrough), take_arg!("--include-path", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), + flag!("--keep", UnhashedFlag), + take_arg!("--keep-dir", OsString, CanBeSeparated('='), Unhashed), take_arg!("--linker-options", OsString, CanBeSeparated('='), PassThrough), take_arg!("--maxrregcount", OsString, CanBeSeparated('='), PassThrough), flag!("--no-host-device-initializer-list", PreprocessorArgumentFlag), @@ -236,8 +1394,10 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ flag!("--ptx", DoCompilation), take_arg!("--ptxas-options", OsString, CanBeSeparated('='), PassThrough), take_arg!("--relocatable-device-code", OsString, CanBeSeparated('='), PreprocessorArgument), + flag!("--save-temps", UnhashedFlag), take_arg!("--system-include", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), take_arg!("--threads", OsString, CanBeSeparated('='), Unhashed), + take_arg!("--x", 
OsString, CanBeSeparated('='), Language), take_arg!("-Werror", OsString, CanBeSeparated('='), PreprocessorArgument), take_arg!("-Xarchive", OsString, CanBeSeparated('='), PassThrough), @@ -250,18 +1410,24 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ take_arg!("-code", OsString, CanBeSeparated('='), PassThrough), flag!("-cubin", DoCompilation), flag!("-dc", DoCompilation), + take_arg!("-default-stream", OsString, CanBeSeparated('='), PassThrough), + flag!("-dw", DoCompilation), flag!("-expt-extended-lambda", PreprocessorArgumentFlag), flag!("-expt-relaxed-constexpr", PreprocessorArgumentFlag), flag!("-extended-lambda", PreprocessorArgumentFlag), flag!("-fatbin", DoCompilation), take_arg!("-gencode", OsString, CanBeSeparated('='), PassThrough), take_arg!("-isystem", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), + flag!("-keep", UnhashedFlag), + take_arg!("-keep-dir", OsString, CanBeSeparated('='), Unhashed), take_arg!("-maxrregcount", OsString, CanBeSeparated('='), PassThrough), flag!("-nohdinitlist", PreprocessorArgumentFlag), flag!("-optix-ir", DoCompilation), flag!("-ptx", DoCompilation), take_arg!("-rdc", OsString, CanBeSeparated('='), PreprocessorArgument), - take_arg!("-t", OsString, CanBeSeparated('='), Unhashed), + flag!("-save-temps", UnhashedFlag), + take_arg!("-t", OsString, CanBeSeparated, Unhashed), + take_arg!("-t=", OsString, Concatenated, Unhashed), take_arg!("-x", OsString, CanBeSeparated('='), Language), ]); @@ -279,25 +1445,28 @@ mod test { let arguments = arguments.iter().map(OsString::from).collect::>(); Nvcc { host_compiler: NvccHostCompiler::Gcc, + host_compiler_version: None, version: None, } - .parse_arguments(&arguments, ".".as_ref()) + .parse_arguments(&arguments, ".".as_ref(), &[]) } fn parse_arguments_msvc(arguments: Vec) -> CompilerArguments { let arguments = arguments.iter().map(OsString::from).collect::>(); Nvcc { host_compiler: NvccHostCompiler::Msvc, + host_compiler_version: None, version: None, } - 
.parse_arguments(&arguments, ".".as_ref()) + .parse_arguments(&arguments, ".".as_ref(), &[]) } fn parse_arguments_nvc(arguments: Vec) -> CompilerArguments { let arguments = arguments.iter().map(OsString::from).collect::>(); Nvcc { host_compiler: NvccHostCompiler::Nvhpc, + host_compiler_version: None, version: None, } - .parse_arguments(&arguments, ".".as_ref()) + .parse_arguments(&arguments, ".".as_ref(), &[]) } macro_rules! parses { @@ -341,7 +1510,7 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert!(a.common_args.is_empty()); + assert_eq!(ovec!["-c"], a.common_args); } #[test] @@ -360,7 +1529,7 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert!(a.common_args.is_empty()); + assert_eq!(ovec!["-c"], a.common_args); } #[test] @@ -379,7 +1548,7 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert!(a.common_args.is_empty()); + assert_eq!(ovec!["-c"], a.common_args); } fn test_parse_arguments_simple_cu_msvc() { @@ -397,7 +1566,7 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert!(a.common_args.is_empty()); + assert_eq!(ovec!["-c"], a.common_args); } #[test] @@ -416,7 +1585,7 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert_eq!(ovec!["-ccbin", "gcc"], a.common_args); + assert_eq!(ovec!["-ccbin", "gcc", "-c"], a.common_args); } #[test] @@ -435,15 +1604,16 @@ mod test { ) ); assert!(a.preprocessor_args.is_empty()); - assert_eq!(ovec!["-ccbin", "/usr/bin/"], a.common_args); + assert_eq!(ovec!["-ccbin", "/usr/bin/", "-c"], a.common_args); } #[test] fn test_parse_threads_argument_simple_cu() { let a = parses!( - "-t=1", + "-t1", + "-t=2", "-t", - "2", + "3", "--threads=1", "--threads=2", "-c", @@ -465,7 +1635,7 @@ mod test { ); assert!(a.preprocessor_args.is_empty()); assert_eq!( - ovec!["-t=1", "-t=2", "--threads", "1", "--threads", "2"], + ovec!["-t1", "-t=2", "-t3", "--threads", "1", "--threads", "2"], a.unhashed_args ); } @@ -486,7 +1656,7 @@ mod test { ) ); 
assert!(a.preprocessor_args.is_empty()); - assert!(a.common_args.is_empty()); + assert_eq!(ovec!["-c"], a.common_args); } #[test] @@ -593,7 +1763,7 @@ mod test { a.preprocessor_args ); assert!(a.dependency_args.is_empty()); - assert_eq!(ovec!["-fabc"], a.common_args); + assert_eq!(ovec!["-fabc", "-c"], a.common_args); } #[test] @@ -613,13 +1783,20 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( ovec!["-MD", "-MF", "foo.o.d", "-MT", "foo.o"], a.dependency_args ); - assert_eq!(ovec!["-fabc"], a.common_args); + assert_eq!(ovec!["-fabc", "-c"], a.common_args); } #[test] @@ -647,7 +1824,7 @@ mod test { ); assert!(a.preprocessor_args.is_empty()); assert_eq!( - ovec!["--generate-code", "arch=compute_61,code=sm_61"], + ovec!["--generate-code", "arch=compute_61,code=sm_61", "-c"], a.common_args ); } @@ -699,7 +1876,8 @@ mod test { "-Xnvlink", "--suppress-stack-size-warning", "-Xcudafe", - "--display_error_number" + "--display_error_number", + "-c" ], a.common_args ); @@ -736,7 +1914,7 @@ mod test { a.preprocessor_args ); assert_eq!( - ovec!["-forward-unknown-to-host-compiler", "-std=c++14"], + ovec!["-forward-unknown-to-host-compiler", "-std=c++14", "-c"], a.common_args ); } diff --git a/src/compiler/nvhpc.rs b/src/compiler/nvhpc.rs index a1fad1878..507e5d1a3 100644 --- a/src/compiler/nvhpc.rs +++ b/src/compiler/nvhpc.rs @@ -19,7 +19,7 @@ use crate::compiler::args::*; use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; use crate::compiler::gcc::ArgData::*; use crate::compiler::{ - gcc, write_temp_file, Cacheable, CompileCommand, CompilerArguments, Language, + gcc, write_temp_file, CCompileCommand, Cacheable, CompileCommand, CompilerArguments, Language, }; use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; use crate::util::{run_input_output, OsStrExt}; @@ -59,6 +59,7 @@ impl CCompilerImpl for Nvhpc { 
&self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { gcc::parse_arguments( arguments, @@ -156,7 +157,7 @@ impl CCompilerImpl for Nvhpc { } } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -164,7 +165,14 @@ impl CCompilerImpl for Nvhpc { cwd: &Path, env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { gcc::generate_compile_commands( path_transformer, executable, @@ -173,7 +181,11 @@ impl CCompilerImpl for Nvhpc { env_vars, self.kind(), rewrite_includes_only, + gcc::language_to_gcc_arg, ) + .map(|(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }) } } @@ -227,7 +239,7 @@ mod test { nvcplusplus: false, version: None, } - .parse_arguments(&arguments, ".".as_ref()) + .parse_arguments(&arguments, ".".as_ref(), &[]) } macro_rules! parses { @@ -340,6 +352,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( diff --git a/src/compiler/ptxas.rs b/src/compiler/ptxas.rs new file mode 100644 index 000000000..1f46f3c10 --- /dev/null +++ b/src/compiler/ptxas.rs @@ -0,0 +1,110 @@ +// Copyright 2016 Mozilla Foundation +// SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#![allow(unused_imports, dead_code, unused_variables)] + +use crate::compiler::args::*; +use crate::compiler::c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}; +use crate::compiler::cicc; +use crate::compiler::{ + CCompileCommand, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + SingleCompileCommand, +}; +use crate::{counted_array, dist}; + +use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; + +use async_trait::async_trait; + +use std::collections::HashMap; +use std::ffi::OsString; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process; + +use crate::errors::*; + +/// A unit struct on which to implement `CCompilerImpl`. 
+#[derive(Clone, Debug)] +pub struct Ptxas { + pub version: Option, +} + +#[async_trait] +impl CCompilerImpl for Ptxas { + fn kind(&self) -> CCompilerKind { + CCompilerKind::Ptxas + } + fn plusplus(&self) -> bool { + true + } + fn version(&self) -> Option { + self.version.clone() + } + fn parse_arguments( + &self, + arguments: &[OsString], + cwd: &Path, + _env_vars: &[(OsString, OsString)], + ) -> CompilerArguments { + cicc::parse_arguments(arguments, cwd, Language::Cubin, &ARGS[..]) + } + #[allow(clippy::too_many_arguments)] + async fn preprocess( + &self, + _creator: &T, + _executable: &Path, + parsed_args: &ParsedArguments, + cwd: &Path, + _env_vars: &[(OsString, OsString)], + _may_dist: bool, + _rewrite_includes_only: bool, + _preprocessor_cache_mode: bool, + ) -> Result + where + T: CommandCreatorSync, + { + cicc::preprocess(cwd, parsed_args).await + } + fn generate_compile_commands( + &self, + path_transformer: &mut dist::PathTransformer, + executable: &Path, + parsed_args: &ParsedArguments, + cwd: &Path, + env_vars: &[(OsString, OsString)], + _rewrite_includes_only: bool, + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + cicc::generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars) + .map(|(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }) + } +} + +use cicc::ArgData::*; + +counted_array!(pub static ARGS: [ArgInfo; _] = [ + take_arg!("-arch", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-m", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-o", PathBuf, Separated, Output), +]); diff --git a/src/compiler/rust.rs b/src/compiler/rust.rs index c72fdc51b..771896ff1 100644 --- a/src/compiler/rust.rs +++ b/src/compiler/rust.rs @@ -15,8 +15,9 @@ use crate::cache::{FileObjectSource, Storage}; use crate::compiler::args::*; use crate::compiler::{ - c::ArtifactDescriptor, Cacheable, ColorMode, Compilation, CompileCommand, 
Compiler, - CompilerArguments, CompilerHasher, CompilerKind, CompilerProxy, HashResult, Language, + c::ArtifactDescriptor, CCompileCommand, Cacheable, ColorMode, Compilation, CompileCommand, + Compiler, CompilerArguments, CompilerHasher, CompilerKind, CompilerProxy, HashResult, Language, + SingleCompileCommand, }; #[cfg(feature = "dist-client")] use crate::compiler::{DistPackagers, OutputsRewriter}; @@ -34,6 +35,7 @@ use fs_err as fs; use log::Level::Trace; use once_cell::sync::Lazy; #[cfg(feature = "dist-client")] +use semver::Version; #[cfg(feature = "dist-client")] use std::borrow::Borrow; use std::borrow::Cow; @@ -159,7 +161,25 @@ pub struct ParsedArguments { crate_types: CrateTypes, /// If dependency info is being emitted, the name of the dep info file. dep_info: Option, - /// If gcno info is being emitted, the name of the gcno file. + /// If profile info is being emitted, the path of the profile. + /// + /// This may be filled when `-Cprofile-use` is enabled. + /// + /// We need to add the profile into our outputs to enable distributed compilation. + /// We don't need to track `profile-generate` since it is the user's job to make sure + /// the `profdata` has been generated from profraw files. + /// + /// For more information, see https://doc.rust-lang.org/rustc/profile-guided-optimization.html + profile: Option, + /// If `-Z profile` has been enabled, we will use a GCC-compatible, gcov-based + /// coverage implementation. + /// + /// This is not supported in the latest stable Rust anymore, but we still keep it here + /// for the old nightly rustc. + /// + /// We need to add the profile into our outputs to enable distributed compilation. + /// + /// For more information, see https://doc.rust-lang.org/rustc/instrument-coverage.html gcno: Option, /// rustc says that emits .rlib for --emit=metadata /// https://github.com/rust-lang/rust/issues/54852 @@ -168,6 +188,8 @@ pub struct ParsedArguments { color_mode: ColorMode, /// Whether `--json` was passed to this invocation.
has_json: bool, + /// A `--target` parameter that specifies a path to a JSON file. + target_json: Option, } /// A struct on which to hang a `Compilation` impl. @@ -978,7 +1000,6 @@ impl IntoArg for ArgTarget { ArgData! { TooHardFlag, - TooHard(OsString), TooHardPath(PathBuf), NotCompilationFlag, NotCompilation(OsString), @@ -1023,7 +1044,7 @@ counted_array!(static ARGS: [ArgInfo; _] = [ take_arg!("--out-dir", PathBuf, CanBeSeparated('='), OutDir), take_arg!("--pretty", OsString, CanBeSeparated('='), NotCompilation), take_arg!("--print", OsString, CanBeSeparated('='), NotCompilation), - take_arg!("--remap-path-prefix", OsString, CanBeSeparated('='), TooHard), + take_arg!("--remap-path-prefix", OsString, CanBeSeparated('='), PassThrough), take_arg!("--sysroot", PathBuf, CanBeSeparated('='), TooHardPath), take_arg!("--target", ArgTarget, CanBeSeparated('='), Target), take_arg!("--unpretty", OsString, CanBeSeparated('='), NotCompilation), @@ -1059,12 +1080,14 @@ fn parse_arguments(arguments: &[OsString], cwd: &Path) -> CompilerArguments = vec![]; let mut color_mode = ColorMode::Auto; let mut has_json = false; - let mut profile = false; + let mut profile = None; + let mut gcno = false; + let mut target_json = None; for arg in ArgsIter::new(arguments.iter().cloned(), &ARGS[..]) { let arg = try_or_cannot_cache!(arg, "argument parse"); match arg.get_data() { - Some(TooHardFlag) | Some(TooHard(_)) | Some(TooHardPath(_)) => { + Some(TooHardFlag) | Some(TooHardPath(_)) => { cannot_cache!(arg.flag_str().expect("Can't be Argument::Raw/UnknownFlag",)) } Some(NotCompilationFlag) | Some(NotCompilation(_)) => { @@ -1114,6 +1137,7 @@ fn parse_arguments(arguments: &[OsString], cwd: &Path) -> CompilerArguments extra_filename = Some(value.to_owned()), ("extra-filename", None) => cannot_cache!("extra-filename"), + ("profile-use", Some(v)) => profile = Some(v.to_string()), // Incremental compilation makes a mess of sccache's entire world // view. 
It produces additional compiler outputs that we don't cache, // and just letting rustc do its work in incremental mode is likely @@ -1128,7 +1152,7 @@ fn parse_arguments(arguments: &[OsString], cwd: &Path) -> CompilerArguments match value.as_deref() { Some("y") | Some("yes") | Some("on") | None if opt == "profile" => { - profile = true; + gcno = true; } _ => (), }, @@ -1145,7 +1169,8 @@ fn parse_arguments(arguments: &[OsString], cwd: &Path) -> CompilerArguments (), Some(Target(target)) => match target { - ArgTarget::Path(_) | ArgTarget::Unsure(_) => cannot_cache!("target"), + ArgTarget::Path(json_path) => target_json = Some(json_path.to_owned()), + ArgTarget::Unsure(_) => cannot_cache!("target unsure"), ArgTarget::Name(_) => (), }, None => { @@ -1220,8 +1245,11 @@ fn parse_arguments(arguments: &[OsString], cwd: &Path) -> CompilerArguments CompilerArguments, _cache_control: CacheControl, - ) -> Result { + ) -> Result> { let RustHasher { executable, host, @@ -1311,7 +1341,9 @@ where dep_info, emit, has_json, + profile, gcno, + target_json, .. }, } = *self; @@ -1367,11 +1399,33 @@ where let abs_staticlibs = staticlibs.iter().map(|s| cwd.join(s)).collect::>(); let staticlib_hashes = hash_all_archives(&abs_staticlibs, pool); - let ((source_files, source_hashes, mut env_deps), extern_hashes, staticlib_hashes) = futures::try_join!( + // Hash the content of the specified target json file, if any. + let mut target_json_files = Vec::new(); + if let Some(path) = &target_json { + trace!( + "[{}]: hashing target json file {}", + crate_name, + path.display() + ); + let abs_target_json = cwd.join(path); + target_json_files.push(abs_target_json); + } + + let target_json_hash = hash_all(&target_json_files, pool); + + // Perform all hashing operations on the files. 
+ let ( + (source_files, source_hashes, mut env_deps), + extern_hashes, + staticlib_hashes, + target_json_hash, + ) = futures::try_join!( source_files_and_hashes_and_env_deps, extern_hashes, - staticlib_hashes + staticlib_hashes, + target_json_hash )?; + // If you change any of the inputs to the hash, you should change `CACHE_VERSION`. let mut m = Digest::new(); // Hash inputs: @@ -1397,6 +1451,10 @@ where // in those paths (rlibs and static libs used in the compilation) are used as hash // inputs below. .filter(|&(arg, _)| !(arg == "--extern" || arg == "-L" || arg == "--out-dir")) + // We also exclude `--target` if it specifies a path to a .json file. The file content + // is used as hash input below. + // If `--target` specifies a string, it continues to be hashed as part of the arguments. + .filter(|&(arg, _)| target_json.is_none() || arg != "--target") // A few argument types were not passed in a deterministic order // by older versions of cargo: --extern, -L, --cfg. We'll filter the rest of those // out, sort them, and append them to the rest of the arguments. @@ -1414,14 +1472,16 @@ where // 4. The digest of all source files (this includes src file from cmdline). // 5. The digest of all files listed on the commandline (self.externs). // 6. The digest of all static libraries listed on the commandline (self.staticlibs). + // 7. The digest of the content of the target json file specified via `--target` (if any). for h in source_hashes .into_iter() .chain(extern_hashes) .chain(staticlib_hashes) + .chain(target_json_hash) { m.update(h.as_bytes()); } - // 7. Environment variables: Hash all environment variables listed in the rustc dep-info + // 8. Environment variables: Hash all environment variables listed in the rustc dep-info // output. Additionally also has all environment variables starting with `CARGO_`, // since those are not listed in dep-info but affect cacheability. 
env_deps.sort(); @@ -1439,16 +1499,25 @@ where .collect(); env_vars.sort(); for (var, val) in env_vars.iter() { + if !var.starts_with("CARGO_") { + continue; + } + // CARGO_MAKEFLAGS will have jobserver info which is extremely non-cacheable. - if var.starts_with("CARGO_") && var != "CARGO_MAKEFLAGS" { - var.hash(&mut HashToDigest { digest: &mut m }); - m.update(b"="); - val.hash(&mut HashToDigest { digest: &mut m }); + // CARGO_REGISTRIES_*_TOKEN contains non-cacheable secrets. + // Registry override config doesn't need to be hashed, because deps' package IDs + // already uniquely identify the relevant registries. + if var == "CARGO_MAKEFLAGS" || var.starts_with("CARGO_REGISTRIES_") { + continue; } + + var.hash(&mut HashToDigest { digest: &mut m }); + m.update(b"="); + val.hash(&mut HashToDigest { digest: &mut m }); } - // 8. The cwd of the compile. This will wind up in the rlib. + // 9. The cwd of the compile. This will wind up in the rlib. cwd.hash(&mut HashToDigest { digest: &mut m }); - // 9. The version of the compiler. + // 10. The version of the compiler. version.hash(&mut HashToDigest { digest: &mut m }); // Turn arguments into a simple Vec to calculate outputs. 
@@ -1535,6 +1604,16 @@ where } else { None }; + if let Some(profile) = profile { + let p = output_dir.join(&profile); + outputs.insert( + profile.to_string_lossy().into_owned(), + ArtifactDescriptor { + path: p, + optional: true, + }, + ); + } if let Some(gcno) = gcno { let p = output_dir.join(&gcno); outputs.insert( @@ -1600,12 +1679,16 @@ where } } -impl Compilation for RustCompilation { +impl Compilation for RustCompilation { fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, _rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { + ) -> Result<( + Box>, + Option, + Cacheable, + )> { let RustCompilation { ref executable, ref arguments, @@ -1627,7 +1710,7 @@ impl Compilation for RustCompilation { trace!("[{}]: compile", crate_name); - let command = CompileCommand { + let command = SingleCompileCommand { executable: executable.to_owned(), arguments: arguments .iter() @@ -1742,7 +1825,7 @@ impl Compilation for RustCompilation { }) })(); - Ok((command, dist_command, Cacheable::Yes)) + Ok((CCompileCommand::new(command), dist_command, Cacheable::Yes)) } #[cfg(feature = "dist-client")] @@ -2288,6 +2371,7 @@ impl Meter for DepsSize { struct RlibDepReader { cache: Mutex>, executable: PathBuf, + ls_arg: String, } #[cfg(feature = "dist-client")] @@ -2313,12 +2397,6 @@ impl RlibDepReader { stderr, } = cmd.output()?; - if !status.success() { - bail!( - "Failed to compile a minimal rlib with {}", - executable.display() - ) - } if !stdout.is_empty() { bail!( "rustc stdout non-empty when compiling a minimal rlib: {:?}", @@ -2331,6 +2409,12 @@ impl RlibDepReader { String::from_utf8_lossy(&stderr) ) } + if !status.success() { + bail!( + "Failed to compile a minimal rlib with {}", + executable.display() + ) + } // The goal of this cache is to avoid repeated lookups when building a single project. Let's budget 3MB. 
// Allowing for a 100 byte path, 50 dependencies per rlib and 20 characters per crate name, this roughly @@ -2341,10 +2425,12 @@ impl RlibDepReader { // can cache information from about 570 rlibs - easily enough for a single project. const CACHE_SIZE: u64 = 3 * 1024 * 1024; let cache = LruCache::with_meter(CACHE_SIZE, DepsSize); + let rustc_version = Self::get_rustc_version(&executable, env_vars)?; let rlib_dep_reader = RlibDepReader { cache: Mutex::new(cache), executable, + ls_arg: Self::get_correct_ls_arg(rustc_version), }; if let Err(e) = rlib_dep_reader.discover_rlib_deps(env_vars, &temp_rlib) { bail!("Failed to read deps from minimal rlib: {}", e) @@ -2353,6 +2439,56 @@ impl RlibDepReader { Ok(rlib_dep_reader) } + fn get_rustc_version( + executable: &PathBuf, + env_vars: &[(OsString, OsString)], + ) -> Result { + let mut cmd = process::Command::new(executable); + cmd.arg("--version").env_clear().envs(env_vars.to_vec()); + + let process::Output { + status, + stdout, + stderr, + } = cmd.output()?; + + if !status.success() { + bail!("Failed to get rustc version with {}", executable.display()) + } + if stdout.is_empty() { + bail!("rustc stdout empty when parsing version") + } + if !stderr.is_empty() { + bail!( + "rustc stderr non-empty when parsing version: {:?}", + String::from_utf8_lossy(&stderr) + ) + } + + Self::parse_rustc_version(&stdout) + } + + fn parse_rustc_version(stdout: &[u8]) -> Result { + let stdout_string = String::from_utf8_lossy(stdout); + let rustc_version: Vec<&str> = stdout_string.split_whitespace().collect(); + if rustc_version[0] != "rustc" { + bail!( + "Expected rustc string in rustc version with {:?}", + String::from_utf8_lossy(stdout) + ) + } + + Ok(Version::parse(rustc_version[1])?) 
+ } + + fn get_correct_ls_arg(version: Version) -> String { + if version.major <= 1 && version.minor <= 74 { + String::from("ls") + } else { + String::from("ls=root") + } + } + fn discover_rlib_deps( &self, env_vars: &[(OsString, OsString)], @@ -2374,7 +2510,7 @@ impl RlibDepReader { trace!("Discovering dependencies of {}", rlib.display()); let mut cmd = process::Command::new(&self.executable); - cmd.args(["-Z", "ls"]) + cmd.args(["-Z", &self.ls_arg]) .arg(rlib) .env_clear() .envs(env_vars.to_vec()) @@ -3110,6 +3246,79 @@ proc_macro false assert_eq!(res[2], "lucet_runtime_macros"); } + #[cfg(feature = "dist-client")] + #[test] + fn test_rlib_dep_reader_call() { + let cargo_home = std::env::var("CARGO_HOME"); + assert!(cargo_home.is_ok()); + + let mut env_vars = vec![]; + if let Some(rustup_home) = std::env::var_os("RUSTUP_HOME") { + env_vars.push(("RUSTUP_HOME".into(), rustup_home)); + } + + let mut rustc_path = PathBuf::from(cargo_home.unwrap()); + rustc_path.push("bin"); + rustc_path.push("rustc"); + + let rlib_dep_reader = RlibDepReader::new_with_check(rustc_path, &env_vars); + let is_ok = rlib_dep_reader.is_ok(); + // Unwrap so the error is reported in the test output + let _ = rlib_dep_reader.unwrap(); + assert!(is_ok); + } + + #[cfg(feature = "dist-client")] + #[test] + fn test_rlib_dep_reader_parse_rustc_version() { + let v0 = RlibDepReader::parse_rustc_version("rustc 1.2.3 aaaa".as_bytes()); + assert!(v0.is_ok()); + let v0 = v0.unwrap(); + assert_eq!(v0.major, 1); + assert_eq!(v0.minor, 2); + assert_eq!(v0.patch, 3); + + assert!(RlibDepReader::parse_rustc_version("rutc 1.2.3 aaaa".as_bytes()).is_err()); + assert!(RlibDepReader::parse_rustc_version("1.2.3".as_bytes()).is_err()); + } + + #[cfg(feature = "dist-client")] + #[test] + fn test_rlib_dep_reader_get_correct_ls_arg() { + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(0, 73, 0)), + "ls" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(1, 73, 0)), + "ls" + ); + 
assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(1, 73, 1)), + "ls" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(1, 74, 0)), + "ls" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(1, 74, 1)), + "ls" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(1, 75, 0)), + "ls=root" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(2, 73, 0)), + "ls=root" + ); + assert_eq!( + RlibDepReader::get_correct_ls_arg(Version::new(2, 74, 0)), + "ls=root" + ); + } + fn mock_dep_info(creator: &Arc>, dep_srcs: &[&str]) { // Mock the `rustc --emit=dep-info` process by writing // a dep-info file. @@ -3213,7 +3422,9 @@ proc_macro false emit, color_mode: ColorMode::Auto, has_json: false, + profile: None, gcno: None, + target_json: None, }, }); let creator = new_creator(); @@ -3229,6 +3440,10 @@ proc_macro false (OsString::from("CARGO_PKG_NAME"), OsString::from("foo")), (OsString::from("FOO"), OsString::from("bar")), (OsString::from("CARGO_BLAH"), OsString::from("abc")), + ( + OsString::from("CARGO_REGISTRIES_A_TOKEN"), + OsString::from("ignored"), + ), ] .to_vec(), false, @@ -3582,4 +3797,75 @@ proc_macro false assert_eq!(h.gcno, Some("foo-a1b6419f8321841f.gcno".into())); } + + #[test] + fn test_parse_remap_path_prefix() { + let h = parses!( + "--crate-name", + "foo", + "--crate-type", + "lib", + "./src/lib.rs", + "--emit=dep-info,link", + "--out-dir", + "/out", + "--remap-path-prefix", + "/home/test=~", + "--remap-path-prefix", + "/root=~" + ); + assert!(h.arguments.contains(&Argument::WithValue( + "--remap-path-prefix", + ArgData::PassThrough(OsString::from("/home/test=~")), + ArgDisposition::Separated + ))); + assert!(h.arguments.contains(&Argument::WithValue( + "--remap-path-prefix", + ArgData::PassThrough(OsString::from("/root=~")), + ArgDisposition::Separated + ))); + } + + #[test] + fn test_parse_target() { + // Parse a --target argument that is a string (not a path to a .json file). 
+ let h = parses!( + "--crate-name", + "foo", + "--crate-type", + "lib", + "./src/lib.rs", + "--emit=dep-info,link", + "--out-dir", + "/out", + "--target", + "string" + ); + assert!(h.arguments.contains(&Argument::WithValue( + "--target", + ArgData::Target(ArgTarget::Name("string".to_owned())), + ArgDisposition::Separated + ))); + assert!(h.target_json.is_none()); + + // Parse a --target argument that is a path. + let h = parses!( + "--crate-name", + "foo", + "--crate-type", + "lib", + "./src/lib.rs", + "--emit=dep-info,link", + "--out-dir", + "/out", + "--target", + "/path/to/target.json" + ); + assert!(h.arguments.contains(&Argument::WithValue( + "--target", + ArgData::Target(ArgTarget::Path(PathBuf::from("/path/to/target.json"))), + ArgDisposition::Separated + ))); + assert_eq!(h.target_json, Some(PathBuf::from("/path/to/target.json"))); + } } diff --git a/src/compiler/tasking_vx.rs b/src/compiler/tasking_vx.rs index 0fd4bd10c..ce07857be 100644 --- a/src/compiler/tasking_vx.rs +++ b/src/compiler/tasking_vx.rs @@ -20,7 +20,8 @@ use crate::{ NormalizedDisposition, PathTransformerFn, SearchableArgInfo, }, c::{ArtifactDescriptor, CCompilerImpl, CCompilerKind, ParsedArguments}, - Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + CCompileCommand, Cacheable, ColorMode, CompileCommand, CompilerArguments, Language, + SingleCompileCommand, }, counted_array, dist, errors::*, @@ -58,6 +59,7 @@ impl CCompilerImpl for TaskingVX { &self, arguments: &[OsString], cwd: &Path, + _env_vars: &[(OsString, OsString)], ) -> CompilerArguments { parse_arguments(arguments, cwd, &ARGS[..]) } @@ -88,7 +90,7 @@ impl CCompilerImpl for TaskingVX { .await } - fn generate_compile_commands( + fn generate_compile_commands( &self, path_transformer: &mut dist::PathTransformer, executable: &Path, @@ -96,8 +98,19 @@ impl CCompilerImpl for TaskingVX { cwd: &Path, env_vars: &[(OsString, OsString)], _rewrite_includes_only: bool, - ) -> Result<(CompileCommand, Option, Cacheable)> { - 
generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars) + ) -> Result<( + Box>, + Option, + Cacheable, + )> + where + T: CommandCreatorSync, + { + generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars).map( + |(command, dist_command, cacheable)| { + (CCompileCommand::new(command), dist_command, cacheable) + }, + ) } } @@ -276,6 +289,7 @@ where common_args, arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -353,7 +367,11 @@ fn generate_compile_commands( parsed_args: &ParsedArguments, cwd: &Path, env_vars: &[(OsString, OsString)], -) -> Result<(CompileCommand, Option, Cacheable)> { +) -> Result<( + SingleCompileCommand, + Option, + Cacheable, +)> { trace!("compile"); let out_file = match parsed_args.outputs.get("obj") { @@ -370,7 +388,7 @@ fn generate_compile_commands( arguments.extend_from_slice(&parsed_args.preprocessor_args); arguments.extend_from_slice(&parsed_args.unhashed_args); arguments.extend_from_slice(&parsed_args.common_args); - let command = CompileCommand { + let command = SingleCompileCommand { executable: executable.to_owned(), arguments, env_vars: env_vars.to_owned(), @@ -389,6 +407,8 @@ mod test { use crate::compiler::c::ArtifactDescriptor; use crate::compiler::*; use crate::mock_command::*; + use crate::server; + use crate::test::mock_storage::MockStorage; use crate::test::utils::*; fn parse_arguments_(arguments: Vec) -> CompilerArguments { @@ -702,6 +722,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: vec![], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -709,6 +730,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = 
std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -721,7 +746,7 @@ mod test { &[], ) .unwrap(); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::Yes, cacheable); // Ensure that we ran all processes. assert_eq!(0, creator.lock().unwrap().children.len()); @@ -751,6 +776,7 @@ mod test { common_args: vec![], arch_args: vec![], unhashed_args: ovec!["--threads", "2"], + extra_dist_files: vec![], extra_hash_files: vec![], msvc_show_includes: false, profile_generate: false, @@ -758,6 +784,10 @@ mod test { suppress_rewrite_includes_only: false, too_hard_for_preprocessor_cache_mode: None, }; + let runtime = single_threaded_runtime(); + let storage = MockStorage::new(None, false); + let storage: std::sync::Arc = std::sync::Arc::new(storage); + let service = server::SccacheService::mock_with_storage(storage, runtime.handle().clone()); let compiler = &f.bins[0]; // Compiler invocation. next_command(&creator, Ok(MockChild::new(exit_status(0), "", ""))); @@ -774,7 +804,7 @@ mod test { ovec!["-c", "foo.cu", "-o", "foo.o", "--threads", "2"], command.arguments ); - let _ = command.execute(&creator).wait(); + let _ = command.execute(&service, &creator).wait(); assert_eq!(Cacheable::Yes, cacheable); // Ensure that we ran all processes. 
assert_eq!(0, creator.lock().unwrap().children.len()); diff --git a/src/config.rs b/src/config.rs index a2a12060f..8d56bc106 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1342,22 +1342,25 @@ fn test_s3_no_credentials_conflict_0() { #[test] #[serial] -fn test_s3_no_credentials_conflict_1() { +#[cfg(feature = "s3")] +fn test_s3_no_credentials_conflict() { env::set_var("SCCACHE_S3_NO_CREDENTIALS", "true"); env::set_var("SCCACHE_BUCKET", "my-bucket"); env::set_var("AWS_ACCESS_KEY_ID", "aws-access-key-id"); env::set_var("AWS_SECRET_ACCESS_KEY", "aws-secret-access-key"); - let error = config_from_env().unwrap_err(); - assert_eq!( - "If setting S3 credentials, SCCACHE_S3_NO_CREDENTIALS must not be set.", - error.to_string() - ); + let cfg = config_from_env(); env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); env::remove_var("SCCACHE_BUCKET"); env::remove_var("AWS_ACCESS_KEY_ID"); env::remove_var("AWS_SECRET_ACCESS_KEY"); + + let error = cfg.unwrap_err(); + assert_eq!( + "If setting S3 credentials, SCCACHE_S3_NO_CREDENTIALS must not be set.", + error.to_string() + ); } #[test] @@ -1366,14 +1369,16 @@ fn test_s3_no_credentials_invalid() { env::set_var("SCCACHE_S3_NO_CREDENTIALS", "yes"); env::set_var("SCCACHE_BUCKET", "my-bucket"); - let error = config_from_env().unwrap_err(); + let cfg = config_from_env(); + + env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); + env::remove_var("SCCACHE_BUCKET"); + + let error = cfg.unwrap_err(); assert_eq!( "SCCACHE_S3_NO_CREDENTIALS must be 'true', 'on', '1', 'false', 'off' or '0'.", error.to_string() ); - - env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); - env::remove_var("SCCACHE_BUCKET"); } #[test] @@ -1382,7 +1387,12 @@ fn test_s3_no_credentials_valid_true() { env::set_var("SCCACHE_S3_NO_CREDENTIALS", "true"); env::set_var("SCCACHE_BUCKET", "my-bucket"); - let env_cfg = config_from_env().unwrap(); + let cfg = config_from_env(); + + env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); + env::remove_var("SCCACHE_BUCKET"); + + let env_cfg = 
cfg.unwrap(); match env_cfg.cache.s3 { Some(S3CacheConfig { ref bucket, @@ -1394,9 +1404,6 @@ fn test_s3_no_credentials_valid_true() { } None => unreachable!(), }; - - env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); - env::remove_var("SCCACHE_BUCKET"); } #[test] @@ -1405,7 +1412,12 @@ fn test_s3_no_credentials_valid_false() { env::set_var("SCCACHE_S3_NO_CREDENTIALS", "false"); env::set_var("SCCACHE_BUCKET", "my-bucket"); - let env_cfg = config_from_env().unwrap(); + let cfg = config_from_env(); + + env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); + env::remove_var("SCCACHE_BUCKET"); + + let env_cfg = cfg.unwrap(); match env_cfg.cache.s3 { Some(S3CacheConfig { ref bucket, @@ -1417,18 +1429,23 @@ fn test_s3_no_credentials_valid_false() { } None => unreachable!(), }; - - env::remove_var("SCCACHE_S3_NO_CREDENTIALS"); - env::remove_var("SCCACHE_BUCKET"); } #[test] +#[serial] +#[cfg(feature = "gcs")] fn test_gcs_service_account() { env::set_var("SCCACHE_GCS_BUCKET", "my-bucket"); env::set_var("SCCACHE_GCS_SERVICE_ACCOUNT", "my@example.com"); env::set_var("SCCACHE_GCS_RW_MODE", "READ_WRITE"); - let env_cfg = config_from_env().unwrap(); + let cfg = config_from_env(); + + env::remove_var("SCCACHE_GCS_BUCKET"); + env::remove_var("SCCACHE_GCS_SERVICE_ACCOUNT"); + env::remove_var("SCCACHE_GCS_RW_MODE"); + + let env_cfg = cfg.unwrap(); match env_cfg.cache.gcs { Some(GCSCacheConfig { ref bucket, @@ -1442,10 +1459,6 @@ fn test_gcs_service_account() { } None => unreachable!(), }; - - env::remove_var("SCCACHE_GCS_BUCKET"); - env::remove_var("SCCACHE_GCS_SERVICE_ACCOUNT"); - env::remove_var("SCCACHE_GCS_RW_MODE"); } #[test] diff --git a/src/dist/http.rs b/src/dist/http.rs index 621fe8277..3d6617a9a 100644 --- a/src/dist/http.rs +++ b/src/dist/http.rs @@ -1078,7 +1078,7 @@ mod client { use super::urls; use crate::errors::*; - const REQUEST_TIMEOUT_SECS: u64 = 600; + const REQUEST_TIMEOUT_SECS: u64 = 1200; const CONNECT_TIMEOUT_SECS: u64 = 5; pub struct Client { diff --git 
a/src/dist/mod.rs b/src/dist/mod.rs index 93a6859cf..a55b7c1e3 100644 --- a/src/dist/mod.rs +++ b/src/dist/mod.rs @@ -300,6 +300,7 @@ pub fn osstrings_to_strings(osstrings: &[OsString]) -> Option> { .map(|arg| arg.clone().into_string().ok()) .collect::>() } + pub fn osstring_tuples_to_strings( osstring_tuples: &[(OsString, OsString)], ) -> Option> { @@ -309,9 +310,18 @@ pub fn osstring_tuples_to_strings( .collect::>() } +pub fn strings_to_osstrings(strings: &[String]) -> Vec { + strings + .iter() + .map(|arg| std::ffi::OsStr::new(arg).to_os_string()) + .collect::>() +} + // TODO: TryFrom -pub fn try_compile_command_to_dist(command: compiler::CompileCommand) -> Option { - let compiler::CompileCommand { +pub fn try_compile_command_to_dist( + command: compiler::SingleCompileCommand, +) -> Option { + let compiler::SingleCompileCommand { executable, arguments, env_vars, diff --git a/src/jobserver.rs b/src/jobserver.rs index c0df054db..c7ea6a931 100644 --- a/src/jobserver.rs +++ b/src/jobserver.rs @@ -8,6 +8,68 @@ use futures::StreamExt; use crate::errors::*; +// The execution model of sccache is that on the first run it spawns a server +// in the background and detaches it. +// When normally executing the rust compiler from either cargo or make, it +// will use cargo/make's jobserver and limit its resource usage accordingly. +// When executing the rust compiler through the sccache server, that jobserver +// is not available, and spawning as many rustc as there are CPUs can lead to +// a quadratic use of the CPU resources (each rustc spawning as many threads +// as there are CPUs). +// One way around this issue is to inherit the jobserver from cargo or make +// when the sccache server is spawned, but that means that in some cases, the +// cargo or make process can't terminate until the sccache server terminates +// after its idle timeout (which also never happens if SCCACHE_IDLE_TIMEOUT=0). 
+// Also, if the sccache server ends up shared between multiple runs of +// cargo/make, then which jobserver is used doesn't make sense anymore. +// Ideally, the sccache client would give a handle to the jobserver it has +// access to, so that the rust compiler would "just" use the jobserver it +// would have used if it had run without sccache, but that adds some extra +// complexity, and requires to use Unix domain sockets. +// What we do instead is to arbitrary use our own jobserver. +// Unfortunately, that doesn't absolve us from having to deal with the original +// jobserver, because make may give us file descriptors to its pipes, and the +// simple fact of keeping them open can block it. +// So if it does give us those file descriptors, close the preemptively. +// +// unsafe because it can use the wrong fds. +#[cfg(not(windows))] +pub unsafe fn discard_inherited_jobserver() { + if let Some(value) = ["CARGO_MAKEFLAGS", "MAKEFLAGS", "MFLAGS"] + .into_iter() + .find_map(|env| std::env::var(env).ok()) + { + if let Some(auth) = value.rsplit(' ').find_map(|arg| { + arg.strip_prefix("--jobserver-auth=") + .or_else(|| arg.strip_prefix("--jobserver-fds=")) + }) { + if !auth.starts_with("fifo:") { + let mut parts = auth.splitn(2, ','); + let read = parts.next().unwrap(); + let write = match parts.next() { + Some(w) => w, + None => return, + }; + let read = read.parse().unwrap(); + let write = write.parse().unwrap(); + if read < 0 || write < 0 { + return; + } + unsafe { + if libc::fcntl(read, libc::F_GETFD) == -1 { + return; + } + if libc::fcntl(write, libc::F_GETFD) == -1 { + return; + } + libc::close(read); + libc::close(write); + } + } + } + } +} + #[derive(Clone)] pub struct Client { helper: Option>, @@ -20,12 +82,8 @@ pub struct Acquired { } impl Client { - // unsafe because `from_env` is unsafe (can use the wrong fds) - pub unsafe fn new() -> Client { - match jobserver::Client::from_env() { - Some(c) => Client::_new(c, true), - None => 
Client::new_num(num_cpus::get()), - } + pub fn new() -> Client { + Client::new_num(num_cpus::get()) } pub fn new_num(num: usize) -> Client { diff --git a/src/lib.rs b/src/lib.rs index a893a155f..8d3001c1c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -43,6 +43,7 @@ pub mod dist; mod jobserver; pub mod lru_disk_cache; mod mock_command; +mod net; mod protocol; pub mod server; #[doc(hidden)] diff --git a/src/net.rs b/src/net.rs new file mode 100644 index 000000000..79d350f68 --- /dev/null +++ b/src/net.rs @@ -0,0 +1,182 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! The module is used to provide abstraction over TCP socket and UDS. + +use std::fmt; +#[cfg(any(target_os = "linux", target_os = "android"))] +use std::os::linux::net::SocketAddrExt; + +use futures::{Future, TryFutureExt}; +use tokio::io::{AsyncRead, AsyncWrite}; + +// A unify version of `std::net::SocketAddr` and Unix domain socket. +#[derive(Debug)] +pub enum SocketAddr { + Net(std::net::SocketAddr), + // This could work on Windows in the future. See also rust-lang/rust#56533. 
+ #[cfg(unix)] + Unix(std::path::PathBuf), + #[cfg(any(target_os = "linux", target_os = "android"))] + UnixAbstract(Vec), +} + +impl fmt::Display for SocketAddr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SocketAddr::Net(addr) => write!(f, "{}", addr), + #[cfg(unix)] + SocketAddr::Unix(p) => write!(f, "{}", p.display()), + #[cfg(any(target_os = "linux", target_os = "android"))] + SocketAddr::UnixAbstract(p) => write!(f, "\\x00{}", p.escape_ascii()), + } + } +} + +impl SocketAddr { + /// Get a Net address that with IP part set to "127.0.0.1". + #[inline] + pub fn with_port(port: u16) -> Self { + SocketAddr::Net(std::net::SocketAddr::from(([127, 0, 0, 1], port))) + } + + #[inline] + pub fn as_net(&self) -> Option<&std::net::SocketAddr> { + match self { + SocketAddr::Net(addr) => Some(addr), + #[cfg(unix)] + _ => None, + } + } + + /// Parse a string as a unix domain socket. + /// + /// The string should follow the format of `self.to_string()`. + #[cfg(unix)] + pub fn parse_uds(s: &str) -> std::io::Result { + // Parse abstract socket address first as it can contain any chars. + #[cfg(any(target_os = "linux", target_os = "android"))] + { + if s.starts_with("\\x00") { + // Rust abstract path expects no prepand '\x00'. + let data = crate::util::ascii_unescape_default(&s.as_bytes()[4..])?; + return Ok(SocketAddr::UnixAbstract(data)); + } + } + let path = std::path::PathBuf::from(s); + Ok(SocketAddr::Unix(path)) + } + + #[cfg(unix)] + pub fn is_unix_path(&self) -> bool { + matches!(self, SocketAddr::Unix(_)) + } + + #[cfg(not(unix))] + pub fn is_unix_path(&self) -> bool { + false + } +} + +// A helper trait to unify the behavior of TCP and UDS listener. 
+pub trait Acceptor { + type Socket: AsyncRead + AsyncWrite + Unpin + Send; + + fn accept(&self) -> impl Future> + Send; + fn local_addr(&self) -> tokio::io::Result>; +} + +impl Acceptor for tokio::net::TcpListener { + type Socket = tokio::net::TcpStream; + + #[inline] + fn accept(&self) -> impl Future> + Send { + tokio::net::TcpListener::accept(self).and_then(|(s, _)| futures::future::ok(s)) + } + + #[inline] + fn local_addr(&self) -> tokio::io::Result> { + tokio::net::TcpListener::local_addr(self).map(|a| Some(SocketAddr::Net(a))) + } +} + +// A helper trait to unify the behavior of TCP and UDS stream. +pub trait Connection: std::io::Read + std::io::Write { + fn try_clone(&self) -> std::io::Result>; +} + +impl Connection for std::net::TcpStream { + #[inline] + fn try_clone(&self) -> std::io::Result> { + let stream = std::net::TcpStream::try_clone(self)?; + Ok(Box::new(stream)) + } +} + +// Helper function to create a stream. Uses dynamic dispatch to make code more +// readable. +pub fn connect(addr: &SocketAddr) -> std::io::Result> { + match addr { + SocketAddr::Net(addr) => { + std::net::TcpStream::connect(addr).map(|s| Box::new(s) as Box) + } + #[cfg(unix)] + SocketAddr::Unix(p) => { + std::os::unix::net::UnixStream::connect(p).map(|s| Box::new(s) as Box) + } + #[cfg(any(target_os = "linux", target_os = "android"))] + SocketAddr::UnixAbstract(p) => { + let sock = std::os::unix::net::SocketAddr::from_abstract_name(p)?; + std::os::unix::net::UnixStream::connect_addr(&sock) + .map(|s| Box::new(s) as Box) + } + } +} + +#[cfg(unix)] +mod unix_imp { + use futures::TryFutureExt; + + use super::*; + + impl Acceptor for tokio::net::UnixListener { + type Socket = tokio::net::UnixStream; + + #[inline] + fn accept(&self) -> impl Future> + Send { + tokio::net::UnixListener::accept(self).and_then(|(s, _)| futures::future::ok(s)) + } + + #[inline] + fn local_addr(&self) -> tokio::io::Result> { + let addr = tokio::net::UnixListener::local_addr(self)?; + if let Some(p) = 
addr.as_pathname() { + return Ok(Some(SocketAddr::Unix(p.to_path_buf()))); + } + // TODO: support get addr from abstract socket. + // tokio::net::SocketAddr needs to support `as_abstract_name`. + // #[cfg(any(target_os = "linux", target_os = "android"))] + // if let Some(p) = addr.0.as_abstract_name() { + // return Ok(SocketAddr::UnixAbstract(p.to_vec())); + // } + Ok(None) + } + } + + impl Connection for std::os::unix::net::UnixStream { + #[inline] + fn try_clone(&self) -> std::io::Result> { + let stream = std::os::unix::net::UnixStream::try_clone(self)?; + Ok(Box::new(stream)) + } + } +} diff --git a/src/server.rs b/src/server.rs index 094dc4ac9..0620e61b9 100644 --- a/src/server.rs +++ b/src/server.rs @@ -38,15 +38,16 @@ use futures::{future, stream, Sink, SinkExt, Stream, StreamExt, TryFutureExt}; use number_prefix::NumberPrefix; use serde::{Deserialize, Serialize}; use std::cell::Cell; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::env; -use std::ffi::{OsStr, OsString}; +use std::ffi::OsString; use std::future::Future; use std::io::{self, Write}; use std::marker::Unpin; #[cfg(feature = "dist-client")] use std::mem; -use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; +#[cfg(any(target_os = "linux", target_os = "android"))] +use std::os::linux::net::SocketAddrExt; use std::path::PathBuf; use std::pin::Pin; use std::process::{ExitStatus, Output}; @@ -55,12 +56,10 @@ use std::task::{Context, Poll, Waker}; use std::time::Duration; #[cfg(feature = "dist-client")] use std::time::Instant; -use std::u64; use tokio::sync::Mutex; use tokio::sync::RwLock; use tokio::{ io::{AsyncRead, AsyncWrite}, - net::TcpListener, runtime::Runtime, time::{self, sleep, Sleep}, }; @@ -81,8 +80,8 @@ const DIST_CLIENT_RECREATE_TIMEOUT: Duration = Duration::from_secs(30); /// Result of background server startup. #[derive(Debug, Serialize, Deserialize)] pub enum ServerStartup { - /// Server started successfully on `port`. 
- Ok { port: u16 }, + /// Server started successfully on `addr`. + Ok { addr: String }, /// Server Addr already in suse AddrInUse, /// Timed out waiting for server startup. @@ -145,7 +144,7 @@ pub struct DistClientContainer { } #[cfg(feature = "dist-client")] -struct DistClientConfig { +pub struct DistClientConfig { // Reusable items tied to an SccacheServer instance pool: tokio::runtime::Handle, @@ -159,7 +158,7 @@ struct DistClientConfig { } #[cfg(feature = "dist-client")] -enum DistClientState { +pub enum DistClientState { #[cfg(feature = "dist-client")] Some(Box, Arc), #[cfg(feature = "dist-client")] @@ -183,6 +182,11 @@ impl DistClientContainer { Self {} } + #[cfg(feature = "dist-client")] + pub fn new_with_state(_: DistClientState) -> Self { + Self {} + } + pub async fn reset_state(&self) {} pub async fn get_status(&self) -> DistInfo { @@ -213,6 +217,13 @@ impl DistClientContainer { } } + #[cfg(feature = "dist-client")] + pub fn new_with_state(state: DistClientState) -> Self { + Self { + state: futures::lock::Mutex::new(state), + } + } + pub fn new_disabled() -> Self { Self { state: futures::lock::Mutex::new(DistClientState::Disabled), @@ -401,12 +412,12 @@ thread_local! { static PANIC_LOCATION: Cell> = const { Cell::new(None) }; } -/// Start an sccache server, listening on `port`. +/// Start an sccache server, listening on `addr`. /// /// Spins an event loop handling client connections until a client /// requests a shutdown. 
-pub fn start_server(config: &Config, port: u16) -> Result<()> { - info!("start_server: port: {}", port); +pub fn start_server(config: &Config, addr: &crate::net::SocketAddr) -> Result<()> { + info!("start_server: {addr}"); let panic_hook = std::panic::take_hook(); std::panic::set_hook(Box::new(move |info| { PANIC_LOCATION.with(|l| { @@ -417,7 +428,7 @@ pub fn start_server(config: &Config, port: u16) -> Result<()> { }); panic_hook(info) })); - let client = unsafe { Client::new() }; + let client = Client::new(); let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .worker_threads(std::cmp::max(20, 2 * num_cpus::get())) @@ -467,59 +478,105 @@ pub fn start_server(config: &Config, port: u16) -> Result<()> { _ => raw_storage, }; - let res = - SccacheServer::::new(port, runtime, client, dist_client, storage); + let res = (|| -> io::Result<_> { + match addr { + crate::net::SocketAddr::Net(addr) => { + trace!("binding TCP {addr}"); + let l = runtime.block_on(tokio::net::TcpListener::bind(addr))?; + let srv = + SccacheServer::<_>::with_listener(l, runtime, client, dist_client, storage); + Ok(( + srv.local_addr().unwrap(), + Box::new(move |f| srv.run(f)) as Box _>, + )) + } + #[cfg(unix)] + crate::net::SocketAddr::Unix(path) => { + trace!("binding unix socket {}", path.display()); + // Unix socket will report addr in use on any unlink file. + let _ = std::fs::remove_file(path); + let l = { + let _guard = runtime.enter(); + tokio::net::UnixListener::bind(path)? 
+ }; + let srv = + SccacheServer::<_>::with_listener(l, runtime, client, dist_client, storage); + Ok(( + srv.local_addr().unwrap(), + Box::new(move |f| srv.run(f)) as Box _>, + )) + } + #[cfg(any(target_os = "linux", target_os = "android"))] + crate::net::SocketAddr::UnixAbstract(p) => { + trace!("binding abstract unix socket {}", p.escape_ascii()); + let abstract_addr = std::os::unix::net::SocketAddr::from_abstract_name(p)?; + let l = std::os::unix::net::UnixListener::bind_addr(&abstract_addr)?; + l.set_nonblocking(true)?; + let l = { + let _guard = runtime.enter(); + tokio::net::UnixListener::from_std(l)? + }; + let srv = + SccacheServer::<_>::with_listener(l, runtime, client, dist_client, storage); + Ok(( + srv.local_addr() + .unwrap_or_else(|| crate::net::SocketAddr::UnixAbstract(p.to_vec())), + Box::new(move |f| srv.run(f)) as Box _>, + )) + } + } + })(); match res { - Ok(srv) => { - let port = srv.port(); - info!("server started, listening on port {}", port); - notify_server_startup(¬ify, ServerStartup::Ok { port })?; - srv.run(future::pending::<()>())?; + Ok((addr, run)) => { + info!("server started, listening on {addr}"); + notify_server_startup( + ¬ify, + ServerStartup::Ok { + addr: addr.to_string(), + }, + )?; + run(future::pending::<()>())?; Ok(()) } Err(e) => { error!("failed to start server: {}", e); - match e.downcast_ref::() { - Some(io_err) if io::ErrorKind::AddrInUse == io_err.kind() => { - notify_server_startup(¬ify, ServerStartup::AddrInUse)?; - } - Some(io_err) if cfg!(windows) && Some(10013) == io_err.raw_os_error() => { - // 10013 is the "WSAEACCES" error, which can occur if the requested port - // has been allocated for other purposes, such as winNAT or Hyper-V. 
- let windows_help_message = - "A Windows port exclusion is blocking use of the configured port.\nTry setting SCCACHE_SERVER_PORT to a new value."; - let reason: String = format!("{windows_help_message}\n{e}"); - notify_server_startup(¬ify, ServerStartup::Err { reason })?; - } - _ => { - let reason = e.to_string(); - notify_server_startup(¬ify, ServerStartup::Err { reason })?; - } - }; - Err(e) + if io::ErrorKind::AddrInUse == e.kind() { + notify_server_startup(¬ify, ServerStartup::AddrInUse)?; + } else if cfg!(windows) && Some(10013) == e.raw_os_error() { + // 10013 is the "WSAEACCES" error, which can occur if the requested port + // has been allocated for other purposes, such as winNAT or Hyper-V. + let windows_help_message = + "A Windows port exclusion is blocking use of the configured port.\nTry setting SCCACHE_SERVER_PORT to a new value."; + let reason: String = format!("{windows_help_message}\n{e}"); + notify_server_startup(¬ify, ServerStartup::Err { reason })?; + } else { + let reason = e.to_string(); + notify_server_startup(¬ify, ServerStartup::Err { reason })?; + } + Err(e.into()) } } } -pub struct SccacheServer { +pub struct SccacheServer { runtime: Runtime, - listener: TcpListener, + listener: A, rx: mpsc::Receiver, timeout: Duration, service: SccacheService, wait: WaitUntilZero, } -impl SccacheServer { +impl SccacheServer { pub fn new( port: u16, runtime: Runtime, client: Client, dist_client: DistClientContainer, storage: Arc, - ) -> Result> { - let addr = SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), port); - let listener = runtime.block_on(TcpListener::bind(&SocketAddr::V4(addr)))?; + ) -> Result { + let addr = crate::net::SocketAddr::with_port(port); + let listener = runtime.block_on(tokio::net::TcpListener::bind(addr.as_net().unwrap()))?; Ok(Self::with_listener( listener, @@ -529,14 +586,16 @@ impl SccacheServer { storage, )) } +} +impl SccacheServer { pub fn with_listener( - listener: TcpListener, + listener: A, runtime: Runtime, client: Client, 
dist_client: DistClientContainer, storage: Arc, - ) -> SccacheServer { + ) -> Self { // Prepare the service which we'll use to service all incoming TCP // connections. let (tx, rx) = mpsc::channel(1); @@ -580,8 +639,8 @@ impl SccacheServer { /// Returns the port that this server is bound to #[allow(dead_code)] - pub fn port(&self) -> u16 { - self.listener.local_addr().unwrap().port() + pub fn local_addr(&self) -> Option { + self.listener.local_addr().unwrap() } /// Runs this server to completion. @@ -593,6 +652,7 @@ impl SccacheServer { where F: Future, C: Send, + A::Socket: 'static, { let SccacheServer { runtime, @@ -607,7 +667,7 @@ impl SccacheServer { // connections in separate tasks. let server = async move { loop { - let (socket, _) = listener.accept().await?; + let socket = listener.accept().await?; trace!("incoming connection"); let conn = service.clone().bind(socket).map_err(|res| { error!("Failed to bind socket: {}", res); @@ -709,7 +769,7 @@ impl CompilerCacheEntry { } /// Service implementation for sccache #[derive(Clone)] -struct SccacheService +pub struct SccacheService where C: Send, { @@ -756,7 +816,7 @@ where } type SccacheRequest = Message>; -type SccacheResponse = Message>; +type SccacheResponse = Message> + Send>>>; /// Messages sent from all services to the main event loop indicating activity. 
/// @@ -865,6 +925,60 @@ where } } + pub fn mock_with_storage( + storage: Arc, + rt: tokio::runtime::Handle, + ) -> SccacheService { + let (tx, _) = mpsc::channel(1); + let (_, info) = WaitUntilZero::new(); + let client = Client::new_num(1); + let dist_client = DistClientContainer::new_disabled(); + SccacheService { + stats: Arc::default(), + dist_client: Arc::new(dist_client), + storage, + compilers: Arc::default(), + compiler_proxies: Arc::default(), + rt, + creator: C::new(&client), + tx, + info, + } + } + + #[cfg(feature = "dist-client")] + pub fn mock_with_dist_client( + dist_client: Arc, + storage: Arc, + rt: tokio::runtime::Handle, + ) -> SccacheService { + let (tx, _) = mpsc::channel(1); + let (_, info) = WaitUntilZero::new(); + let client = Client::new_num(1); + SccacheService { + stats: Arc::default(), + dist_client: Arc::new(DistClientContainer::new_with_state(DistClientState::Some( + Box::new(DistClientConfig { + pool: rt.clone(), + scheduler_url: None, + auth: config::DistAuth::Token { token: "".into() }, + cache_dir: "".into(), + toolchain_cache_size: 0, + toolchains: vec![], + rewrite_includes_only: false, + }), + dist_client, + ))), + storage, + compilers: Arc::default(), + compiler_proxies: Arc::default(), + rt: rt.clone(), + creator: C::new(&client), + tx, + info, + } + } + fn bind(self, socket: T) -> impl Future> + Send + Sized + 'static where T: AsyncRead + AsyncWrite + Unpin + Send + 'static, @@ -889,15 +1003,18 @@ where stream .err_into::() .and_then(move |input| me.clone().call(input)) - .and_then(move |message| async move { - let fut = match message { + .and_then(move |response| async move { + let fut = match response { Message::WithoutBody(message) => { let stream = stream::once(async move { Ok(Frame::Message { message }) }); Either::Left(stream) } Message::WithBody(message, body) => { let stream = stream::once(async move { Ok(Frame::Message { message }) }) - .chain(body.map_ok(|chunk| Frame::Body { chunk: Some(chunk) })) + .chain( + 
body.into_stream() + .map_ok(|chunk| Frame::Body { chunk: Some(chunk) }), + ) .chain(stream::once(async move { Ok(Frame::Body { chunk: None }) })); Either::Right(stream) } @@ -944,7 +1061,7 @@ where /// Look up compiler info from the cache for the compiler `path`. /// If not cached, determine the compiler type and cache the result. - async fn compiler_info( + pub async fn compiler_info( &self, path: PathBuf, cwd: PathBuf, @@ -1125,11 +1242,17 @@ where match c.parse_arguments(&cmd, &cwd, &env_vars) { CompilerArguments::Ok(hasher) => { debug!("parse_arguments: Ok: {:?}", cmd); - self.stats.lock().await.requests_executed += 1; - let (tx, rx) = Body::pair(); - self.start_compile_task(c, hasher, cmd, cwd, env_vars, tx); - let res = CompileResponse::CompileStarted; - return Message::WithBody(Response::Compile(res), rx); + + let body = self + .clone() + .start_compile_task(c, hasher, cmd, cwd, env_vars) + .and_then(|res| async { Ok(Response::CompileFinished(res)) }) + .boxed(); + + return Message::WithBody( + Response::Compile(CompileResponse::CompileStarted), + body, + ); } CompilerArguments::CannotCache(why, extra_info) => { if let Some(extra_info) = extra_info { @@ -1159,192 +1282,234 @@ where /// Given compiler arguments `arguments`, look up /// a compile result in the cache or execute the compilation and store /// the result in the cache. 
- fn start_compile_task( - &self, + pub async fn start_compile_task( + self, compiler: Box>, hasher: Box>, arguments: Vec, cwd: PathBuf, env_vars: Vec<(OsString, OsString)>, - mut tx: mpsc::Sender>, - ) { - let force_recache = env_vars - .iter() - .any(|(k, _v)| k.as_os_str() == OsStr::new("SCCACHE_RECACHE")); - let cache_control = if force_recache { + ) -> Result { + self.stats.lock().await.requests_executed += 1; + + let force_recache = env_vars.iter().any(|(k, _v)| k == "SCCACHE_RECACHE"); + let force_no_cache = env_vars.iter().any(|(k, _v)| k == "SCCACHE_NO_CACHE"); + + let cache_control = if force_no_cache { + CacheControl::ForceNoCache + } else if force_recache { CacheControl::ForceRecache } else { CacheControl::Default }; + let out_pretty = hasher.output_pretty().into_owned(); let color_mode = hasher.color_mode(); + + let (kind, lang) = { + // HACK: See note in src/compiler/nvcc.rs + if env_vars + .iter() + .any(|(k, _)| k == "__SCCACHE_THIS_IS_A_CUDA_COMPILATION__") + { + ( + CompilerKind::C(crate::compiler::CCompilerKind::Nvcc), + Language::Cuda, + ) + } else { + (compiler.kind(), hasher.language()) + } + }; + let me = self.clone(); - let kind = compiler.kind(); - let lang = hasher.language(); - let creator = self.creator.clone(); - let storage = self.storage.clone(); - let pool = self.rt.clone(); - - let task = async move { - let dist_client = me.dist_client.get_client().await; - let result = match dist_client { - Ok(client) => std::panic::AssertUnwindSafe(hasher.get_cached_or_compile( - client, - creator, - storage, - arguments, - cwd, - env_vars, - cache_control, - pool, - )) - .catch_unwind() - .await - .map_err(|e| { - let panic = e - .downcast_ref::<&str>() - .map(|s| &**s) - .or_else(|| e.downcast_ref::().map(|s| &**s)) - .unwrap_or("An unknown panic was caught."); - let thread = std::thread::current(); - let thread_name = thread.name().unwrap_or("unnamed"); - if let Some((file, line, column)) = PANIC_LOCATION.with(|l| l.take()) { - anyhow!( - 
"thread '{thread_name}' panicked at {file}:{line}:{column}: {panic}" + + self.rt + .spawn(async move { + + let result = match me.dist_client.get_client().await { + Ok(client) => { + std::panic::AssertUnwindSafe(hasher + .get_cached_or_compile( + &me, + client, + me.creator.clone(), + me.storage.clone(), + arguments, + cwd, + env_vars, + cache_control, + me.rt.clone(), + ) ) - } else { - anyhow!("thread '{thread_name}' panicked: {panic}") + .catch_unwind() + .await + .map_err(|e| { + let panic = e + .downcast_ref::<&str>() + .map(|s| &**s) + .or_else(|| e.downcast_ref::().map(|s| &**s)) + .unwrap_or("An unknown panic was caught."); + let thread = std::thread::current(); + let thread_name = thread.name().unwrap_or("unnamed"); + if let Some((file, line, column)) = PANIC_LOCATION.with(|l| l.take()) { + anyhow!("thread '{thread_name}' panicked at {file}:{line}:{column}: {panic}") + } else { + anyhow!("thread '{thread_name}' panicked: {panic}") + } + }) + .and_then(std::convert::identity) } - }) - .and_then(std::convert::identity), - Err(e) => Err(e), - }; - let mut cache_write = None; - let mut res = CompileFinished { - color_mode, - ..Default::default() - }; - match result { - Ok((compiled, out)) => { - let mut stats = me.stats.lock().await; - match compiled { - CompileResult::Error => { - debug!("compile result: cache error"); - - stats.cache_errors.increment(&kind, &lang); - } - CompileResult::CacheHit(duration) => { - debug!("compile result: cache hit"); + Err(e) => Err(e), + }; - stats.cache_hits.increment(&kind, &lang); - stats.cache_read_hit_duration += duration; - } - CompileResult::CacheMiss(miss_type, dist_type, duration, future) => { - debug!("compile result: cache miss"); - - match dist_type { - DistType::NoDist => {} - DistType::Ok(id) => { - let server = id.addr().to_string(); - let server_count = - stats.dist_compiles.entry(server).or_insert(0); - *server_count += 1; - } - DistType::Error => stats.dist_errors += 1, + let mut cache_write = None; + let mut 
res = CompileFinished { + color_mode, + ..Default::default() + }; + + let mut stats = me.stats.lock().await; + + match result { + Ok((compiled, out)) => { + + let mut dist_type = DistType::NoDist; + + match compiled { + CompileResult::Error => { + debug!("compile result: cache error"); + + stats.cache_errors.increment(&kind, &lang); } - match miss_type { - MissType::Normal => {} - MissType::ForcedRecache => { - stats.forced_recaches += 1; - } - MissType::TimedOut => { - stats.cache_timeouts += 1; - } - MissType::CacheReadError => { - stats.cache_errors.increment(&kind, &lang); + CompileResult::CacheHit(duration) => { + debug!("compile result: cache hit"); + + stats.cache_hits.increment(&kind, &lang); + stats.cache_read_hit_duration += duration; + } + CompileResult::CacheMiss(miss_type, dt, duration, future) => { + debug!("[{}]: compile result: cache miss", out_pretty); + dist_type = dt; + + match miss_type { + MissType::Normal => {} + MissType::ForcedNoCache => {} + MissType::ForcedRecache => { + stats.forced_recaches += 1; + } + MissType::TimedOut => { + stats.cache_timeouts += 1; + } + MissType::CacheReadError => { + stats.cache_errors.increment(&kind, &lang); + } } + stats.compilations += 1; + stats.cache_misses.increment(&kind, &lang); + stats.compiler_write_duration += duration; + debug!("stats after compile result: {stats:?}"); + cache_write = Some(future); } - stats.cache_misses.increment(&kind, &lang); - stats.compiler_write_duration += duration; - debug!("stats after compile result: {stats:?}"); - cache_write = Some(future); + CompileResult::NotCached(dt, duration) => { + debug!("[{}]: compile result: not cached", out_pretty); + dist_type = dt; + stats.compilations += 1; + stats.compiler_write_duration += duration; + } + CompileResult::NotCacheable(dt, duration) => { + debug!("[{}]: compile result: not cacheable", out_pretty); + dist_type = dt; + stats.compilations += 1; + stats.compiler_write_duration += duration; + stats.non_cacheable_compilations += 1; 
+ } + CompileResult::CompileFailed(dt, duration) => { + debug!("[{}]: compile result: compile failed", out_pretty); + dist_type = dt; + stats.compilations += 1; + stats.compiler_write_duration += duration; + stats.compile_fails += 1; + } + }; + + match dist_type { + DistType::NoDist => {} + DistType::Ok(id) => { + let server = id.addr().to_string(); + let server_count = stats.dist_compiles.entry(server).or_insert(0); + *server_count += 1; + } + DistType::Error => stats.dist_errors += 1, } - CompileResult::NotCacheable => { - debug!("compile result: not cacheable"); - stats.cache_misses.increment(&kind, &lang); - stats.non_cacheable_compilations += 1; - } - CompileResult::CompileFailed => { - debug!("compile result: compile failed"); + // Make sure the write guard has been dropped ASAP. + drop(stats); - stats.compile_fails += 1; - } - }; - // Make sure the write guard has been dropped ASAP. - drop(stats); - - let Output { - status, - stdout, - stderr, - } = out; - trace!("CompileFinished retcode: {}", status); - match status.code() { - Some(code) => res.retcode = Some(code), - None => res.signal = Some(get_signal(status)), - }; - res.stdout = stdout; - res.stderr = stderr; - } - Err(err) => { - let mut stats = me.stats.lock().await; - match err.downcast::() { - Ok(ProcessError(output)) => { - debug!("Compilation failed: {:?}", output); - stats.compile_fails += 1; - match output.status.code() { - Some(code) => res.retcode = Some(code), - None => res.signal = Some(get_signal(output.status)), - }; - res.stdout = output.stdout; - res.stderr = output.stderr; - } - Err(err) => match err.downcast::() { - Ok(HttpClientError(msg)) => { - me.dist_client.reset_state().await; - let errmsg = - format!("[{:?}] http error status: {}", out_pretty, msg); - error!("{}", errmsg); - res.retcode = Some(1); - res.stderr = errmsg.as_bytes().to_vec(); - } - Err(err) => { - use std::fmt::Write; + let Output { + status, + stdout, + stderr, + } = out; - error!("[{:?}] fatal error: {}", 
out_pretty, err); + trace!("CompileFinished retcode: {}", status); - let mut error = "sccache: encountered fatal error\n".to_string(); - let _ = writeln!(error, "sccache: error: {}", err); - for e in err.chain() { - error!("[{:?}] \t{}", out_pretty, e); - let _ = writeln!(error, "sccache: caused by: {}", e); - } - stats.cache_errors.increment(&kind, &lang); - //TODO: figure out a better way to communicate this? - res.retcode = Some(-2); - res.stderr = error.into_bytes(); + match status.code() { + Some(code) => res.retcode = Some(code), + None => res.signal = Some(get_signal(status)), + }; + + res.stdout = stdout; + res.stderr = stderr; + } + Err(err) => { + match err.downcast::() { + Ok(ProcessError(output)) => { + debug!("Compilation failed: {:?}", output); + stats.compile_fails += 1; + // Make sure the write guard has been dropped ASAP. + drop(stats); + + match output.status.code() { + Some(code) => res.retcode = Some(code), + None => res.signal = Some(get_signal(output.status)), + }; + res.stdout = output.stdout; + res.stderr = output.stderr; } - }, + Err(err) => match err.downcast::() { + Ok(HttpClientError(msg)) => { + // Make sure the write guard has been dropped ASAP. + drop(stats); + me.dist_client.reset_state().await; + let errmsg = format!("[{:?}] http error status: {}", out_pretty, msg); + error!("{}", errmsg); + res.retcode = Some(1); + res.stderr = errmsg.as_bytes().to_vec(); + } + Err(err) => { + stats.cache_errors.increment(&kind, &lang); + // Make sure the write guard has been dropped ASAP. + drop(stats); + + use std::fmt::Write; + + error!("[{:?}] fatal error: {}", out_pretty, err); + + let mut error = "sccache: encountered fatal error\n".to_string(); + let _ = writeln!(error, "sccache: error: {}", err); + for e in err.chain() { + error!("[{:?}] \t{}", out_pretty, e); + let _ = writeln!(error, "sccache: caused by: {}", e); + } + //TODO: figure out a better way to communicate this? 
+ res.retcode = Some(-2); + res.stderr = error.into_bytes(); + } + }, + } } - } - }; - let send = tx - .send(Ok(Response::CompileFinished(res))) - .map_err(|e| anyhow!("send on finish failed").context(e)); + }; - let me = me.clone(); - let cache_write = async move { if let Some(cache_write) = cache_write { match cache_write.await { Err(e) => { @@ -1364,18 +1529,11 @@ where } } } - Ok(()) - }; - - futures::future::try_join(send, cache_write).await?; - - Ok::<_, Error>(()) - }; - self.rt.spawn(async move { - task.await - .unwrap_or_else(|e| warn!("Failed to execute task: {:?}", e)); - }); + Ok(res) + }) + .map_err(anyhow::Error::new) + .await? } } @@ -1448,7 +1606,9 @@ pub struct ServerStats { pub cache_write_duration: Duration, /// The total time spent reading cache hits. pub cache_read_hit_duration: Duration, - /// The total time spent reading cache misses. + /// The number of compilations performed. + pub compilations: u64, + /// The total time spent compiling. pub compiler_write_duration: Duration, /// The count of compilation failures. pub compile_fails: u64, @@ -1501,6 +1661,7 @@ impl Default for ServerStats { cache_writes: u64::default(), cache_write_duration: Duration::new(0, 0), cache_read_hit_duration: Duration::new(0, 0), + compilations: u64::default(), compiler_write_duration: Duration::new(0, 0), compile_fails: u64::default(), not_cached: HashMap::new(), @@ -1510,11 +1671,23 @@ impl Default for ServerStats { } } +pub trait ServerStatsWriter { + fn write(&mut self, text: &str); +} + +pub struct StdoutServerStatsWriter; + +impl ServerStatsWriter for StdoutServerStatsWriter { + fn write(&mut self, text: &str) { + println!("{text}"); + } +} + impl ServerStats { - /// Print stats to stdout in a human-readable format. + /// Print stats in a human-readable format. /// /// Return the formatted width of each of the (name, value) columns. 
- fn print(&self, advanced: bool) -> (usize, usize) { + fn print(&self, writer: &mut T, advanced: bool) -> (usize, usize) { macro_rules! set_stat { ($vec:ident, $var:expr, $name:expr) => {{ // name, value, suffix length @@ -1570,17 +1743,22 @@ impl ServerStats { set_lang_stat!(stats_vec, self.cache_hits, "Cache hits"); set_lang_stat!(stats_vec, self.cache_misses, "Cache misses"); } + + self.set_percentage_stats(&mut stats_vec, advanced); + set_stat!(stats_vec, self.cache_timeouts, "Cache timeouts"); set_stat!(stats_vec, self.cache_read_errors, "Cache read errors"); set_stat!(stats_vec, self.forced_recaches, "Forced recaches"); set_stat!(stats_vec, self.cache_write_errors, "Cache write errors"); - set_stat!(stats_vec, self.compile_fails, "Compilation failures"); if advanced { set_compiler_stat!(stats_vec, self.cache_errors, "Cache errors"); } else { set_lang_stat!(stats_vec, self.cache_errors, "Cache errors"); } + set_stat!(stats_vec, self.compilations, "Compilations"); + set_stat!(stats_vec, self.compile_fails, "Compilation failures"); + set_stat!( stats_vec, self.non_cacheable_compilations, @@ -1610,7 +1788,7 @@ impl ServerStats { set_duration_stat!( stats_vec, self.compiler_write_duration, - self.cache_misses.all(), + self.compilations, "Average compiler" ); set_duration_stat!( @@ -1627,45 +1805,110 @@ impl ServerStats { let name_width = stats_vec.iter().map(|(n, _, _)| n.len()).max().unwrap(); let stat_width = stats_vec.iter().map(|(_, s, _)| s.len()).max().unwrap(); for (name, stat, suffix_len) in stats_vec { - println!( + writer.write(&format!( "{:stat_width$}", name, stat, name_width = name_width, stat_width = stat_width + suffix_len - ); + )); } if !self.dist_compiles.is_empty() { - println!("\nSuccessful distributed compiles"); + writer.write("\nSuccessful distributed compiles"); let mut counts: Vec<_> = self.dist_compiles.iter().collect(); counts.sort_by(|(_, c1), (_, c2)| c1.cmp(c2).reverse()); for (reason, count) in counts { - println!( + 
writer.write(&format!( " {:stat_width$}", reason, count, name_width = name_width - 2, - stat_width = stat_width - ); + stat_width = stat_width, + )); } } if !self.not_cached.is_empty() { - println!("\nNon-cacheable reasons:"); + writer.write("\nNon-cacheable reasons:"); let mut counts: Vec<_> = self.not_cached.iter().collect(); counts.sort_by(|(_, c1), (_, c2)| c1.cmp(c2).reverse()); for (reason, count) in counts { - println!( + writer.write(&format!( "{:stat_width$}", reason, count, name_width = name_width, - stat_width = stat_width - ); + stat_width = stat_width, + )); } - println!(); + writer.write(""); } (name_width, stat_width) } + + fn set_percentage_stats(&self, stats_vec: &mut Vec<(String, String, usize)>, advanced: bool) { + set_percentage_stat( + stats_vec, + self.cache_hits.all(), + self.cache_misses.all() + self.cache_hits.all(), + "Cache hits rate", + ); + + let (stats_hits, stats_misses): (Vec<_>, Vec<_>) = if advanced { + ( + self.cache_hits.adv_counts.iter().collect(), + self.cache_misses.adv_counts.iter().collect(), + ) + } else { + ( + self.cache_hits.counts.iter().collect(), + self.cache_misses.counts.iter().collect(), + ) + }; + + let mut all_languages: HashSet<&String> = HashSet::new(); + for (lang, _) in &stats_hits { + all_languages.insert(lang); + } + for (lang, _) in &stats_misses { + all_languages.insert(lang); + } + + let mut all_languages: Vec<&String> = all_languages.into_iter().collect(); + all_languages.sort(); + + for lang in all_languages { + let count_hits = stats_hits + .iter() + .find(|&&(l, _)| l == lang) + .map_or(0, |&(_, &count)| count); + + let count_misses = stats_misses + .iter() + .find(|&&(l, _)| l == lang) + .map_or(0, |&(_, &count)| count); + + set_percentage_stat( + stats_vec, + count_hits, + count_hits + count_misses, + &format!("Cache hits rate ({})", lang), + ); + } + } +} + +fn set_percentage_stat( + vec: &mut Vec<(String, String, usize)>, + count_hits: u64, + total: u64, + name: &str, +) { + if total == 0 { + 
vec.push((name.to_string(), "-".to_string(), 0)); + } else { + let ratio = count_hits as f64 / total as f64; + vec.push((name.to_string(), format!("{:.2} %", ratio * 100.0), 2)); + } } impl ServerInfo { @@ -1700,7 +1943,7 @@ impl ServerInfo { /// Print info to stdout in a human-readable format. pub fn print(&self, advanced: bool) { - let (name_width, stat_width) = self.stats.print(advanced); + let (name_width, stat_width) = self.stats.print(&mut StdoutServerStatsWriter, advanced); println!( "{: { receiver: mpsc::Receiver>, } -impl Body { - fn pair() -> (mpsc::Sender>, Self) { - let (tx, rx) = mpsc::channel(0); - (tx, Body { receiver: rx }) - } -} - impl futures::Stream for Body { type Item = Result; fn poll_next( @@ -1919,7 +2155,7 @@ struct WaitUntilZero { #[derive(Clone)] #[allow(dead_code)] -struct ActiveInfo { +pub struct ActiveInfo { info: Arc>, } @@ -1978,3 +2214,108 @@ fn waits_until_zero() { drop(active2); assert_eq!(wait.now_or_never(), Some(())); } + +#[cfg(test)] +mod tests { + use super::*; + + struct StringWriter { + buffer: String, + } + + impl StringWriter { + fn new() -> StringWriter { + StringWriter { + buffer: String::new(), + } + } + + fn get_output(self) -> String { + self.buffer + } + } + + impl ServerStatsWriter for StringWriter { + fn write(&mut self, text: &str) { + self.buffer.push_str(&format!("{}\n", text)); + } + } + + #[test] + fn test_print_cache_hits_rate_default_server_stats() { + let stats = ServerStats::default(); + + let mut writer = StringWriter::new(); + stats.print(&mut writer, false); + + let output = writer.get_output(); + + assert!(output.contains("Cache hits rate -")); + } + + #[test] + fn test_print_cache_hits_rate_server_stats() { + let mut cache_hits_counts = HashMap::new(); + cache_hits_counts.insert("Rust".to_string(), 100); + cache_hits_counts.insert("C/C++".to_string(), 200); + + let mut cache_misses_counts = HashMap::new(); + cache_misses_counts.insert("Rust".to_string(), 50); + 
cache_misses_counts.insert("Cuda".to_string(), 300); + + let stats = ServerStats { + cache_hits: PerLanguageCount { + counts: cache_hits_counts, + ..Default::default() + }, + cache_misses: PerLanguageCount { + counts: cache_misses_counts, + ..Default::default() + }, + ..Default::default() + }; + + let mut writer = StringWriter::new(); + stats.print(&mut writer, false); + + let output = writer.get_output(); + + assert!(output.contains("Cache hits rate 46.15 %")); + assert!(output.contains("Cache hits rate (C/C++) 100.00 %")); + assert!(output.contains("Cache hits rate (Cuda) 0.00 %")); + assert!(output.contains("Cache hits rate (Rust) 66.67 %")); + } + + #[test] + fn test_print_cache_hits_rate_advanced_server_stats() { + let mut cache_hits_counts = HashMap::new(); + cache_hits_counts.insert("rust".to_string(), 50); + cache_hits_counts.insert("c/c++ [clang]".to_string(), 30); + + let mut cache_misses_counts = HashMap::new(); + cache_misses_counts.insert("rust".to_string(), 100); + cache_misses_counts.insert("cuda".to_string(), 70); + + let stats = ServerStats { + cache_hits: PerLanguageCount { + adv_counts: cache_hits_counts, + ..Default::default() + }, + cache_misses: PerLanguageCount { + adv_counts: cache_misses_counts, + ..Default::default() + }, + ..Default::default() + }; + + let mut writer = StringWriter::new(); + stats.print(&mut writer, true); + + let output = writer.get_output(); + + assert!(output.contains("Cache hits rate -")); + assert!(output.contains("Cache hits rate (c/c++ [clang]) 100.00 %")); + assert!(output.contains("Cache hits rate (cuda) 0.00 %")); + assert!(output.contains("Cache hits rate (rust) 33.33 %")); + } +} diff --git a/src/test/tests.rs b/src/test/tests.rs index d5c3ee0e9..fad140229 100644 --- a/src/test/tests.rs +++ b/src/test/tests.rs @@ -58,7 +58,7 @@ fn run_server_thread( cache_dir: &Path, options: T, ) -> ( - u16, + crate::net::SocketAddr, Sender, Arc>, thread::JoinHandle<()>, @@ -88,30 +88,30 @@ where CacheMode::ReadWrite, )); - 
let client = unsafe { Client::new() }; + let client = Client::new(); let srv = SccacheServer::new(0, runtime, client, dist_client, storage).unwrap(); - let mut srv: SccacheServer>> = srv; - assert!(srv.port() > 0); + let mut srv: SccacheServer<_, Arc>> = srv; + let addr = srv.local_addr().unwrap(); + assert!(matches!(addr, crate::net::SocketAddr::Net(a) if a.port() > 0)); if let Some(options) = options { if let Some(timeout) = options.idle_timeout { srv.set_idle_timeout(Duration::from_millis(timeout)); } } - let port = srv.port(); let creator = srv.command_creator().clone(); - tx.send((port, creator)).unwrap(); + tx.send((addr, creator)).unwrap(); srv.run(shutdown_rx).unwrap(); }); - let (port, creator) = rx.recv().unwrap(); - (port, shutdown_tx, creator, handle) + let (addr, creator) = rx.recv().unwrap(); + (addr, shutdown_tx, creator, handle) } #[test] fn test_server_shutdown() { let f = TestFixture::new(); - let (port, _sender, _storage, child) = run_server_thread(f.tempdir.path(), None); + let (addr, _sender, _storage, child) = run_server_thread(f.tempdir.path(), None); // Connect to the server. - let conn = connect_to_server(port).unwrap(); + let conn = connect_to_server(&addr).unwrap(); // Ask it to shut down request_shutdown(conn).unwrap(); // Ensure that it shuts down. @@ -123,7 +123,7 @@ fn test_server_shutdown() { fn test_server_shutdown_no_idle() { let f = TestFixture::new(); // Set a ridiculously low idle timeout. 
- let (port, _sender, _storage, child) = run_server_thread( + let (addr, _sender, _storage, child) = run_server_thread( f.tempdir.path(), ServerOptions { idle_timeout: Some(0), @@ -131,7 +131,7 @@ fn test_server_shutdown_no_idle() { }, ); - let conn = connect_to_server(port).unwrap(); + let conn = connect_to_server(&addr).unwrap(); request_shutdown(conn).unwrap(); child.join().unwrap(); } @@ -157,9 +157,9 @@ fn test_server_idle_timeout() { #[test] fn test_server_stats() { let f = TestFixture::new(); - let (port, sender, _storage, child) = run_server_thread(f.tempdir.path(), None); + let (addr, sender, _storage, child) = run_server_thread(f.tempdir.path(), None); // Connect to the server. - let conn = connect_to_server(port).unwrap(); + let conn = connect_to_server(&addr).unwrap(); // Ask it for stats. let info = request_stats(conn).unwrap(); assert_eq!(0, info.stats.compile_requests); @@ -174,9 +174,9 @@ fn test_server_stats() { #[test] fn test_server_unsupported_compiler() { let f = TestFixture::new(); - let (port, sender, server_creator, child) = run_server_thread(f.tempdir.path(), None); + let (addr, sender, server_creator, child) = run_server_thread(f.tempdir.path(), None); // Connect to the server. - let conn = connect_to_server(port).unwrap(); + let conn = connect_to_server(&addr).unwrap(); { let mut c = server_creator.lock().unwrap(); // fail rust driver check @@ -226,13 +226,13 @@ fn test_server_compile() { let _ = env_logger::try_init(); let f = TestFixture::new(); let gcc = f.mk_bin("gcc").unwrap(); - let (port, sender, server_creator, child) = run_server_thread(f.tempdir.path(), None); + let (addr, sender, server_creator, child) = run_server_thread(f.tempdir.path(), None); // Connect to the server. 
const PREPROCESSOR_STDOUT: &[u8] = b"preprocessor stdout"; const PREPROCESSOR_STDERR: &[u8] = b"preprocessor stderr"; const STDOUT: &[u8] = b"some stdout"; const STDERR: &[u8] = b"some stderr"; - let conn = connect_to_server(port).unwrap(); + let conn = connect_to_server(&addr).unwrap(); // Write a dummy input file so the preprocessor cache mode can work std::fs::write(f.tempdir.path().join("file.c"), "whatever").unwrap(); { @@ -308,6 +308,7 @@ fn test_server_port_in_use() { "SCCACHE_SERVER_PORT", listener.local_addr().unwrap().port().to_string(), ) + .env_remove("SCCACHE_SERVER_UDS") .output() .unwrap(); assert!(!output.status.success()); diff --git a/src/test/utils.rs b/src/test/utils.rs index 19a182c8b..d8531be6c 100644 --- a/src/test/utils.rs +++ b/src/test/utils.rs @@ -83,7 +83,7 @@ macro_rules! assert_map_contains { } pub fn new_creator() -> Arc> { - let client = unsafe { Client::new() }; + let client = Client::new(); Arc::new(Mutex::new(MockCommandCreator::new(&client))) } diff --git a/src/util.rs b/src/util.rs index 4fc45af2e..7174681d6 100644 --- a/src/util.rs +++ b/src/util.rs @@ -636,15 +636,19 @@ pub fn decode_path(bytes: &[u8]) -> std::io::Result { #[cfg(windows)] pub fn decode_path(bytes: &[u8]) -> std::io::Result { - let codepage = winapi::um::winnls::CP_OEMCP; - let flags = winapi::um::winnls::MB_ERR_INVALID_CHARS; + use windows_sys::Win32::Globalization::{CP_OEMCP, MB_ERR_INVALID_CHARS}; + + let codepage = CP_OEMCP; + let flags = MB_ERR_INVALID_CHARS; Ok(OsString::from_wide(&multi_byte_to_wide_char(codepage, flags, bytes)?).into()) } #[cfg(windows)] pub fn wide_char_to_multi_byte(wide_char_str: &[u16]) -> std::io::Result> { - let codepage = winapi::um::winnls::CP_OEMCP; + use windows_sys::Win32::Globalization::{WideCharToMultiByte, CP_OEMCP}; + + let codepage = CP_OEMCP; let flags = 0; // Empty string if wide_char_str.is_empty() { @@ -652,7 +656,7 @@ pub fn wide_char_to_multi_byte(wide_char_str: &[u16]) -> std::io::Result } unsafe { // Get length 
of multibyte string - let len = winapi::um::stringapiset::WideCharToMultiByte( + let len = WideCharToMultiByte( codepage, flags, wide_char_str.as_ptr(), @@ -666,7 +670,7 @@ pub fn wide_char_to_multi_byte(wide_char_str: &[u16]) -> std::io::Result if len > 0 { // Convert from UTF-16 to multibyte let mut astr: Vec = Vec::with_capacity(len as usize); - let len = winapi::um::stringapiset::WideCharToMultiByte( + let len = WideCharToMultiByte( codepage, flags, wide_char_str.as_ptr(), @@ -695,19 +699,21 @@ pub fn wide_char_to_multi_byte(wide_char_str: &[u16]) -> std::io::Result /// See https://msdn.microsoft.com/en-us/library/windows/desktop/dd319072(v=vs.85).aspx /// for more details. pub fn multi_byte_to_wide_char( - codepage: winapi::shared::minwindef::DWORD, - flags: winapi::shared::minwindef::DWORD, + codepage: u32, + flags: u32, multi_byte_str: &[u8], ) -> std::io::Result> { + use windows_sys::Win32::Globalization::MultiByteToWideChar; + if multi_byte_str.is_empty() { return Ok(vec![]); } unsafe { // Get length of UTF-16 string - let len = winapi::um::stringapiset::MultiByteToWideChar( + let len = MultiByteToWideChar( codepage, flags, - multi_byte_str.as_ptr() as winapi::um::winnt::LPSTR, + multi_byte_str.as_ptr(), multi_byte_str.len() as i32, std::ptr::null_mut(), 0, @@ -715,10 +721,10 @@ pub fn multi_byte_to_wide_char( if len > 0 { // Convert to UTF-16 let mut wstr: Vec = Vec::with_capacity(len as usize); - let len = winapi::um::stringapiset::MultiByteToWideChar( + let len = MultiByteToWideChar( codepage, flags, - multi_byte_str.as_ptr() as winapi::um::winnt::LPSTR, + multi_byte_str.as_ptr(), multi_byte_str.len() as i32, wstr.as_mut_ptr(), len, @@ -836,6 +842,7 @@ impl<'a> Hasher for HashToDigest<'a> { /// Pipe `cmd`'s stdio to `/dev/null`, unless a specific env var is set. 
#[cfg(not(windows))] pub fn daemonize() -> Result<()> { + use crate::jobserver::discard_inherited_jobserver; use daemonize::Daemonize; use std::env; use std::mem; @@ -847,6 +854,10 @@ pub fn daemonize() -> Result<()> { } } + unsafe { + discard_inherited_jobserver(); + } + static mut PREV_SIGSEGV: *mut libc::sigaction = 0 as *mut _; static mut PREV_SIGBUS: *mut libc::sigaction = 0 as *mut _; static mut PREV_SIGILL: *mut libc::sigaction = 0 as *mut _; @@ -939,6 +950,67 @@ pub fn new_reqwest_blocking_client() -> reqwest::blocking::Client { .expect("http client must build with success") } +fn unhex(b: u8) -> std::io::Result { + match b { + b'0'..=b'9' => Ok(b - b'0'), + b'a'..=b'f' => Ok(b - b'a' + 10), + b'A'..=b'F' => Ok(b - b'A' + 10), + _ => Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "invalid hex digit", + )), + } +} + +/// A reverse version of std::ascii::escape_default +pub fn ascii_unescape_default(s: &[u8]) -> std::io::Result> { + let mut out = Vec::with_capacity(s.len() + 4); + let mut offset = 0; + while offset < s.len() { + let c = s[offset]; + if c == b'\\' { + offset += 1; + if offset >= s.len() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "incomplete escape", + )); + } + let c = s[offset]; + match c { + b'n' => out.push(b'\n'), + b'r' => out.push(b'\r'), + b't' => out.push(b'\t'), + b'\'' => out.push(b'\''), + b'"' => out.push(b'"'), + b'\\' => out.push(b'\\'), + b'x' => { + offset += 1; + if offset + 1 >= s.len() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "incomplete hex escape", + )); + } + let v = unhex(s[offset])? 
<< 4 | unhex(s[offset + 1])?; + out.push(v); + offset += 1; + } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "invalid escape", + )); + } + } + } else { + out.push(c); + } + offset += 1; + } + Ok(out) +} + #[cfg(test)] mod tests { use super::{OsStrExt, TimeMacroFinder}; @@ -1049,4 +1121,46 @@ mod tests { finder.find_time_macros(b"TIMESTAMP__ This is larger than the haystack"); assert!(finder.found_timestamp()); } + + #[test] + fn test_ascii_unescape_default() { + let mut alphabet = r#"\\'"\t\n\r"#.as_bytes().to_vec(); + alphabet.push(b'a'); + alphabet.push(b'1'); + alphabet.push(0); + alphabet.push(0xff); + let mut input = vec![]; + let mut output = vec![]; + let mut alphabet_indexes = [0; 3]; + let mut tested_cases = 0; + // Following loop may test duplicated inputs, but it's not a problem + loop { + input.clear(); + output.clear(); + for idx in alphabet_indexes { + if idx < alphabet.len() { + input.push(alphabet[idx]); + } + } + if input.is_empty() { + break; + } + output.extend(input.as_slice().escape_ascii()); + let result = super::ascii_unescape_default(&output).unwrap(); + assert_eq!(input, result, "{:?}", output); + tested_cases += 1; + for idx in &mut alphabet_indexes { + *idx += 1; + if *idx > alphabet.len() { + // Use `>` so we can test various input length. 
+ *idx = 0; + } else { + break; + } + } + } + assert_eq!(tested_cases, (alphabet.len() + 1).pow(3) - 1); + let empty_result = super::ascii_unescape_default(&[]).unwrap(); + assert!(empty_result.is_empty(), "{:?}", empty_result); + } } diff --git a/tests/cache_hit_rate.rs b/tests/cache_hit_rate.rs new file mode 100644 index 000000000..58f5526f1 --- /dev/null +++ b/tests/cache_hit_rate.rs @@ -0,0 +1,118 @@ +pub mod helpers; + +use std::process::Command; + +use anyhow::Result; +use assert_cmd::assert::OutputAssertExt; +use helpers::{cargo_clean, SccacheTest, CARGO, CRATE_DIR}; +use predicates::{boolean::PredicateBooleanExt, str::PredicateStrExt}; +use serial_test::serial; + +#[test] +#[serial] +fn test_cache_hit_rate() -> Result<()> { + let test_info = SccacheTest::new(None)?; + + Command::new(CARGO.as_os_str()) + .args(["build", "--color=never"]) + .envs(test_info.env.iter().cloned()) + .current_dir(CRATE_DIR.as_os_str()) + .assert() + .try_stderr(predicates::str::contains("\x1b[").from_utf8().not())? + .try_success()?; + + test_info + .show_text_stats(false)? + .try_stdout( + predicates::str::is_match(r"Cache hits rate\s+0\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_stdout( + predicates::str::is_match(r"Cache hits rate \(Rust\)\s+0\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_success()?; + + // Clean it so we can build it again. + cargo_clean(&test_info)?; + + Command::new(CARGO.as_os_str()) + .args(["run", "--color=always"]) + .envs(test_info.env.iter().cloned()) + .current_dir(CRATE_DIR.as_os_str()) + .assert() + .try_stderr(predicates::str::contains("\x1b[").from_utf8())? + .try_success()?; + + test_info + .show_text_stats(false)? + .try_stdout( + predicates::str::is_match(r"Cache hits rate\s+50\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_stdout( + predicates::str::is_match(r"Cache hits rate \(Rust\)\s+50\.00\s%") + .unwrap() + .from_utf8(), + )? 
+ .try_success()?; + + Ok(()) +} + +#[test] +#[serial] +fn test_adv_cache_hit_rate() -> Result<()> { + let test_info = SccacheTest::new(None)?; + + Command::new(CARGO.as_os_str()) + .args(["build", "--color=never"]) + .envs(test_info.env.iter().cloned()) + .current_dir(CRATE_DIR.as_os_str()) + .assert() + .try_stderr(predicates::str::contains("\x1b[").from_utf8().not())? + .try_success()?; + + test_info + .show_text_stats(true)? + .try_stdout( + predicates::str::is_match(r"Cache hits rate\s+0\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_stdout( + predicates::str::is_match(r"Cache hits rate \(rust\)\s+0\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_success()?; + + cargo_clean(&test_info)?; + + Command::new(CARGO.as_os_str()) + .args(["run", "--color=always"]) + .envs(test_info.env.iter().cloned()) + .current_dir(CRATE_DIR.as_os_str()) + .assert() + .try_stderr(predicates::str::contains("\x1b[").from_utf8())? + .try_success()?; + + test_info + .show_text_stats(true)? + .try_stdout( + predicates::str::is_match(r"Cache hits rate\s+50\.00\s%") + .unwrap() + .from_utf8(), + )? + .try_stdout( + predicates::str::is_match(r"Cache hits rate \(rust\)\s+50\.00\s%") + .unwrap() + .from_utf8(), + )? 
+ .try_success()?; + + Ok(()) +} diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs new file mode 100644 index 000000000..3fc36bf10 --- /dev/null +++ b/tests/helpers/mod.rs @@ -0,0 +1,142 @@ +use anyhow::{Context, Result}; +use assert_cmd::assert::OutputAssertExt; +use chrono::Local; +use fs_err as fs; +use log::trace; +use once_cell::sync::Lazy; +use std::convert::Infallible; +use std::ffi::OsString; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; + +pub static CRATE_DIR: Lazy = + Lazy::new(|| Path::new(file!()).parent().unwrap().join("../test-crate")); +pub static CARGO: Lazy = Lazy::new(|| std::env::var_os("CARGO").unwrap()); +pub static SCCACHE_BIN: Lazy = Lazy::new(|| assert_cmd::cargo::cargo_bin("sccache")); +/// Ensures the logger is only initialized once. Panics if initialization fails. +static LOGGER: Lazy> = Lazy::new(|| { + env_logger::Builder::new() + .format(|f, record| { + writeln!( + f, + "{} [{}] - {}", + Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"), + record.level(), + record.args() + ) + }) + .parse_env("RUST_LOG") + .init(); + Ok(()) +}); + +/// Used as a test setup fixture. The drop implementation cleans up after a _successful_ test. +/// We catch the panic to ensure that the drop runs and the TempDir is cleaned up. +pub struct SccacheTest<'a> { + /// Tempdir used for Sccache cache and cargo output. It is kept in the struct only to have the + /// destructor run when SccacheTest goes out of scope, but is never used otherwise. + #[allow(dead_code)] + pub tempdir: tempfile::TempDir, + pub env: Vec<(&'a str, std::ffi::OsString)>, +} + +impl SccacheTest<'_> { + pub fn new(additional_envs: Option<&[(&'static str, std::ffi::OsString)]>) -> Result { + assert!(LOGGER.is_ok()); + + // Create a temp directory to use for the disk cache. 
+ let tempdir = tempfile::Builder::new() + .prefix("sccache_test_rust_cargo") + .tempdir() + .context("Failed to create tempdir")?; + let cache_dir = tempdir.path().join("cache"); + fs::create_dir(&cache_dir)?; + let cargo_dir = tempdir.path().join("cargo"); + fs::create_dir(&cargo_dir)?; + + // Ensure there's no existing sccache server running. + stop_sccache()?; + + trace!("sccache --start-server"); + + Command::new(SCCACHE_BIN.as_os_str()) + .arg("--start-server") + .env("SCCACHE_DIR", &cache_dir) + .assert() + .try_success() + .context("Failed to start sccache server")?; + + let mut env = vec![ + ("CARGO_TARGET_DIR", cargo_dir.as_os_str().to_owned()), + ("RUSTC_WRAPPER", SCCACHE_BIN.as_os_str().to_owned()), + // Explicitly disable incremental compilation because sccache is unable to cache it at + // the time of writing. + ("CARGO_INCREMENTAL", OsString::from("0")), + ("TEST_ENV_VAR", OsString::from("1")), + ]; + + if let Some(vec) = additional_envs { + env.extend_from_slice(vec); + } + + Ok(SccacheTest { + tempdir, + env: env.to_owned(), + }) + } + + /// Show the statistics for sccache. This will be called at the end of a test and making this + /// an associated function will ensure that the struct lives until the end of the test. 
+ pub fn show_stats(&self) -> assert_cmd::assert::AssertResult { + trace!("sccache --show-stats"); + + Command::new(SCCACHE_BIN.as_os_str()) + .args(["--show-stats", "--stats-format=json"]) + .assert() + .try_success() + } + + pub fn show_text_stats(&self, advanced: bool) -> assert_cmd::assert::AssertResult { + let cmd = if advanced { + "--show-adv-stats" + } else { + "--show-stats" + }; + + trace!("sccache {cmd}"); + + Command::new(SCCACHE_BIN.as_os_str()) + .args([cmd, "--stats-format=text"]) + .assert() + .try_success() + } +} + +impl Drop for SccacheTest<'_> { + fn drop(&mut self) { + stop_sccache().expect("Stopping Sccache server failed"); + } +} + +pub fn stop_sccache() -> Result<()> { + trace!("sccache --stop-server"); + + Command::new(SCCACHE_BIN.as_os_str()) + .arg("--stop-server") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .context("Failed to stop sccache server")?; + Ok(()) +} + +pub fn cargo_clean(test_info: &SccacheTest) -> Result<()> { + Command::new(CARGO.as_os_str()) + .args(["clean"]) + .envs(test_info.env.iter().cloned()) + .current_dir(CRATE_DIR.as_os_str()) + .assert() + .try_success()?; + Ok(()) +} diff --git a/tests/msvc-preprocessing/foo.cpp b/tests/msvc-preprocessing/foo.cpp index e0604526f..511b0a75d 100644 --- a/tests/msvc-preprocessing/foo.cpp +++ b/tests/msvc-preprocessing/foo.cpp @@ -10,6 +10,14 @@ __pragma(warning(disable: 4668)) #endif __pragma(warning(pop)) +// Minimal reproducible example for errors from user code +// More information: https://github.com/mozilla/sccache/issues/2250 +#pragma warning(disable : 4002) + +#define F(x, y) + int main() { + F(2, , , , , , 3, , , , , , ) // C4002 + return 0; } diff --git a/tests/sccache_args.rs b/tests/sccache_args.rs index 12b4bb89f..82f91b35a 100644 --- a/tests/sccache_args.rs +++ b/tests/sccache_args.rs @@ -2,32 +2,18 @@ //! //! Any copyright is dedicated to the Public Domain. //! 
http://creativecommons.org/publicdomain/zero/1.0/ +pub mod helpers; -use anyhow::{Context, Result}; +use anyhow::Result; use assert_cmd::prelude::*; -use once_cell::sync::Lazy; +use helpers::{stop_sccache, SCCACHE_BIN}; use predicates::prelude::*; use serial_test::serial; -use std::path::PathBuf; -use std::process::{Command, Stdio}; +use std::process::Command; #[macro_use] extern crate log; -static SCCACHE_BIN: Lazy = Lazy::new(|| assert_cmd::cargo::cargo_bin("sccache")); - -fn stop_sccache() -> Result<()> { - trace!("sccache --stop-server"); - - Command::new(SCCACHE_BIN.as_os_str()) - .arg("--stop-server") - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .context("Failed to stop sccache server")?; - Ok(()) -} - #[test] #[serial] #[cfg(feature = "gcs")] diff --git a/tests/sccache_cargo.rs b/tests/sccache_cargo.rs index cafec343f..ec040be8f 100644 --- a/tests/sccache_cargo.rs +++ b/tests/sccache_cargo.rs @@ -3,128 +3,22 @@ //! Any copyright is dedicated to the Public Domain. //! http://creativecommons.org/publicdomain/zero/1.0/ +pub mod helpers; + use anyhow::{Context, Result}; -use once_cell::sync::Lazy; +use helpers::{cargo_clean, stop_sccache, CARGO, CRATE_DIR}; use assert_cmd::prelude::*; -use chrono::Local; use fs_err as fs; +use helpers::{SccacheTest, SCCACHE_BIN}; use predicates::prelude::*; use serial_test::serial; -use std::convert::Infallible; -use std::ffi::OsString; -use std::io::Write; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; +use std::path::Path; +use std::process::Command; #[macro_use] extern crate log; -static SCCACHE_BIN: Lazy = Lazy::new(|| assert_cmd::cargo::cargo_bin("sccache")); -static CARGO: Lazy = Lazy::new(|| std::env::var_os("CARGO").unwrap()); -static CRATE_DIR: Lazy = - Lazy::new(|| Path::new(file!()).parent().unwrap().join("test-crate")); -/// Ensures the logger is only initialized once. Panics if initialization fails. 
-static LOGGER: Lazy> = Lazy::new(|| { - env_logger::Builder::new() - .format(|f, record| { - writeln!( - f, - "{} [{}] - {}", - Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"), - record.level(), - record.args() - ) - }) - .parse_env("RUST_LOG") - .init(); - Ok(()) -}); - -/// Used as a test setup fixture. The drop implementation cleans up after a _successful_ test. -/// We catch the panic to ensure that the drop runs and the TempDir is cleaned up. -struct SccacheTest<'a> { - /// Tempdir used for Sccache cache and cargo output. It is kept in the struct only to have the - /// destructor run when SccacheTest goes out of scope, but is never used otherwise. - #[allow(dead_code)] - tempdir: tempfile::TempDir, - env: Vec<(&'a str, std::ffi::OsString)>, -} - -impl SccacheTest<'_> { - fn new(additional_envs: Option<&[(&'static str, std::ffi::OsString)]>) -> Result { - assert!(LOGGER.is_ok()); - - // Create a temp directory to use for the disk cache. - let tempdir = tempfile::Builder::new() - .prefix("sccache_test_rust_cargo") - .tempdir() - .context("Failed to create tempdir")?; - let cache_dir = tempdir.path().join("cache"); - fs::create_dir(&cache_dir)?; - let cargo_dir = tempdir.path().join("cargo"); - fs::create_dir(&cargo_dir)?; - - // Ensure there's no existing sccache server running. - stop_sccache()?; - - trace!("sccache --start-server"); - - Command::new(SCCACHE_BIN.as_os_str()) - .arg("--start-server") - .env("SCCACHE_DIR", &cache_dir) - .assert() - .try_success() - .context("Failed to start sccache server")?; - - let mut env = vec![ - ("CARGO_TARGET_DIR", cargo_dir.as_os_str().to_owned()), - ("RUSTC_WRAPPER", SCCACHE_BIN.as_os_str().to_owned()), - // Explicitly disable incremental compilation because sccache is unable to cache it at - // the time of writing. 
- ("CARGO_INCREMENTAL", OsString::from("0")), - ("TEST_ENV_VAR", OsString::from("1")), - ]; - - if let Some(vec) = additional_envs { - env.extend_from_slice(vec); - } - - Ok(SccacheTest { - tempdir, - env: env.to_owned(), - }) - } - - /// Show the statistics for sccache. This will be called at the end of a test and making this - /// an associated function will ensure that the struct lives until the end of the test. - fn show_stats(&self) -> assert_cmd::assert::AssertResult { - trace!("sccache --show-stats"); - Command::new(SCCACHE_BIN.as_os_str()) - .args(["--show-stats", "--stats-format=json"]) - .assert() - .try_success() - } -} - -impl Drop for SccacheTest<'_> { - fn drop(&mut self) { - stop_sccache().expect("Stopping Sccache server failed"); - } -} - -fn stop_sccache() -> Result<()> { - trace!("sccache --stop-server"); - - Command::new(SCCACHE_BIN.as_os_str()) - .arg("--stop-server") - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .context("Failed to stop sccache server")?; - Ok(()) -} - #[test] #[serial] fn test_rust_cargo_check() -> Result<()> { @@ -200,9 +94,14 @@ fn test_rust_cargo_run_with_env_dep_parsing() -> Result<()> { #[test] #[serial] fn test_rust_cargo_check_nightly() -> Result<()> { + use std::ffi::OsString; + test_rust_cargo_cmd( "check", - SccacheTest::new(Some(&[("RUSTFLAGS", OsString::from("-Zprofile"))]))?, + SccacheTest::new(Some(&[( + "RUSTFLAGS", + OsString::from("-Cprofile-generate=."), + )]))?, ) } @@ -210,9 +109,14 @@ fn test_rust_cargo_check_nightly() -> Result<()> { #[test] #[serial] fn test_rust_cargo_check_nightly_readonly() -> Result<()> { + use std::ffi::OsString; + test_rust_cargo_cmd_readonly( "check", - SccacheTest::new(Some(&[("RUSTFLAGS", OsString::from("-Zprofile"))]))?, + SccacheTest::new(Some(&[( + "RUSTFLAGS", + OsString::from("-Cprofile-generate=."), + )]))?, ) } @@ -220,9 +124,14 @@ fn test_rust_cargo_check_nightly_readonly() -> Result<()> { #[test] #[serial] fn test_rust_cargo_build_nightly() -> 
Result<()> { + use std::ffi::OsString; + test_rust_cargo_cmd( "build", - SccacheTest::new(Some(&[("RUSTFLAGS", OsString::from("-Zprofile"))]))?, + SccacheTest::new(Some(&[( + "RUSTFLAGS", + OsString::from("-Cprofile-generate=."), + )]))?, ) } @@ -230,22 +139,17 @@ fn test_rust_cargo_build_nightly() -> Result<()> { #[test] #[serial] fn test_rust_cargo_build_nightly_readonly() -> Result<()> { + use std::ffi::OsString; + test_rust_cargo_cmd_readonly( "build", - SccacheTest::new(Some(&[("RUSTFLAGS", OsString::from("-Zprofile"))]))?, + SccacheTest::new(Some(&[( + "RUSTFLAGS", + OsString::from("-Cprofile-generate=."), + )]))?, ) } -fn cargo_clean(test_info: &SccacheTest) -> Result<()> { - Command::new(CARGO.as_os_str()) - .args(["clean"]) - .envs(test_info.env.iter().cloned()) - .current_dir(CRATE_DIR.as_os_str()) - .assert() - .try_success()?; - Ok(()) -} - /// Test that building a simple Rust crate with cargo using sccache results in a cache hit /// when built a second time and a cache miss, when the environment variable referenced via /// env! is changed. 
diff --git a/tests/system.rs b/tests/system.rs index 386805a24..78baf8a4a 100644 --- a/tests/system.rs +++ b/tests/system.rs @@ -68,7 +68,11 @@ fn adv_key_kind(lang: &str, compiler: &str) -> String { "gcc" | "g++" => language + " [gcc]", "cl.exe" => language + " [msvc]", "nvc" | "nvc++" => language + " [nvhpc]", - "nvcc" => language + " [nvcc]", + "nvcc" => match lang { + "ptx" => language + " [cicc]", + "cubin" => language + " [ptxas]", + _ => language + " [nvcc]", + }, _ => { trace!("Unknown compiler type: {}", compiler); language + "unknown" @@ -111,20 +115,32 @@ fn compile_cmdline>( fn compile_cuda_cmdline>( compiler: &str, exe: T, + compile_flag: &str, input: &str, output: &str, mut extra_args: Vec, ) -> Vec { let mut arg = match compiler { - "nvcc" => vec_from!(OsString, exe.as_ref(), "-c", input, "-o", output), + "nvcc" => vec_from!(OsString, exe.as_ref(), compile_flag, input, "-o", output), "clang++" => { vec_from!( OsString, exe, - "-c", + compile_flag, input, - "--cuda-gpu-arch=sm_50", - format!("-Fo{}", output) + "--cuda-gpu-arch=sm_70", + format!( + "--cuda-path={}", + env::var_os("CUDA_PATH") + .or(env::var_os("CUDA_HOME")) + .unwrap_or("/usr/local/cuda".into()) + .to_string_lossy() + ), + "--no-cuda-version-check", + // work around for clang-cuda on windows-2019 (https://github.com/microsoft/STL/issues/2359) + "-D_ALLOW_COMPILER_AND_STL_VERSION_MISMATCH", + "-o", + output ) } _ => panic!("Unsupported compiler: {}", compiler), @@ -200,14 +216,14 @@ fn test_basic_compile(compiler: Compiler, tempdir: &Path) { exe, env_vars, } = compiler; - trace!("run_sccache_command_test: {}", name); + println!("test_basic_compile: {}", name); // Compile a source file. 
copy_to_tempdir(&[INPUT, INPUT_ERR], tempdir); let out_file = tempdir.join(OUTPUT); trace!("compile"); sccache_command() - .args(&compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) .current_dir(tempdir) .envs(env_vars.clone()) .assert() @@ -226,7 +242,7 @@ fn test_basic_compile(compiler: Compiler, tempdir: &Path) { trace!("compile"); fs::remove_file(&out_file).unwrap(); sccache_command() - .args(&compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) .current_dir(tempdir) .envs(env_vars) .assert() @@ -252,7 +268,7 @@ fn test_noncacheable_stats(compiler: Compiler, tempdir: &Path) { exe, env_vars, } = compiler; - trace!("test_noncacheable_stats: {}", name); + println!("test_noncacheable_stats: {}", name); copy_to_tempdir(&[INPUT], tempdir); trace!("compile"); @@ -438,13 +454,80 @@ int main(int argc, char** argv) { }); } +/* test case like this: + echo "int test(){}" > test.cc + mkdir o1 o2 + sccache g++ -c -g -gsplit-dwarf test.cc -o test1.o + sccache g++ -c -g -gsplit-dwarf test.cc -o test1.o --- > cache hit + sccache g++ -c -g -gsplit-dwarf test.cc -o test2.o --- > cache miss + strings test2.o |grep test2.dwo +*/ +fn test_split_dwarf_object_generate_output_dir_changes(compiler: Compiler, tempdir: &Path) { + let Compiler { + name, + exe, + env_vars, + } = compiler; + trace!("test -g -gsplit-dwarf with different output"); + zero_stats(); + const SRC: &str = "source.c"; + write_source(tempdir, SRC, "int test(){}"); + let mut args = compile_cmdline(name, exe.clone(), SRC, "test1.o", Vec::new()); + args.extend(vec_from!(OsString, "-g")); + args.extend(vec_from!(OsString, "-gsplit-dwarf")); + trace!("compile source.c (1)"); + sccache_command() + .args(&args) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + get_stats(|info| { + assert_eq!(0, info.stats.cache_hits.all()); + assert_eq!(1, 
info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_misses.get("C/C++").unwrap()); + }); + // Compile the same source again to ensure we can get a cache hit. + trace!("compile source.c (2)"); + sccache_command() + .args(&args) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + get_stats(|info| { + assert_eq!(1, info.stats.cache_hits.all()); + assert_eq!(1, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("C/C++").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("C/C++").unwrap()); + }); + // Compile the same source again with different output + // to ensure we can force generate new object file. + let mut args2 = compile_cmdline(name, exe, SRC, "test2.o", Vec::new()); + args2.extend(vec_from!(OsString, "-g")); + args2.extend(vec_from!(OsString, "-gsplit-dwarf")); + trace!("compile source.c (2)"); + sccache_command() + .args(&args2) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + get_stats(|info| { + assert_eq!(1, info.stats.cache_hits.all()); + assert_eq!(2, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("C/C++").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("C/C++").unwrap()); + }); +} + fn test_gcc_clang_no_warnings_from_macro_expansion(compiler: Compiler, tempdir: &Path) { let Compiler { name, exe, env_vars, } = compiler; - trace!("test_gcc_clang_no_warnings_from_macro_expansion: {}", name); + println!("test_gcc_clang_no_warnings_from_macro_expansion: {}", name); // Compile a source file. copy_to_tempdir(&[INPUT_MACRO_EXPANSION], tempdir); @@ -470,7 +553,7 @@ fn test_compile_with_define(compiler: Compiler, tempdir: &Path) { exe, env_vars, } = compiler; - trace!("test_compile_with_define: {}", name); + println!("test_compile_with_define: {}", name); // Compile a source file. 
copy_to_tempdir(&[INPUT_WITH_DEFINE], tempdir); @@ -505,6 +588,7 @@ fn run_sccache_command_tests(compiler: Compiler, tempdir: &Path, preprocessor_ca } if compiler.name == "clang" || compiler.name == "gcc" { test_gcc_clang_no_warnings_from_macro_expansion(compiler.clone(), tempdir); + test_split_dwarf_object_generate_output_dir_changes(compiler.clone(), tempdir); } if compiler.name == "clang++" { test_clang_multicall(compiler.clone(), tempdir); @@ -551,22 +635,487 @@ fn run_sccache_command_tests(compiler: Compiler, tempdir: &Path, preprocessor_ca } } -fn test_cuda_compiles(compiler: &Compiler, tempdir: &Path) { +fn test_nvcc_cuda_compiles(compiler: &Compiler, tempdir: &Path) { let Compiler { name, exe, env_vars, } = compiler; - trace!("run_sccache_command_test: {}", name); + println!("test_nvcc_cuda_compiles: {}", name); // Compile multiple source files. copy_to_tempdir(&[INPUT_FOR_CUDA_A, INPUT_FOR_CUDA_B], tempdir); let out_file = tempdir.join(OUTPUT); trace!("compile A"); sccache_command() - .args(&compile_cuda_cmdline( + .args(compile_cuda_cmdline( + name, + exe, + "-c", + // relative path for input + INPUT_FOR_CUDA_A, + // relative path for output + out_file.file_name().unwrap().to_string_lossy().as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&out_file).map(|m| m.len() > 0).unwrap()); + fs::remove_file(&out_file).unwrap(); + trace!("compile A request stats"); + get_stats(|info| { + assert_eq!(1, info.stats.compile_requests); + assert_eq!(4, info.stats.requests_executed); + assert_eq!(0, info.stats.cache_hits.all()); + assert_eq!(3, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let 
adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + trace!("compile A"); + sccache_command() + .args(compile_cuda_cmdline( name, exe, + "-c", + // relative path for input + INPUT_FOR_CUDA_A, + // absolute path for output + out_file.to_string_lossy().as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&out_file).map(|m| m.len() > 0).unwrap()); + fs::remove_file(&out_file).unwrap(); + trace!("compile A request stats"); + get_stats(|info| { + assert_eq!(2, info.stats.compile_requests); + assert_eq!(8, info.stats.requests_executed); + assert_eq!(3, info.stats.cache_hits.all()); + assert_eq!(3, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&1, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, 
info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + // By compiling another input source we verify that the pre-processor + // phase is correctly running and outputting text + trace!("compile B"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-c", + // absolute path for input + &tempdir.join(INPUT_FOR_CUDA_B).to_string_lossy(), + // absolute path for output + out_file.to_string_lossy().as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&out_file).map(|m| m.len() > 0).unwrap()); + fs::remove_file(&out_file).unwrap(); + trace!("compile B request stats"); + get_stats(|info| { + assert_eq!(3, info.stats.compile_requests); + assert_eq!(12, info.stats.requests_executed); + assert_eq!(4, info.stats.cache_hits.all()); + assert_eq!(5, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&1, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&2, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&2, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + trace!("compile ptx"); + let out_file = 
tempdir.join("test.ptx"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-ptx", + INPUT_FOR_CUDA_A, + // relative path for output + out_file.file_name().unwrap().to_string_lossy().as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&out_file).map(|m| m.len() > 0).unwrap()); + fs::remove_file(&out_file).unwrap(); + trace!("compile ptx request stats"); + get_stats(|info| { + assert_eq!(4, info.stats.compile_requests); + assert_eq!(14, info.stats.requests_executed); + assert_eq!(5, info.stats.cache_hits.all()); + assert_eq!(5, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&2, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&2, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&2, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&2, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + trace!("compile cubin"); + let out_file = tempdir.join("test.cubin"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-cubin", + INPUT_FOR_CUDA_A, + // absolute path for output + out_file.to_string_lossy().as_ref(), + Vec::new(), + )) + 
.current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&out_file).map(|m| m.len() > 0).unwrap()); + fs::remove_file(&out_file).unwrap(); + trace!("compile cubin request stats"); + get_stats(|info| { + assert_eq!(5, info.stats.compile_requests); + assert_eq!(17, info.stats.requests_executed); + assert_eq!(7, info.stats.cache_hits.all()); + assert_eq!(5, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&3, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&3, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&3, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&3, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&1, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + // Test to ensure #2299 doesn't regress (https://github.com/mozilla/sccache/issues/2299) + let test_2299_src_name = "test_2299.cu"; + let test_2299_out_file = tempdir.join("test_2299.cu.o"); + // Two versions of the source with different contents inside the #ifndef __CUDA_ARCH__ + let test_2299_cu_src_1 = " +#ifndef __CUDA_ARCH__ +static const auto x = 5; +#endif +int main(int argc, char** argv) { + return 0; +} +"; + let test_2299_cu_src_2 = " +#ifndef __CUDA_ARCH__ +static const auto x = \"5\"; 
+#endif +int main(int argc, char** argv) { + return 0; +} +"; + write_source(tempdir, test_2299_src_name, test_2299_cu_src_1); + trace!("compile test_2299.cu (1)"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-c", + // relative path for input + test_2299_src_name, + // relative path for output + test_2299_out_file + .file_name() + .unwrap() + .to_string_lossy() + .as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&test_2299_out_file) + .map(|m| m.len() > 0) + .unwrap()); + fs::remove_file(&test_2299_out_file).unwrap(); + trace!("compile test_2299.cu request stats (1)"); + get_stats(|info| { + assert_eq!(6, info.stats.compile_requests); + assert_eq!(21, info.stats.requests_executed); + assert_eq!(7, info.stats.cache_hits.all()); + assert_eq!(8, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&3, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&3, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&3, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&3, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&3, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&3, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&3, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&3, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + write_source(tempdir, test_2299_src_name, 
test_2299_cu_src_2); + trace!("compile test_2299.cu (2)"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-c", + // relative path for input + test_2299_src_name, + // relative path for output + test_2299_out_file + .file_name() + .unwrap() + .to_string_lossy() + .as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&test_2299_out_file) + .map(|m| m.len() > 0) + .unwrap()); + fs::remove_file(&test_2299_out_file).unwrap(); + trace!("compile test_2299.cu request stats (2)"); + get_stats(|info| { + assert_eq!(7, info.stats.compile_requests); + assert_eq!(25, info.stats.requests_executed); + assert_eq!(9, info.stats.cache_hits.all()); + assert_eq!(9, info.stats.cache_misses.all()); + assert_eq!(&1, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&4, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&4, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&4, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&3, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&1, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&4, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&4, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&4, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&3, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); + + // Recompile the original version again to ensure only cache hits + write_source(tempdir, test_2299_src_name, test_2299_cu_src_1); + trace!("compile 
test_2299.cu (3)"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-c", + // relative path for input + test_2299_src_name, + // relative path for output + test_2299_out_file + .file_name() + .unwrap() + .to_string_lossy() + .as_ref(), + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + assert!(fs::metadata(&test_2299_out_file) + .map(|m| m.len() > 0) + .unwrap()); + fs::remove_file(&test_2299_out_file).unwrap(); + trace!("compile test_2299.cu request stats (3)"); + get_stats(|info| { + assert_eq!(8, info.stats.compile_requests); + assert_eq!(29, info.stats.requests_executed); + assert_eq!(12, info.stats.cache_hits.all()); + assert_eq!(9, info.stats.cache_misses.all()); + assert_eq!(&2, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&5, info.stats.cache_hits.get("PTX").unwrap()); + assert_eq!(&5, info.stats.cache_hits.get("CUBIN").unwrap()); + assert_eq!(&4, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&3, info.stats.cache_misses.get("PTX").unwrap()); + assert_eq!(&2, info.stats.cache_misses.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + let adv_cuda_key = adv_key_kind("cuda", compiler.name); + let adv_ptx_key = adv_key_kind("ptx", compiler.name); + let adv_cubin_key = adv_key_kind("cubin", compiler.name); + assert_eq!(&2, info.stats.cache_hits.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&5, info.stats.cache_hits.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&5, info.stats.cache_hits.get_adv(&adv_cubin_key).unwrap()); + assert_eq!(&4, info.stats.cache_misses.get_adv(&adv_cuda_key).unwrap()); + assert_eq!(&3, info.stats.cache_misses.get_adv(&adv_ptx_key).unwrap()); + assert_eq!(&2, info.stats.cache_misses.get_adv(&adv_cubin_key).unwrap()); + }); +} + +fn test_nvcc_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { + let Compiler { + name, + exe, + env_vars, + } = compiler; + zero_stats(); + + 
println!("test_nvcc_proper_lang_stat_tracking: {}", name); + // Compile multiple source files. + copy_to_tempdir(&[INPUT_FOR_CUDA_C, INPUT], tempdir); + + let out_file = tempdir.join(OUTPUT); + trace!("compile CUDA A"); + sccache_command() + .args(compile_cmdline( + name, + &exe, + INPUT_FOR_CUDA_C, + OUTPUT, + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + fs::remove_file(&out_file).unwrap(); + trace!("compile CUDA A"); + sccache_command() + .args(compile_cmdline( + name, + &exe, + INPUT_FOR_CUDA_C, + OUTPUT, + Vec::new(), + )) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + fs::remove_file(&out_file).unwrap(); + trace!("compile C++ A"); + sccache_command() + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .current_dir(tempdir) + .envs(env_vars.clone()) + .assert() + .success(); + fs::remove_file(&out_file).unwrap(); + trace!("compile C++ A"); + sccache_command() + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .current_dir(tempdir) + .envs(env_vars) + .assert() + .success(); + fs::remove_file(&out_file).unwrap(); + + trace!("request stats"); + get_stats(|info| { + assert_eq!(4, info.stats.compile_requests); + assert_eq!(12, info.stats.requests_executed); + assert_eq!(5, info.stats.cache_hits.all()); + assert_eq!(3, info.stats.cache_misses.all()); + assert!(info.stats.cache_hits.get("C/C++").is_none()); + assert_eq!(&2, info.stats.cache_hits.get("CUDA").unwrap()); + assert_eq!(&2, info.stats.cache_hits.get("CUBIN").unwrap()); + assert!(info.stats.cache_misses.get("C/C++").is_none()); + assert_eq!(&2, info.stats.cache_misses.get("CUDA").unwrap()); + assert_eq!(&1, info.stats.cache_misses.get("PTX").unwrap()); + }); +} + +fn run_sccache_nvcc_cuda_command_tests(compiler: Compiler, tempdir: &Path) { + test_nvcc_cuda_compiles(&compiler, tempdir); + test_nvcc_proper_lang_stat_tracking(compiler, tempdir); +} + +fn test_clang_cuda_compiles(compiler: &Compiler, 
tempdir: &Path) { + let Compiler { + name, + exe, + env_vars, + } = compiler; + println!("test_clang_cuda_compiles: {}", name); + // Compile multiple source files. + copy_to_tempdir(&[INPUT_FOR_CUDA_A, INPUT_FOR_CUDA_B], tempdir); + + let out_file = tempdir.join(OUTPUT); + trace!("compile A"); + sccache_command() + .args(compile_cuda_cmdline( + name, + exe, + "-c", INPUT_FOR_CUDA_A, OUTPUT, Vec::new(), @@ -589,9 +1138,10 @@ fn test_cuda_compiles(compiler: &Compiler, tempdir: &Path) { trace!("compile A"); fs::remove_file(&out_file).unwrap(); sccache_command() - .args(&compile_cuda_cmdline( + .args(compile_cuda_cmdline( name, exe, + "-c", INPUT_FOR_CUDA_A, OUTPUT, Vec::new(), @@ -617,9 +1167,10 @@ fn test_cuda_compiles(compiler: &Compiler, tempdir: &Path) { // phase is correctly running and outputting text trace!("compile B"); sccache_command() - .args(&compile_cuda_cmdline( + .args(compile_cuda_cmdline( name, exe, + "-c", INPUT_FOR_CUDA_B, OUTPUT, Vec::new(), @@ -643,7 +1194,7 @@ fn test_cuda_compiles(compiler: &Compiler, tempdir: &Path) { }); } -fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { +fn test_clang_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { let Compiler { name, exe, @@ -651,16 +1202,17 @@ fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { } = compiler; zero_stats(); - trace!("run_sccache_command_test: {}", name); + println!("test_clang_proper_lang_stat_tracking: {}", name); // Compile multiple source files. 
copy_to_tempdir(&[INPUT_FOR_CUDA_C, INPUT], tempdir); let out_file = tempdir.join(OUTPUT); trace!("compile CUDA A"); sccache_command() - .args(&compile_cmdline( + .args(compile_cuda_cmdline( name, &exe, + "-c", INPUT_FOR_CUDA_C, OUTPUT, Vec::new(), @@ -672,9 +1224,10 @@ fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { fs::remove_file(&out_file).unwrap(); trace!("compile CUDA A"); sccache_command() - .args(&compile_cmdline( + .args(compile_cuda_cmdline( name, &exe, + "-c", INPUT_FOR_CUDA_C, OUTPUT, Vec::new(), @@ -686,7 +1239,7 @@ fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { fs::remove_file(&out_file).unwrap(); trace!("compile C++ A"); sccache_command() - .args(&compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) .current_dir(tempdir) .envs(env_vars.clone()) .assert() @@ -694,7 +1247,7 @@ fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { fs::remove_file(&out_file).unwrap(); trace!("compile C++ A"); sccache_command() - .args(&compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) + .args(compile_cmdline(name, &exe, INPUT, OUTPUT, Vec::new())) .current_dir(tempdir) .envs(env_vars) .assert() @@ -714,9 +1267,9 @@ fn test_proper_lang_stat_tracking(compiler: Compiler, tempdir: &Path) { }); } -fn run_sccache_cuda_command_tests(compiler: Compiler, tempdir: &Path) { - test_cuda_compiles(&compiler, tempdir); - test_proper_lang_stat_tracking(compiler, tempdir); +fn run_sccache_clang_cuda_command_tests(compiler: Compiler, tempdir: &Path) { + test_clang_cuda_compiles(&compiler, tempdir); + test_clang_proper_lang_stat_tracking(compiler, tempdir); } fn test_hip_compiles(compiler: &Compiler, tempdir: &Path) { @@ -725,7 +1278,7 @@ fn test_hip_compiles(compiler: &Compiler, tempdir: &Path) { exe, env_vars, } = compiler; - trace!("run_sccache_command_test: {}", name); + println!("test_hip_compiles: {}", name); // Compile multiple source files. 
copy_to_tempdir(&[INPUT_FOR_HIP_A, INPUT_FOR_HIP_B], tempdir); @@ -734,7 +1287,7 @@ fn test_hip_compiles(compiler: &Compiler, tempdir: &Path) { let out_file = tempdir.join(OUTPUT); trace!("compile A"); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_A, @@ -760,7 +1313,7 @@ fn test_hip_compiles(compiler: &Compiler, tempdir: &Path) { trace!("compile A"); fs::remove_file(&out_file).unwrap(); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_A, @@ -789,7 +1342,7 @@ fn test_hip_compiles(compiler: &Compiler, tempdir: &Path) { // phase is correctly running and outputting text trace!("compile B"); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_B, @@ -822,7 +1375,7 @@ fn test_hip_compiles_multi_targets(compiler: &Compiler, tempdir: &Path) { exe, env_vars, } = compiler; - trace!("run_sccache_command_test: {}", name); + println!("test_hip_compiles_multi_targets: {}", name); // Compile multiple source files. 
copy_to_tempdir(&[INPUT_FOR_HIP_A, INPUT_FOR_HIP_B], tempdir); @@ -831,7 +1384,7 @@ fn test_hip_compiles_multi_targets(compiler: &Compiler, tempdir: &Path) { let out_file = tempdir.join(OUTPUT); trace!("compile A with gfx900 and gfx1030"); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_A, @@ -858,7 +1411,7 @@ fn test_hip_compiles_multi_targets(compiler: &Compiler, tempdir: &Path) { trace!("compile A with with gfx900 and gfx1030 again"); fs::remove_file(&out_file).unwrap(); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_A, @@ -888,7 +1441,7 @@ fn test_hip_compiles_multi_targets(compiler: &Compiler, tempdir: &Path) { // phase is correctly running and outputting text trace!("compile B with gfx900 and gfx1030"); sccache_command() - .args(&compile_hip_cmdline( + .args(compile_hip_cmdline( name, exe, INPUT_FOR_HIP_B, @@ -959,15 +1512,15 @@ fn test_clang_cache_whitespace_normalization( exe, env_vars, } = compiler; - println!("run_sccache_command_test: {}", name); - println!("expecting hit: {}", hit); + println!("test_clang_cache_whitespace_normalization: {}", name); + debug!("expecting hit: {}", hit); // Compile a source file. 
copy_to_tempdir(&[INPUT_WITH_WHITESPACE, INPUT_WITH_WHITESPACE_ALT], tempdir); zero_stats(); - println!("compile whitespace"); + debug!("compile whitespace"); sccache_command() - .args(&compile_cmdline( + .args(compile_cmdline( name, &exe, INPUT_WITH_WHITESPACE, @@ -978,7 +1531,7 @@ fn test_clang_cache_whitespace_normalization( .envs(env_vars.clone()) .assert() .success(); - println!("request stats"); + debug!("request stats"); get_stats(|info| { assert_eq!(1, info.stats.compile_requests); assert_eq!(1, info.stats.requests_executed); @@ -986,9 +1539,9 @@ fn test_clang_cache_whitespace_normalization( assert_eq!(1, info.stats.cache_misses.all()); }); - println!("compile whitespace_alt"); + debug!("compile whitespace_alt"); sccache_command() - .args(&compile_cmdline( + .args(compile_cmdline( name, &exe, INPUT_WITH_WHITESPACE_ALT, @@ -999,7 +1552,7 @@ fn test_clang_cache_whitespace_normalization( .envs(env_vars) .assert() .success(); - println!("request stats (expecting cache hit)"); + debug!("request stats (expecting cache hit)"); if hit { get_stats(move |info| { assert_eq!(2, info.stats.compile_requests); @@ -1073,7 +1626,13 @@ fn find_cuda_compilers() -> Vec { }) }) .collect::>(), - Err(_) => vec![], + Err(_) => { + eprintln!( + "unable to find `nvcc` in PATH={:?}", + env::var_os("PATH").unwrap_or_default() + ); + vec![] + } }; compilers } @@ -1173,6 +1732,13 @@ fn test_cuda_sccache_command(preprocessor_cache_mode: bool) { .tempdir() .unwrap(); let compilers = find_cuda_compilers(); + println!( + "CUDA compilers: {:?}", + compilers + .iter() + .map(|c| c.exe.to_string_lossy()) + .collect::>() + ); if compilers.is_empty() { warn!("No compilers found, skipping test"); } else { @@ -1189,7 +1755,11 @@ fn test_cuda_sccache_command(preprocessor_cache_mode: bool) { &sccache_cached_cfg_path, ); for compiler in compilers { - run_sccache_cuda_command_tests(compiler, tempdir.path()); + match compiler.name { + "nvcc" => run_sccache_nvcc_cuda_command_tests(compiler, 
tempdir.path()), + "clang++" => run_sccache_clang_cuda_command_tests(compiler, tempdir.path()), + _ => {} + } zero_stats(); } stop_local_daemon(); diff --git a/tests/xcode/main.cpp b/tests/xcode/main.cpp new file mode 100644 index 000000000..e371aa940 --- /dev/null +++ b/tests/xcode/main.cpp @@ -0,0 +1,6 @@ +#include + +int main(int argc, const char * argv[]) { + std::cout << "Hello, World!\n"; + return 0; +} diff --git a/tests/xcode/sccache.xcconfig b/tests/xcode/sccache.xcconfig new file mode 100644 index 000000000..590a5a531 --- /dev/null +++ b/tests/xcode/sccache.xcconfig @@ -0,0 +1,4 @@ +C_COMPILER_LAUNCHER=../../target/debug/sccache +CLANG_ENABLE_MODULES=NO +COMPILER_INDEX_STORE_ENABLE=NO +CLANG_USE_RESPONSE_FILE=NO diff --git a/tests/xcode/xcode-test.xcodeproj/project.pbxproj b/tests/xcode/xcode-test.xcodeproj/project.pbxproj new file mode 100644 index 000000000..22e869678 --- /dev/null +++ b/tests/xcode/xcode-test.xcodeproj/project.pbxproj @@ -0,0 +1,279 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 63; + objects = { + +/* Begin PBXBuildFile section */ + 9D52B00E2CABDB80008CF5FD /* main.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 9D52B00D2CABDB80008CF5FD /* main.cpp */; }; +/* End PBXBuildFile section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 9D52B0012CABDB40008CF5FD /* CopyFiles */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = /usr/share/man/man1/; + dstSubfolderSpec = 0; + files = ( + ); + runOnlyForDeploymentPostprocessing = 1; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 9D52B0032CABDB40008CF5FD /* xcode-test */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = "xcode-test"; sourceTree = BUILT_PRODUCTS_DIR; }; + 9D52B00D2CABDB80008CF5FD /* main.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = main.cpp; sourceTree = ""; }; +/* End 
PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 9D52B0002CABDB40008CF5FD /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 9D52AFFA2CABDB40008CF5FD = { + isa = PBXGroup; + children = ( + 9D52B00D2CABDB80008CF5FD /* main.cpp */, + 9D52B0042CABDB40008CF5FD /* Products */, + ); + sourceTree = ""; + }; + 9D52B0042CABDB40008CF5FD /* Products */ = { + isa = PBXGroup; + children = ( + 9D52B0032CABDB40008CF5FD /* xcode-test */, + ); + name = Products; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 9D52B0022CABDB40008CF5FD /* xcode-test */ = { + isa = PBXNativeTarget; + buildConfigurationList = 9D52B00A2CABDB40008CF5FD /* Build configuration list for PBXNativeTarget "xcode-test" */; + buildPhases = ( + 9D52AFFF2CABDB40008CF5FD /* Sources */, + 9D52B0002CABDB40008CF5FD /* Frameworks */, + 9D52B0012CABDB40008CF5FD /* CopyFiles */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "xcode-test"; + packageProductDependencies = ( + ); + productName = "xcode-test"; + productReference = 9D52B0032CABDB40008CF5FD /* xcode-test */; + productType = "com.apple.product-type.tool"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 9D52AFFB2CABDB40008CF5FD /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastUpgradeCheck = 1600; + TargetAttributes = { + 9D52B0022CABDB40008CF5FD = { + CreatedOnToolsVersion = 16.0; + }; + }; + }; + buildConfigurationList = 9D52AFFE2CABDB40008CF5FD /* Build configuration list for PBXProject "xcode-test" */; + compatibilityVersion = "Xcode 12.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 9D52AFFA2CABDB40008CF5FD; + minimizedProjectReferenceProxies = 1; + 
productRefGroup = 9D52B0042CABDB40008CF5FD /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 9D52B0022CABDB40008CF5FD /* xcode-test */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXSourcesBuildPhase section */ + 9D52AFFF2CABDB40008CF5FD /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 9D52B00E2CABDB80008CF5FD /* main.cpp in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 9D52B0082CABDB40008CF5FD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + 
ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 11.5; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = macosx; + }; + name = Debug; + }; + 9D52B0092CABDB40008CF5FD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + 
CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MACOSX_DEPLOYMENT_TARGET = 11.5; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = macosx; + }; + name = Release; + }; + 9D52B00B2CABDB40008CF5FD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Debug; + }; + 9D52B00C2CABDB40008CF5FD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 9D52AFFE2CABDB40008CF5FD /* Build configuration list for PBXProject "xcode-test" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9D52B0082CABDB40008CF5FD /* Debug */, + 9D52B0092CABDB40008CF5FD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 9D52B00A2CABDB40008CF5FD /* Build configuration list for PBXNativeTarget "xcode-test" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9D52B00B2CABDB40008CF5FD /* Debug */, + 9D52B00C2CABDB40008CF5FD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 9D52AFFB2CABDB40008CF5FD /* Project object */; +}