Skip to content

Commit

Permalink
Merge pull request #647 from JuliaParallel/sb/buildkite-latest
Browse files Browse the repository at this point in the history
Update buildkite to use latest Julia version
  • Loading branch information
simonbyrne authored Oct 6, 2022
2 parents c67cfa3 + 1eade93 commit 676df63
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 6 deletions.
10 changes: 6 additions & 4 deletions .buildkite/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@
Pkg.test("MPI")
'
- label: "Tests -- Julia 1.7"
- label: "Tests -- Julia latest"
plugins:
- JuliaCI/julia#v1:
version: "1.7"
version: "1"
persist_depot_dirs: packages,artifacts,compiled
agents:
queue: "juliagpu"
Expand Down Expand Up @@ -178,16 +178,18 @@

- wait

- label: "Tests -- Julia 1.7"
- label: "Tests -- Julia latest"
plugins:
- JuliaCI/julia#v1:
version: "1.7"
version: "1"
persist_depot_dirs: packages,artifacts,compiled
agents:
queue: "juliagpu"
rocm: "*" # TODO: fix ROCm version
if: build.message !~ /\[skip tests\]/
timeout_in_minutes: 60
soft_fail:
- exit_status: 1
env:
JULIA_MPI_TEST_ARRAYTYPE: ROCArray
JULIA_MPI_TEST_NPROCS: 2
Expand Down
4 changes: 2 additions & 2 deletions test/common.jl
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@ elseif get(ENV,"JULIA_MPI_TEST_ARRAYTYPE","") == "ROCArray"
ArrayType = AMDGPU.ROCArray
function synchronize()
# TODO: AMDGPU synchronization story is complicated. HSA does not provide a consistent notion of global queues. We need a mechanism for all GPUArrays.jl provided kernels to be synchronized.
queue = AMDGPU.ROCQueue()
queue = AMDGPU.default_queue()
barrier = AMDGPU.barrier_and!(queue, AMDGPU.active_kernels(queue))
AMDGPU.HIP.hipDeviceSynchronize() # Sync all HIP kernels e.g. BLAS. N.B. this is blocking Julia progress
# AMDGPU.HIP.hipDeviceSynchronize() # Sync all HIP kernels e.g. BLAS. N.B. this is blocking Julia progress
wait(barrier)
end
else
Expand Down

0 comments on commit 676df63

Please sign in to comment.