Skip to content

Commit

Permalink
Add a packaging workflow. (#44)
Browse files Browse the repository at this point in the history
Also fixes some other odds and ends:

* Switches to larger runners.
* Uses the compiler's native support for memory outputs (vs direct use
of memfd). This removes special casing that would be needed when
building for old glibc versions (the compiler already does the right
thing in these cases).
* Adds a :plugins target to build everything.
* Fixes the CUDA SDK env var to include the "_DIR" suffix.
* Installs the needed parts of the CUDA SDK.
* Forks the build_linux_packages.sh that everyone uses and builds binary
plugins. Will extend this later to build Python wheels that
auto-configure.
  • Loading branch information
Stella Laurenzo authored Apr 10, 2023
1 parent 2d691ec commit ab703ba
Show file tree
Hide file tree
Showing 7 changed files with 271 additions and 70 deletions.
47 changes: 47 additions & 0 deletions .github/workflows/build_packages.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Builds binary packages

name: Build Packages

on:
  workflow_call:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      - main

concurrency:
  # A PR number if a pull request and otherwise the commit hash. This cancels
  # queued and in-progress runs for the same PR (presubmit) or commit
  # (postsubmit).
  group: build_packages_${{ github.event.number || github.sha }}
  cancel-in-progress: true

# Jobs are organized into groups and topologically sorted by dependencies
jobs:
  build:
    runs-on: ubuntu-20.04-64core
    steps:
      - name: "Checking out repository"
        uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0

      - name: "Setting up Python"
        uses: actions/setup-python@75f3110429a8c05be0e1bf360334e4cced2b63fa # v2.3.3
        with:
          python-version: "3.10"

      - name: Sync and install versions
        run: |
          # Since only building the runtime, exclude compiler deps (expensive).
          python ./sync_deps.py --exclude-submodule "iree:third_party/(llvm|mlir-hlo)"

      - name: Build Packages (Linux)
        run: |
          ./build_tools/ci/build_linux_packages.sh

      # Upload everything the packaging script deposited under bindist/.
      - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
        with:
          path: |
            bindist/*
            bindist/**/*
          retention-days: 5
16 changes: 8 additions & 8 deletions .github/workflows/build_smoketest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,46 +12,46 @@ on:
push:
branches:
- main
schedule:
- cron: "0 17 * * *"

concurrency:
# A PR number if a pull request and otherwise the commit hash. This cancels
# queued and in-progress runs for the same PR (presubmit) or commit
# (postsubmit).
group: ${{ github.event.number || github.sha }}
group: build_smoketest_${{ github.event.number || github.sha }}
cancel-in-progress: true

# Jobs are organized into groups and topologically sorted by dependencies
jobs:
build:
runs-on: ubuntu-20.04
runs-on: ubuntu-20.04-64core
steps:
- name: "Checking out repository"
uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0

- name: "Setting up Python"
uses: actions/setup-python@75f3110429a8c05be0e1bf360334e4cced2b63fa # v2.3.3
with:
python-version: "3.10" # Needs pybind >= 2.10.1 for Python >= 3.11
python-version: "3.10"

- name: Sync and install versions
run: |
# TODO: https://github.com/openxla/openxla-pjrt-plugin/issues/30
sudo apt install -y lld
python ./sync_deps.py
# Since only building the runtime, exclude compiler deps (expensive).
python ./sync_deps.py --exclude-submodule "iree:third_party/(llvm|mlir-hlo)"
pip install -r requirements.txt
- name: Setup Bazelisk
uses: bazelbuild/setup-bazelisk@v2

- name: "Sync and configure"
- name: "Install CUDA and Configure"
run: |
CUDA_SDK_DIR="$(../iree/third_party/nvidia_sdk_download/fetch_cuda_toolkit.py /tmp/cuda_sdk)"
python ./configure.py --cc=clang --cxx=clang++ --cuda-sdk-dir=$CUDA_SDK_DIR
- name: "Build CPU plugin"
run: |
bazel build iree/integrations/pjrt/cpu/...
bazel build :plugins
- name: "Run Unittests"
run: |
Expand Down
8 changes: 8 additions & 0 deletions BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,11 @@
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Convenience target that builds all PJRT plugin shared libraries at once
# (referenced by CI as `bazel build :plugins`).
filegroup(
    name = "plugins",
    srcs = [
        "//iree/integrations/pjrt/cpu:pjrt_plugin_iree_cpu.so",
        "//iree/integrations/pjrt/cuda:pjrt_plugin_iree_cuda.so",
    ],
)
183 changes: 183 additions & 0 deletions build_tools/ci/build_linux_packages.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
#!/bin/bash
# Copyright 2023 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# build_linux_packages.sh
# One stop build of the project's Python packages for Linux. The Linux build is
# complicated because it has to be done via a docker container that has
# an LTS glibc version, all Python packages and other deps.
# This script handles all of those details.
#
# Usage:
# Build everything (all packages, all python versions):
#   ./build_tools/ci/build_linux_packages.sh
#
# Build specific Python versions and packages to custom directory:
#   override_python_versions="cp38-cp38" \
#   packages="plugins" \
#   output_dir="/tmp/wheelhouse" \
#   ./build_tools/ci/build_linux_packages.sh
#
# Valid Python versions match a subdirectory under /opt/python in the docker
# image. Typically:
#   cp38-cp38 cp39-cp39 cp310-cp310
# Note that our Python packages are version independent so it is typical to
# build with the oldest supported Python vs multiples.
#
# Valid packages:
#   plugins
#   plugins_instrumented (currently does not work)
#
# Note that this script is meant to be run on CI and it will pollute both the
# output directory and in-tree build/ directories (under runtime/ and
# compiler/) with docker created, root owned builds. Sorry - there is
# no good way around it.
#
# It can be run on a workstation but recommend using a git worktree dedicated
# to packaging to avoid stomping on development artifacts.
set -xeu -o errtrace

# Directory containing this script and the script's own name; used to
# re-invoke this exact script inside the docker container.
this_dir="$(cd $(dirname $0) && pwd)"
script_name="$(basename $0)"
repo_root="$(cd "${this_dir}" && git rev-parse --show-toplevel)"
# All of the following are overridable via environment variables:
# Docker image to build inside (empty => resolved from the sibling iree
# checkout in run_on_host).
manylinux_docker_image="${manylinux_docker_image:-}"
# Space separated list of python ABI tags (subdirs of /opt/python).
python_versions="${override_python_versions:-cp38-cp38}"
# Where artifacts are deposited.
output_dir="${output_dir:-${repo_root}/bindist}"
# Space separated bazel-relative paths of plugin shared libraries to build.
plugins="${plugins:-iree/integrations/pjrt/cpu/pjrt_plugin_iree_cpu.so iree/integrations/pjrt/cuda/pjrt_plugin_iree_cuda.so}"
# Space separated list of package groups to build (see "Valid packages").
packages="${packages:-plugins}"
# Optional suffix appended to package names; dashes are normalized to
# underscores in run_in_docker.
package_suffix="${package_suffix:-}"

# Host-side entry point: resolves the docker image, canonicalizes the output
# directory, and re-executes this script inside the manylinux container with
# the configuration forwarded through the environment.
function run_on_host() {
  echo "Running on host"

  # Resolve a default image if the caller did not supply one.
  # (Do this before announcing the launch so we never print an empty name.)
  if [ -z "${manylinux_docker_image}" ]; then
    manylinux_docker_image="$("${repo_root}/../iree/build_tools/docker/get_image_name.py" manylinux2014_x86_64-release)"
    if [ -z "${manylinux_docker_image}" ]; then
      echo "ERROR: Could not determine manylinux docker image"
      exit 1
    fi
    echo "Using default docker image: ${manylinux_docker_image}"
  fi
  echo "Launching docker image ${manylinux_docker_image}"

  # Canonicalize the output path (must exist before cd).
  mkdir -p "${output_dir}"
  output_dir="$(cd "${output_dir}" && pwd)"
  echo "Outputting to ${output_dir}"
  # Mount one level up to get the entire workspace.
  mount_dir="$(cd "${repo_root}/.." && pwd)"
  docker run --rm \
    -v "${mount_dir}:${mount_dir}" \
    -v "${output_dir}:${output_dir}" \
    -e __MANYLINUX_BUILD_WHEELS_IN_DOCKER=1 \
    -e "override_python_versions=${python_versions}" \
    -e "plugins=${plugins}" \
    -e "packages=${packages}" \
    -e "package_suffix=${package_suffix}" \
    -e "output_dir=${output_dir}" \
    "${manylinux_docker_image}" \
    -- "${this_dir}/${script_name}"

  echo "******************** BUILD COMPLETE ********************"
  echo "Generated binaries:"
  ls -l "${output_dir}"
}

# Container-side entry point: for each requested package x python version,
# installs deps, configures bazel against the CUDA SDK, and dispatches to the
# matching build_* function.
function run_in_docker() {
  echo "Running in docker"
  echo "Using python versions: ${python_versions}"

  local orig_path="${PATH}"
  # Normalize once up front (package names cannot contain dashes); the
  # original did this inside the loop on every iteration.
  package_suffix="${package_suffix//-/_}"

  # Build phase.
  local package python_version python_dir cuda_sdk_dir
  for package in ${packages}; do
    echo "******************** BUILDING PACKAGE ${package} ********************"
    for python_version in ${python_versions}; do
      python_dir="/opt/python/${python_version}"
      if ! [ -x "${python_dir}/bin/python" ]; then
        echo "ERROR: Could not find python: ${python_dir} (skipping)"
        continue
      fi
      cd "${repo_root}"
      # Put the selected interpreter first on PATH for this iteration.
      export PATH="${python_dir}/bin:${orig_path}"
      echo ":::: Python version $(python --version)"
      echo "::: Running from $(pwd)"
      echo "::: Installing CUDA SDK..."
      cuda_sdk_dir="$("${repo_root}/../iree/third_party/nvidia_sdk_download/fetch_cuda_toolkit.py" /tmp/cuda_sdk)"
      echo "CUDA SDK installed at $cuda_sdk_dir"
      echo "::: Installing python dependencies"
      pip install -r requirements.txt
      echo "::: Configuring bazel"
      python configure.py --cuda-sdk-dir="${cuda_sdk_dir}"
      case "${package}" in
        plugins)
          build_plugins
          ;;

        plugins_instrumented)
          build_plugins_instrumented
          ;;
        *)
          echo "Unrecognized package '${package}'"
          exit 1
          ;;
      esac
    done
  done
}

# Builds the optimized plugin shared libraries listed in ${plugins} and copies
# them into ${output_dir}/pjrt_plugins.
function build_plugins() {
  local f
  local dest="${output_dir}/pjrt_plugins"
  mkdir -p "${dest}"
  # ${plugins} is deliberately unquoted: it is a space separated list of
  # bazel-relative paths that must word-split into separate targets.
  bazel build -c opt ${plugins}
  for f in ${plugins}; do
    cp -fv "bazel-bin/${f}" "${dest}"
  done
}

# Same as build_plugins but with runtime tracing instrumentation enabled;
# outputs go to a separate pjrt_plugins_instrumented directory.
function build_plugins_instrumented() {
  # TODO: Currently does not compile.
  local f
  local dest="${output_dir}/pjrt_plugins_instrumented"
  mkdir -p "${dest}"
  # ${plugins} is deliberately unquoted: it is a space separated list of
  # bazel-relative paths that must word-split into separate targets.
  bazel build -c opt --iree_enable_runtime_tracing ${plugins}
  for f in ${plugins}; do
    cp -fv "bazel-bin/${f}" "${dest}"
  done
}

# Builds a single Python wheel into ${output_dir}.
# Arguments: $1 - path of the wheel's source directory, relative to repo_root.
function build_wheel() {
  # Was "${repo_root}/$@", which mis-expands when more than one argument is
  # passed (only the first word gets the repo_root prefix). This helper takes
  # exactly one relative path.
  python -m pip wheel --disable-pip-version-check -v -w "${output_dir}" "${repo_root}/$1"
}

# Repairs a freshly built linux_x86_64 wheel into a manylinux wheel via
# auditwheel, then deletes the original non-portable file.
# Arguments: $1 - wheel basename, $2 - python version tag (e.g. cp38-cp38).
function run_audit_wheel() {
  local base="$1"
  local pyver="$2"
  # The glob stays outside the quotes so it expands against the version tag.
  generic_wheel="$(echo "${output_dir}/${base}-"*"-${pyver}-linux_x86_64.whl")"
  ls "${generic_wheel}"
  echo ":::: Auditwheel ${generic_wheel}"
  auditwheel repair -w "${output_dir}" "${generic_wheel}"
  rm -v "${generic_wheel}"
}

# Deletes any previously built wheels in ${output_dir} matching the given
# wheel basename and python version tag.
# Arguments: $1 - wheel basename, $2 - python version tag.
function clean_wheels() {
  local base="$1" pyver="$2"
  echo ":::: Clean wheels ${base} ${pyver}"
  # Globs stay outside the quotes so build/platform tags match.
  rm -f -v "${output_dir}/${base}-"*"-${pyver}-"*".whl"
}

# Trampoline to the docker container if running on the host.
# __MANYLINUX_BUILD_WHEELS_IN_DOCKER is only set by run_on_host's `docker run`
# invocation, so the first (host) execution takes the run_on_host branch, which
# re-executes this same script inside the container with the variable set.
if [ -z "${__MANYLINUX_BUILD_WHEELS_IN_DOCKER-}" ]; then
run_on_host "$@"
else
run_in_docker "$@"
fi
2 changes: 1 addition & 1 deletion configure.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def add_env(key, value):
add_env("JAX_USE_PJRT_C_API_ON_TPU", "1") # TODO: Remove when ready
if args.cuda_sdk_dir:
print(f"Enabling CUDA SDK: {args.cuda_sdk_dir}")
add_env("IREE_CUDA_DEPS", args.cuda_sdk_dir)
add_env("IREE_CUDA_DEPS_DIR", args.cuda_sdk_dir)
else:
print("Not enabling CUDA. Pass --cuda-sdk-dir= to enable")

Expand Down
Loading

0 comments on commit ab703ba

Please sign in to comment.