Skip to content

Commit

Permalink
Add script to push nightly *.whl files to S3 (#1201)
Browse files Browse the repository at this point in the history
* Add script to push nightly binaries to S3

* Add README changes

* Add README command and cleanup unused declarations in code

* Specify full path for conda binary

* Fix output folder

* Remove pypi and anaconda upload command from buildspec

* Correct script name

* Add s3:// to s3 path

* Remove conda binary spec

* De-dupe and pre-check for existence of conda binary

* Correct docstring

* Set CONDA_BINARY correctly if conda already present on the instance

* Trigger build
  • Loading branch information
nikhil-sk authored Sep 7, 2021
1 parent 4724195 commit f18be88
Show file tree
Hide file tree
Showing 4 changed files with 134 additions and 3 deletions.
6 changes: 6 additions & 0 deletions binaries/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,9 +88,15 @@
2. Edit `upload.py` to change the CONDA_USER if necessary
3. Run the following commands to build the packages, and then upload them to staging repos
```
python3 ts_scripts/install_dependencies.py --environment=dev
python3 binaries/conda/build_packages.py --install-conda-dependencies
exec bash
python3 binaries/build.py
cd binaries/
python3 upload.py --upload-pypi-packages --upload-conda-packages
```
4. To upload *.whl files to S3 bucket, run the following command:
Note: `--nightly` option puts the *.whl files in a subfolder named 'nightly' in the specified bucket
```
python s3_binary_upload.py --s3-bucket <s3_bucket> --nightly
```
14 changes: 11 additions & 3 deletions binaries/conda/build_packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,25 @@
# Directory containing this script, and the repository root relative to it.
conda_build_dir = os.path.dirname(os.path.abspath(__file__))
REPO_ROOT = os.path.join(conda_build_dir, "..", "..")

MINICONDA_DOWNLOAD_URL = "https://repo.anaconda.com/miniconda/Miniconda3-py39_4.9.2-Linux-x86_64.sh"

# Use the conda already on PATH when one is available (exit code 0 from
# `conda --version`), otherwise fall back to the location install_miniconda()
# installs to. NOTE: the `$HOME` in the fallback is expanded by the shell in
# the os.system() calls that consume CONDA_BINARY, not by Python.
# (Removed the f-string prefixes: neither literal had any placeholders.)
CONDA_BINARY = (
    os.popen("which conda").read().strip()
    if os.system("conda --version") == 0
    else "$HOME/miniconda/condabin/conda"
)

def install_conda_build():
    """
    Install conda-build and anaconda-client, the tools required to create
    and upload conda packages.
    """
    install_cmd = f"{CONDA_BINARY} install python=3.8 conda-build anaconda-client -y"
    os.system(install_cmd)

def install_miniconda():
"""
Installs miniconda, a slimmer anaconda installation to build conda packages
"""

# Check if conda binary already exists
exit_code = os.system(f"conda --version")
if exit_code == 0:
print(f"'conda' already present on the system. Proceeding without a fresh minconda installation.")
return

os.system(f"rm -rf $HOME/miniconda")
exit_code = os.system(f"wget {MINICONDA_DOWNLOAD_URL} -O ~/miniconda.sh")
if exit_code != 0:
Expand All @@ -27,7 +35,7 @@ def install_miniconda():
os.system(f"ln -s $HOME/miniconda/bin/activate $HOME/miniconda/condabin/activate")
os.system(f"ln -s $HOME/miniconda/bin/deactivate $HOME/miniconda/condabin/deactivate")

os.system(f"$HOME/miniconda/condabin/conda init")
os.system(f"{CONDA_BINARY} init")


def conda_build(ts_wheel_path, ma_wheel_path, wa_wheel_path):
Expand Down Expand Up @@ -64,7 +72,7 @@ def conda_build(ts_wheel_path, ma_wheel_path, wa_wheel_path):
for pkg in packages:
for pyv in python_versions:
output_dir = os.path.join(conda_build_dir, "output")
cmd = f"conda build --output-folder {output_dir} --python={pyv} {pkg}"
cmd = f"{CONDA_BINARY} build --output-folder {output_dir} --python={pyv} {pkg}"
print(f"## In directory: {os.getcwd()}; Executing command: {cmd}")
exit_code = os.system(cmd)
if exit_code != 0:
Expand Down
97 changes: 97 additions & 0 deletions binaries/s3_binary_upload.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import argparse
import glob
import logging
import os
import subprocess
import sys

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
LOGGER.addHandler(logging.StreamHandler(sys.stderr))


# Repository root, resolved relative to this file's location.
REPO_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")

# Default locations of the built *.whl files for torchserve, the model
# archiver, and the workflow archiver.
# NOTE(review): the original used glob(...)[0] on a literal path containing
# no wildcard, which raised IndexError at import time whenever one of the
# 'dist' folders did not exist yet; joining the path directly produces the
# same value without the import-time crash.
TS_WHEEL_PATH = os.path.join(REPO_ROOT, "dist")
MA_WHEEL_PATH = os.path.join(REPO_ROOT, "model-archiver", "dist")
WA_WHEEL_PATH = os.path.join(REPO_ROOT, "workflow-archiver", "dist")


class S3BinaryUploader:
    """Uploads locally built *.whl files to an S3 bucket via the AWS CLI."""

    def __init__(self, s3_bucket: str, is_dryrun: bool = False, is_nightly: bool = False):
        """
        Initialize the uploader.

        :param s3_bucket: destination bucket, e.g. 's3://my-bucket'
        :param is_dryrun: if True, pass --dryrun to 'aws s3 cp' so nothing is actually uploaded
        :param is_nightly: if True, upload into the 'nightly' subfolder of the bucket
        """
        self.s3_bucket = s3_bucket
        self.dryrun = is_dryrun
        if self.dryrun:
            self.s3_command = "aws s3 cp --recursive --dryrun"
        else:
            self.s3_command = "aws s3 cp --recursive"

        # Subfolder within <bucket>/whl/ used for nightly builds; empty for releases.
        self.channel = "nightly" if is_nightly else ""

    def s3_upload_local_folder(self, local_folder_path: str):
        """
        Upload the *.whl files found in a local folder to the configured S3 bucket.

        Failures are logged and swallowed (best-effort upload), matching the
        original CI behavior of continuing with the remaining folders.

        :param local_folder_path: path of the folder whose *.whl files are uploaded
        """
        LOGGER.info(f"Uploading *.whl files from folder: {local_folder_path}")
        s3_command = (
            f"{self.s3_command} --exclude '*' --include '*.whl' "
            f"{local_folder_path} {self.s3_bucket.rstrip('/')}/whl/{self.channel}"
        )
        # Log the command before running it so a hard crash still leaves a trace.
        LOGGER.info(f"S3 upload using command: {s3_command}")

        try:
            # Fixed: the result of subprocess.run is a CompletedProcess, not a
            # return code; the original bound it to an unused 'ret_code' name.
            subprocess.run(
                s3_command, check=True, stdout=subprocess.PIPE, universal_newlines=True, shell=True
            )
        except subprocess.CalledProcessError as e:
            # Log at ERROR (was INFO) so CI surfaces the failure.
            LOGGER.error(f"S3 upload command failed: {s3_command}. Exception: {e}")

    def s3_upload_default_binaries(self):
        """
        Upload the *.whl files from the standard 'dist' folders of the pytorch
        'serve' repository: torchserve, model-archiver, and workflow-archiver.
        """
        for local_folder_path in (TS_WHEEL_PATH, MA_WHEEL_PATH, WA_WHEEL_PATH):
            self.s3_upload_local_folder(local_folder_path)


if __name__ == "__main__":
parser = argparse.ArgumentParser(description="argument parser for s3_binary_upload.py")
parser.add_argument(
"--s3-bucket", required=True, help="Specify the s3 bucket to which the binaries will be uploaded"
)
parser.add_argument(
"--dry-run",
required=False,
action="store_true",
default=False,
help="Specify if you just want to dry-run the upload",
)
parser.add_argument(
"--nightly",
required=False,
action="store_true",
default=False,
help="Specify if you wnat to upload the binaries to the 'nightly' subfolder",
)
parser.add_argument(
"--local-binaries-path",
required=False,
default=None,
help="Specify a path to a folder with *.whl files, else default path to *.whl files will be used",
)

args = parser.parse_args()

s3BinaryUploader = S3BinaryUploader(args.s3_bucket, args.dry_run, args.nightly)

if args.local_binaries_path:
s3BinaryUploader.s3_upload_local_folder(args.local_binaries_path)
else:
s3BinaryUploader.s3_upload_default_binaries()

args = parser.parse_args()

20 changes: 20 additions & 0 deletions ci/torchserve_nightly_build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Build Spec for AWS CodeBuild CI
# Nightly job: installs build dependencies, builds the conda packages and
# *.whl files, then pushes the wheels to the torchserve-builds S3 bucket.

version: 0.2
env:
  shell: bash

phases:
  install:
    commands:
      - apt-get update
      - apt-get install sudo -y
      # CUDA 10.2 dev environment for building GPU-capable binaries
      - python ts_scripts/install_dependencies.py --cuda=cu102 --environment=dev

  build:
    commands:
      - python3 binaries/conda/build_packages.py --install-conda-dependencies
      # NOTE(review): presumably re-execs the shell so 'conda init' changes
      # take effect before the build steps — confirm this works under CodeBuild
      - exec bash
      - python3 binaries/build.py
      - cd binaries/
      # --nightly places the wheels under the 'nightly' subfolder of the bucket
      - python3 s3_binary_upload.py --s3-bucket s3://torchserve-builds --nightly

0 comments on commit f18be88

Please sign in to comment.