Skip to content

Commit

Permalink
Revert "tests folder has been segregated (#2948)" (#3027)
Browse files Browse the repository at this point in the history
This reverts commit 264bfbb.
  • Loading branch information
suryapa1 authored Jun 11, 2024
1 parent dfff4a8 commit 8e5d582
Show file tree
Hide file tree
Showing 10 changed files with 214 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ extra_config: environment.yaml
test:
pytest:
enabled: true
pip_requirements: ../../tests/requirements.txt
tests_dir: ../../tests
pip_requirements: tests/requirements.txt
tests_dir: tests
categories: ["Inference"]
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""Tests running a sample job in the minimal 20.04 py38 cpu environment."""
"""Tests running a sample job in the minimal 20.04 py39 cpu environment."""
import os
import time
from pathlib import Path
Expand All @@ -10,17 +10,16 @@
from azure.ai.ml.entities import Environment, BuildContext
from azure.identity import AzureCliCredential

BUILD_CONTEXT = Path("../environments/minimal-ubuntu20.04-py39-cpu-inference/context")
print(f"BUILD_CONTEXT : {BUILD_CONTEXT}")
BUILD_CONTEXT = Path("../context")
JOB_SOURCE_CODE = "src"
TIMEOUT_MINUTES = os.environ.get("timeout_minutes", 30)
STD_LOG = Path("artifacts/user_logs/std_log.txt")


def test_minimal_cpu_inference():
"""Tests a sample job using minimal 20.04 py38 cpu as the environment."""
"""Tests a sample job using minimal 20.04 py39 cpu as the environment."""
this_dir = Path(__file__).parent
print(f"this_dir : {this_dir}")

subscription_id = os.environ.get("subscription_id")
resource_group = os.environ.get("resource_group")
workspace_name = os.environ.get("workspace")
Expand All @@ -34,9 +33,10 @@ def test_minimal_cpu_inference():
env_docker_context = Environment(
build=BuildContext(path=this_dir / BUILD_CONTEXT),
name="minimal_cpu_inference",
description="minimal 20.04 py38 cpu inference environment created from a Docker context.",
description="minimal 20.04 py39 cpu inference environment created from a Docker context.",
)
ml_client.environments.create_or_update(env_docker_context)

# create the command
job = command(
code=this_dir / JOB_SOURCE_CODE, # local path where the code is stored
Expand All @@ -47,7 +47,7 @@ def test_minimal_cpu_inference():
environment=f"{env_name}@latest",
compute=os.environ.get("cpu_cluster"),
display_name="minimal-cpu-inference-example",
description="A test run of the minimal 20.04 py38 cpu inference curated environment",
description="A test run of the minimal 20.04 py39 cpu inference curated environment",
experiment_name="minimalCPUInferenceExperiment"
)

Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ extra_config: environment.yaml
test:
pytest:
enabled: true
pip_requirements: ../../tests/requirements.txt
tests_dir: ../../tests
pip_requirements: tests/requirements.txt
tests_dir: tests
categories: ["Inference"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""Tests running a sample job in the minimal 22.04 py311 cpu environment."""
import os
import time
from pathlib import Path
from azure.ai.ml import command, MLClient
from azure.ai.ml._restclient.models import JobStatus
from azure.ai.ml.entities import Environment, BuildContext
from azure.identity import AzureCliCredential

# Docker build context for the environment image, relative to this test file.
BUILD_CONTEXT = Path("../context")
# Directory (relative to this file) that holds the job's source code.
JOB_SOURCE_CODE = "src"
# Maximum minutes to wait for the AML job. Coerce to int: environment
# variables are always strings, and a str here would make the later
# `TIMEOUT_MINUTES * 60` repeat the string instead of multiplying.
TIMEOUT_MINUTES = int(os.environ.get("timeout_minutes", 30))
# Path where the failed job's user log is expected after download.
STD_LOG = Path("artifacts/user_logs/std_log.txt")


def test_minimal_cpu_inference():
    """Tests a sample job using minimal 22.04 py311 cpu as the environment.

    Builds the environment from the local Docker context, submits a command
    job running ``main.py`` against it, then polls until the job reaches a
    terminal state or the timeout elapses. On failure the job's std_log is
    downloaded and printed to aid debugging. Asserts the job completed.
    """
    this_dir = Path(__file__).parent

    subscription_id = os.environ.get("subscription_id")
    resource_group = os.environ.get("resource_group")
    workspace_name = os.environ.get("workspace")

    ml_client = MLClient(
        AzureCliCredential(), subscription_id, resource_group, workspace_name
    )

    env_name = "minimal_cpu_inference"

    env_docker_context = Environment(
        build=BuildContext(path=this_dir / BUILD_CONTEXT),
        # reuse env_name so the created environment and the job's
        # f"{env_name}@latest" reference can never drift apart
        name=env_name,
        description="minimal 22.04 py311 cpu inference environment created from a Docker context.",
    )
    ml_client.environments.create_or_update(env_docker_context)

    # create the command
    job = command(
        code=this_dir / JOB_SOURCE_CODE,  # local path where the code is stored
        command="python main.py --score ${{inputs.score}}",
        inputs=dict(
            score="valid_score.py",
        ),
        environment=f"{env_name}@latest",
        compute=os.environ.get("cpu_cluster"),
        display_name="minimal-cpu-inference-example",
        description="A test run of the minimal 22.04 py311 cpu inference curated environment",
        experiment_name="minimalCPUInferenceExperiment"
    )

    returned_job = ml_client.create_or_update(job)
    assert returned_job is not None

    # Poll until final status is reached or timed out.
    # int() guards against TIMEOUT_MINUTES being a string when it was read
    # from the "timeout_minutes" environment variable.
    timeout = time.time() + (int(TIMEOUT_MINUTES) * 60)
    while time.time() <= timeout:
        job = ml_client.jobs.get(returned_job.name)
        status = job.status
        if status in [JobStatus.COMPLETED, JobStatus.FAILED]:
            break
        time.sleep(30)  # sleep 30 seconds
    else:
        # while/else: the loop exhausted without break -> timed out.
        # Cancel the job so it does not keep consuming the compute cluster.
        ml_client.jobs.cancel(returned_job.name)
        raise Exception(f"Test aborted because the job took longer than {TIMEOUT_MINUTES} minutes. "
                        f"Last status was {status}.")

    if status == JobStatus.FAILED:
        # Download and dump the user log so CI failures are diagnosable.
        ml_client.jobs.download(returned_job.name)
        if STD_LOG.exists():
            print(f"*** BEGIN {STD_LOG} ***")
            with open(STD_LOG, "r") as f:
                print(f.read(), end="")
            print(f"*** END {STD_LOG} ***")
        else:
            ml_client.jobs.stream(returned_job.name)

    assert status == JobStatus.COMPLETED
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
azure-ai-ml==0.1.0b4
azure.identity==1.10.0
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""Validate minimal inference cpu environment by running azmlinfsrv."""

# imports
import os
import subprocess
import requests
from datetime import datetime, timedelta
import time
import argparse


def main(args):
    """Start inference server and post scoring request.

    Args:
        args: parsed CLI namespace; ``args.score`` is the entry script path.
    """
    # start the server, logging to /var/tmp
    server_process = start_server("/var/tmp", ["--entry_script", args.score, "--port", "8081"])

    try:
        # score a request
        req = score_with_post()
    finally:
        # always tear the server down, even if the scoring request raised;
        # otherwise a failed request leaks the azmlinfsrv process
        server_process.kill()

    print(req)


def start_server(log_directory, args, timeout=timedelta(seconds=15)):
    """Start inference server with options.

    Launches ``azmlinfsrv`` with stdout/stderr redirected to files under
    *log_directory*, then polls the server root endpoint until it responds,
    the process exits, or *timeout* elapses. Returns the Popen handle either
    way; the caller is responsible for terminating the process.
    """
    stderr_path = os.path.join(log_directory, "stderr.txt")
    stdout_path = os.path.join(log_directory, "stdout.txt")

    env = os.environ.copy()
    # Close the parent's log file handles once the child has inherited them;
    # the child keeps its own duplicated descriptors, and leaving these open
    # here leaked two file handles per call.
    with open(stderr_path, "w") as stderr_file, open(stdout_path, "w") as stdout_file:
        server_process = subprocess.Popen(
            ["azmlinfsrv"] + args, stdout=stdout_file, stderr=stderr_file, env=env
        )

    max_time = datetime.now() + timeout

    while datetime.now() < max_time:
        time.sleep(0.25)
        req = None
        try:
            req = requests.get("http://127.0.0.1:8081", timeout=10)
        except Exception as e:
            # server not ready yet; keep polling until max_time
            print(e)

        if req is not None and req.ok:
            break

        # Ensure the server is still running
        status = server_process.poll()
        if status is not None:
            break

    print(log_directory, "stderr.txt")
    print(log_directory, "stdout.txt")

    return server_process


def score_with_post(headers=None, data=None):
    """Post scoring request to the server."""
    # single scoring endpoint exposed by azmlinfsrv on the test port
    return requests.post(
        url="http://127.0.0.1:8081/score",
        headers=headers,
        data=data,
    )


def parse_args():
    """Parse input arguments."""
    # declare the expected flags and return the parsed namespace
    parser = argparse.ArgumentParser()
    parser.add_argument("--score", type=str)
    return parser.parse_args()


# run script
if __name__ == "__main__":
    # parse the CLI flags and hand them straight to the entry point
    main(parse_args())
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""A basic entry script."""

# imports
import uuid
import os
from datetime import datetime
from azureml_inference_server_http.api.aml_response import AMLResponse
from azureml_inference_server_http.api.aml_request import rawhttp


def init():
    """Sample init function."""
    # nothing to warm up in this sample; just record that init ran
    print("Initializing")


@rawhttp
def run(input_data):
    """Sample run function."""
    print('A new request received~~~')
    try:
        # assemble the sample payload in one literal instead of field-by-field
        payload = {
            'request_id': str(uuid.uuid4()),
            'now': datetime.now().strftime("%Y/%m/%d %H:%M:%S %f"),
            'pid': os.getpid(),
            'message': "this is a sample",
        }
        return AMLResponse(payload, 200, json_str=True)
    except Exception as e:
        return AMLResponse({'error': str(e)}, 500, json_str=True)

0 comments on commit 8e5d582

Please sign in to comment.