diff --git a/cloud-service-providers/azure/azureml/python_sdk/README.md b/cloud-service-providers/azure/azureml/python_sdk/README.md
new file mode 100644
index 0000000..47f30f2
--- /dev/null
+++ b/cloud-service-providers/azure/azureml/python_sdk/README.md
@@ -0,0 +1,21 @@
+# Instructions for deploying NIM models on AzureML using Python SDK
+
+In this example, we will deploy the LLAMA3 8B model on AzureML using the Python SDK.
+
+**Prerequisites:**
+- [NGC API Key](https://catalog.ngc.nvidia.com/)
+- [AzureML workspace](https://learn.microsoft.com/en-us/azure/machine-learning/how-to-manage-workspace?view=azureml-api-2&tabs=python)
+
+1. Provision the compute instance using the Jupyter notebook `provision-aml-compute.ipynb`. This will set up the 1xA100 GPU compute on AzureML. You can run this Jupyter notebook from your local machine.
+
+2. Upon the successful running of this notebook, you will get the URL of the Jupyter server which starts running on the AzureML compute as shown below (_note: your URL will be different_). You can then paste the URL in your local machine's browser.
+```bash
+
+{'display_name': 'Jupyter Lab', 'endpoint_uri': 'https://mayani-gpu-ci.swedencentral.instances.azureml.ms/lab'}].....
+
+```
+
+3. 
Run the script `nim-azureml-compute.ipynb` from this repository on your jupyter server which is running on the AzureML compute node as shown in the image below +![image](imgs/browser.png) + + diff --git a/cloud-service-providers/azure/azureml/python_sdk/imgs/browser.png b/cloud-service-providers/azure/azureml/python_sdk/imgs/browser.png new file mode 100644 index 0000000..396e72e Binary files /dev/null and b/cloud-service-providers/azure/azureml/python_sdk/imgs/browser.png differ diff --git a/cloud-service-providers/azure/azureml/python_sdk/nim-azureml-compute.ipynb b/cloud-service-providers/azure/azureml/python_sdk/nim-azureml-compute.ipynb new file mode 100644 index 0000000..ee9c2e8 --- /dev/null +++ b/cloud-service-providers/azure/azureml/python_sdk/nim-azureml-compute.ipynb @@ -0,0 +1,309 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "be9cbb03-afe2-4c0a-96c2-bcfa2dfb7e65", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Thu Jul 11 21:57:03 2024 \n", + "+---------------------------------------------------------------------------------------+\n", + "| NVIDIA-SMI 535.171.04 Driver Version: 535.171.04 CUDA Version: 12.2 |\n", + "|-----------------------------------------+----------------------+----------------------+\n", + "| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\n", + "| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n", + "| | | MIG M. 
|\n", + "|=========================================+======================+======================|\n", + "| 0 NVIDIA A100 80GB PCIe On | 00000001:00:00.0 Off | 0 |\n", + "| N/A 32C P0 41W / 300W | 0MiB / 81920MiB | 0% Default |\n", + "| | | Disabled |\n", + "+-----------------------------------------+----------------------+----------------------+\n", + " \n", + "+---------------------------------------------------------------------------------------+\n", + "| Processes: |\n", + "| GPU GI CI PID Type Process name GPU Memory |\n", + "| ID ID Usage |\n", + "|=======================================================================================|\n", + "| No running processes found |\n", + "+---------------------------------------------------------------------------------------+\n" + ] + } + ], + "source": [ + "!nvidia-smi" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "247b5c45-c8f3-4cb4-8eea-823a04d3c3ea", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NGC API Key: ········\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Login Succeeded\n", + "\n", + "WARNING! Your password will be stored unencrypted in /home/azureuser/.docker/config.json.\n", + "Configure a credential helper to remove this warning. 
See\n",
+    "https://docs.docker.com/engine/reference/commandline/login/#credentials-store\n",
+    "\n",
+    "\n"
+   ]
+  }
+ ],
+ "source": [
+  "import getpass\n",
+  "import subprocess\n",
+  "import os\n",
+  "\n",
+  "# Prompt for NGC API key\n",
+  "ngc_api_key = getpass.getpass(\"NGC API Key: \")\n",
+  "\n",
+  "# Log in to the Docker registry (key passed via stdin, not a shell command line)\n",
+  "login_command = [\"docker\", \"login\", \"nvcr.io\", \"-u\", \"$oauthtoken\", \"--password-stdin\"]\n",
+  "login_result = subprocess.run(login_command, input=ngc_api_key, capture_output=True, text=True)\n",
+  "print(login_result.stdout)\n",
+  "print(login_result.stderr)\n",
+  "\n",
+  "# Check if login was successful\n",
+  "if login_result.returncode != 0:\n",
+  "    raise Exception(\"Docker login failed\")\n",
+  "\n"
+ ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 19,
+  "id": "3990ed25-a430-40b1-bd3b-d0e6c4532709",
+  "metadata": {
+   "tags": []
+  },
+  "outputs": [],
+  "source": [
+   "# Set environment variables\n",
+   "os.environ[\"CONTAINER_NAME\"] = \"llama3-8b-instruct\"\n",
+   "os.environ[\"IMG_NAME\"] = f\"nvcr.io/nim/meta/{os.environ['CONTAINER_NAME']}:1.0.0\"\n",
+   "os.environ.setdefault(\"NGC_API_KEY\", ngc_api_key)\n",
+   "os.environ[\"LOCAL_NIM_CACHE\"] = \"/mnt/batch/tasks/shared/LS_root/mounts/clusters/mayani-gpu-ci/code/.cache/nim\"\n",
+   "\n",
+   "# Create the cache directory\n",
+   "os.makedirs(os.environ[\"LOCAL_NIM_CACHE\"], exist_ok=True)\n",
+   "\n",
+   "# Define the docker run command without -it and with -d\n",
+   "docker_command = [\n",
+   "    \"docker\", \"run\", \"-d\", \"--rm\",\n",
+   "    f\"--name={os.environ['CONTAINER_NAME']}\",\n",
+   "    \"--gpus\", \"all\",\n",
+   "    \"-e\", f\"NGC_API_KEY={os.environ['NGC_API_KEY']}\",\n",
+   "    \"-v\", f\"{os.environ['LOCAL_NIM_CACHE']}:/opt/nim/.cache\",\n",
+   "    \"-u\", str(os.getuid()),\n",
+   "    \"-p\", \"8000:8000\",\n",
+   "    os.environ[\"IMG_NAME\"]\n",
+   "]\n",
+   "\n",
+   "# Execute the docker run command\n",
+   "result = subprocess.run(docker_command, capture_output=True, text=True)\n"
+  ]
+ },
+ {
+  "cell_type": 
"code", + "execution_count": 15, + "id": "da55dd96-0d73-4039-a561-aa40f10fcaef", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "stdout: 9a7d3f7cc0fceeb3a6dc5a80dad6e859c8bef94f9963f2e251f374102601d436\n", + "\n", + "stderr: \n", + "Container started successfully with ID: 9a7d3f7cc0fceeb3a6dc5a80dad6e859c8bef94f9963f2e251f374102601d436\n", + "Container logs:\n", + "\n", + "===========================================\n", + "== NVIDIA Inference Microservice LLM NIM ==\n", + "===========================================\n", + "\n", + "NVIDIA Inference Microservice LLM NIM Version 1.0.0\n", + "Model: nim/meta/llama3-8b-instruct\n", + "\n", + "Container image Copyright (c) 2016-2024, NVIDIA CORPORATION & AFFILIATES. All rights reserved.\n", + "\n", + "This NIM container is governed by the NVIDIA AI Product Agreement here:\n", + "https://www.nvidia.com/en-us/data-center/products/nvidia-ai-enterprise/eula/.\n", + "A copy of this license can be found under /opt/nim/LICENSE.\n", + "\n", + "The use of this model is governed by the AI Foundation Models Community License\n", + "here: https://docs.nvidia.com/ai-foundation-models-community-license.pdf.\n", + "\n", + "ADDITIONAL INFORMATION: Meta Llama 3 Community License, Built with Meta Llama 3. 
\n", + "A copy of the Llama 3 license can be found under /opt/nim/MODEL_LICENSE.\n", + "\n", + "\n" + ] + } + ], + "source": [ + "import subprocess\n", + "import os\n", + "import getpass\n", + "\n", + "# Prompt for NGC API key if not set\n", + "if \"NGC_API_KEY\" not in os.environ:\n", + " os.environ[\"NGC_API_KEY\"] = getpass.getpass(\"NGC API Key: \")\n", + "\n", + "# Set environment variables\n", + "container_name = \"llama3-8b-instruct\"\n", + "img_name = f\"nvcr.io/nim/meta/{container_name}:1.0.0\"\n", + "local_nim_cache = \"/mnt/batch/tasks/shared/LS_root/mounts/clusters/mayani-gpu-ci/code/.cache/nim\" # this should be the path where you want to store the cache\n", + "\n", + "# Create the cache directory\n", + "os.makedirs(local_nim_cache, exist_ok=True)\n", + "\n", + "# Define the docker run command without -it and with -d\n", + "docker_command = [\n", + " \"docker\", \"run\", \"-d\", \"--rm\",\n", + " f\"--name={container_name}\",\n", + " \"--gpus\", \"all\",\n", + " \"-e\", f\"NGC_API_KEY={os.environ['NGC_API_KEY']}\",\n", + " \"-v\", f\"{local_nim_cache}:/opt/nim/.cache\",\n", + " \"-u\", str(os.getuid()),\n", + " \"-p\", \"8000:8000\",\n", + " img_name\n", + "]\n", + "\n", + "# Execute the docker run command\n", + "result = subprocess.run(docker_command, capture_output=True, text=True)\n", + "print(\"stdout:\", result.stdout)\n", + "print(\"stderr:\", result.stderr)\n", + "\n", + "# Check if the container started successfully\n", + "if result.returncode == 0:\n", + " container_id = result.stdout.strip()\n", + " print(f\"Container started successfully with ID: {container_id}\")\n", + "\n", + " # Optionally, check the logs of the container\n", + " logs_command = [\"docker\", \"logs\", container_id]\n", + " logs_result = subprocess.run(logs_command, capture_output=True, text=True)\n", + " print(\"Container logs:\")\n", + " print(logs_result.stdout)\n", + " print(logs_result.stderr)\n", + "else:\n", + " print(\"Failed to start the container\")" + ] + }, + { 
+ "cell_type": "code", + "execution_count": 24, + "id": "ed95bad4-d477-4c58-a0b8-fbf5732575eb", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n", + "32fdd3fe65a4 localhost/c3:latest \"/usr/local/bin/ice\" About an hour ago Up About an hour c3-progenitor\n" + ] + } + ], + "source": [ + "!docker container ps -a" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "ee094db9-c986-4e84-a023-56782d6f8837", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Status Code: 200\n", + "Response Body: {'id': 'cmpl-9f5372bd74444ce09a68b6335c6f9905', 'object': 'text_completion', 'created': 1720736060, 'model': 'meta/llama3-8b-instruct', 'choices': [{'index': 0, 'text': ', there was a lovely little girl name Sophie. She was eight years old and lived in a small town in the countryside. Sophie had a big heart and always tried to do the right thing, even if it meant going against the crowd.\\nOne day, Sophie discovered that her cat, Mr. 
Whiskers, was', 'logprobs': None, 'finish_reason': 'length', 'stop_reason': None}], 'usage': {'prompt_tokens': 5, 'total_tokens': 69, 'completion_tokens': 64}}\n" + ] + } + ], + "source": [ + "import requests\n", + "import json\n", + "\n", + "# Define the URL and headers\n", + "url = 'http://0.0.0.0:8000/v1/completions'\n", + "headers = {\n", + " 'accept': 'application/json',\n", + " 'Content-Type': 'application/json'\n", + "}\n", + "\n", + "# Define the payload\n", + "payload = {\n", + " \"model\": \"meta/llama3-8b-instruct\",\n", + " \"prompt\": \"Once upon a time\",\n", + " \"max_tokens\": 64\n", + "}\n", + "\n", + "# Make the POST request\n", + "response = requests.post(url, headers=headers, data=json.dumps(payload))\n", + "\n", + "# Print the response\n", + "print(\"Status Code:\", response.status_code)\n", + "print(\"Response Body:\", response.json())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b3437882-6834-4ffa-a959-8b1a4cbc4786", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.8 - AzureML", + "language": "python", + "name": "python38-azureml" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.19" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cloud-service-providers/azure/azureml/python_sdk/provision-aml-compute.ipynb b/cloud-service-providers/azure/azureml/python_sdk/provision-aml-compute.ipynb new file mode 100644 index 0000000..d3f0ed1 --- /dev/null +++ b/cloud-service-providers/azure/azureml/python_sdk/provision-aml-compute.ipynb @@ -0,0 +1,206 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Install the dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: azure-identity in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (1.16.0)\n", + "Requirement already satisfied: azure-mgmt-resource in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (22.0.0)\n", + "Requirement already satisfied: azure-core>=1.23.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-identity) (1.30.2)\n", + "Requirement already satisfied: cryptography>=2.5 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-identity) (42.0.8)\n", + "Requirement already satisfied: msal>=1.24.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-identity) (1.28.0)\n", + "Requirement already satisfied: msal-extensions>=0.3.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-identity) (1.1.0)\n", + "Requirement already satisfied: msrest>=0.7.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-mgmt-resource) (0.7.1)\n", + "Requirement already satisfied: azure-common~=1.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-mgmt-resource) (1.1.28)\n", + "Requirement already satisfied: azure-mgmt-core<2.0.0,>=1.3.2 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-mgmt-resource) (1.4.0)\n", + "Requirement already satisfied: requests>=2.21.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-core>=1.23.0->azure-identity) (2.32.3)\n", + "Requirement already satisfied: six>=1.11.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-core>=1.23.0->azure-identity) (1.16.0)\n", + "Requirement already satisfied: typing-extensions>=4.6.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-core>=1.23.0->azure-identity) (4.12.1)\n", + "Requirement already satisfied: cffi>=1.12 in 
/Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from cryptography>=2.5->azure-identity) (1.16.0)\n", + "Requirement already satisfied: PyJWT<3,>=1.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from PyJWT[crypto]<3,>=1.0.0->msal>=1.24.0->azure-identity) (2.8.0)\n", + "Requirement already satisfied: packaging in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msal-extensions>=0.3.0->azure-identity) (24.0)\n", + "Requirement already satisfied: portalocker<3,>=1.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msal-extensions>=0.3.0->azure-identity) (2.8.2)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msrest>=0.7.1->azure-mgmt-resource) (2024.6.2)\n", + "Requirement already satisfied: isodate>=0.6.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msrest>=0.7.1->azure-mgmt-resource) (0.6.1)\n", + "Requirement already satisfied: requests-oauthlib>=0.5.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msrest>=0.7.1->azure-mgmt-resource) (2.0.0)\n", + "Requirement already satisfied: pycparser in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from cffi>=1.12->cryptography>=2.5->azure-identity) (2.22)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-identity) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-identity) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-identity) (2.2.1)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in 
/Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests-oauthlib>=0.5.0->msrest>=0.7.1->azure-mgmt-resource) (3.2.2)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.1.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Requirement already satisfied: azure-ai-ml in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (1.16.1)\n", + "Requirement already satisfied: pyyaml>=5.1.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (6.0.1)\n", + "Requirement already satisfied: msrest>=0.6.18 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (0.7.1)\n", + "Requirement already satisfied: azure-core>=1.23.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (1.30.2)\n", + "Requirement already satisfied: azure-mgmt-core>=1.3.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (1.4.0)\n", + "Requirement already satisfied: marshmallow>=3.5 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (3.21.3)\n", + "Requirement already satisfied: jsonschema>=4.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (4.22.0)\n", + "Requirement already satisfied: tqdm in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (4.66.4)\n", + "Requirement already satisfied: strictyaml in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (1.7.3)\n", + "Requirement already satisfied: colorama in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (0.4.6)\n", + "Requirement already satisfied: 
pyjwt in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (2.8.0)\n", + "Requirement already satisfied: azure-storage-blob>=12.10.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (12.20.0)\n", + "Requirement already satisfied: azure-storage-file-share in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (12.16.0)\n", + "Requirement already satisfied: azure-storage-file-datalake>=12.2.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (12.15.0)\n", + "Requirement already satisfied: pydash>=6.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (7.0.7)\n", + "Requirement already satisfied: isodate in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (0.6.1)\n", + "Requirement already satisfied: azure-common>=1.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (1.1.28)\n", + "Requirement already satisfied: typing-extensions in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (4.12.1)\n", + "Requirement already satisfied: opencensus-ext-azure in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (1.1.13)\n", + "Requirement already satisfied: opencensus-ext-logging in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-ai-ml) (0.1.1)\n", + "Requirement already satisfied: requests>=2.21.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-core>=1.23.0->azure-ai-ml) (2.32.3)\n", + "Requirement already satisfied: six>=1.11.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-core>=1.23.0->azure-ai-ml) (1.16.0)\n", + "Requirement already satisfied: cryptography>=2.1.4 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-storage-blob>=12.10.0->azure-ai-ml) (42.0.8)\n", + "Requirement 
already satisfied: attrs>=22.2.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from jsonschema>=4.0.0->azure-ai-ml) (23.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from jsonschema>=4.0.0->azure-ai-ml) (2023.12.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from jsonschema>=4.0.0->azure-ai-ml) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from jsonschema>=4.0.0->azure-ai-ml) (0.18.1)\n", + "Requirement already satisfied: packaging>=17.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from marshmallow>=3.5->azure-ai-ml) (24.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msrest>=0.6.18->azure-ai-ml) (2024.6.2)\n", + "Requirement already satisfied: requests-oauthlib>=0.5.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msrest>=0.6.18->azure-ai-ml) (2.0.0)\n", + "Requirement already satisfied: azure-identity<2.0.0,>=1.5.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from opencensus-ext-azure->azure-ai-ml) (1.16.0)\n", + "Requirement already satisfied: opencensus<1.0.0,>=0.11.4 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from opencensus-ext-azure->azure-ai-ml) (0.11.4)\n", + "Requirement already satisfied: psutil>=5.6.3 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from opencensus-ext-azure->azure-ai-ml) (5.9.8)\n", + "Requirement already satisfied: python-dateutil>=2.6.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from strictyaml->azure-ai-ml) (2.9.0.post0)\n", + "Requirement already satisfied: msal>=1.24.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from 
azure-identity<2.0.0,>=1.5.0->opencensus-ext-azure->azure-ai-ml) (1.28.0)\n", + "Requirement already satisfied: msal-extensions>=0.3.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from azure-identity<2.0.0,>=1.5.0->opencensus-ext-azure->azure-ai-ml) (1.1.0)\n", + "Requirement already satisfied: cffi>=1.12 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from cryptography>=2.1.4->azure-storage-blob>=12.10.0->azure-ai-ml) (1.16.0)\n", + "Requirement already satisfied: opencensus-context>=0.1.3 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (0.1.3)\n", + "Requirement already satisfied: google-api-core<3.0.0,>=1.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (2.19.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-ai-ml) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-ai-ml) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests>=2.21.0->azure-core>=1.23.0->azure-ai-ml) (2.2.1)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from requests-oauthlib>=0.5.0->msrest>=0.6.18->azure-ai-ml) (3.2.2)\n", + "Requirement already satisfied: pycparser in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from cffi>=1.12->cryptography>=2.1.4->azure-storage-blob>=12.10.0->azure-ai-ml) (2.22)\n", + "Requirement already satisfied: googleapis-common-protos<2.0.dev0,>=1.56.2 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from 
google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (1.63.1)\n", + "Requirement already satisfied: protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0.dev0,>=3.19.5 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (4.25.3)\n", + "Requirement already satisfied: proto-plus<2.0.0dev,>=1.22.3 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (1.23.0)\n", + "Requirement already satisfied: google-auth<3.0.dev0,>=2.14.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (2.30.0)\n", + "Requirement already satisfied: portalocker<3,>=1.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from msal-extensions>=0.3.0->azure-identity<2.0.0,>=1.5.0->opencensus-ext-azure->azure-ai-ml) (2.8.2)\n", + "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-auth<3.0.dev0,>=2.14.1->google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (5.3.3)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-auth<3.0.dev0,>=2.14.1->google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (0.4.0)\n", + "Requirement already satisfied: rsa<5,>=3.1.4 in /Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from google-auth<3.0.dev0,>=2.14.1->google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (4.9)\n", + "Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in 
/Users/mayani/contoso-chat/.venv/lib/python3.11/site-packages (from pyasn1-modules>=0.2.1->google-auth<3.0.dev0,>=2.14.1->google-api-core<3.0.0,>=1.0.0->opencensus<1.0.0,>=0.11.4->opencensus-ext-azure->azure-ai-ml) (0.6.0)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.1.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" + ] + } + ], + "source": [ + "!pip install azure-identity azure-mgmt-resource\n", + "!pip install azure-ai-ml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup authentication and create resource" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ComputeInstance({'state': 'Running', 'last_operation': {'operation_name': 'Create', 'operation_time': '2024-07-11T19:58:01.445Z', 'operation_status': 'Succeeded', 'operation_trigger': 'User'}, 'os_image_metadata': , 'services': [{'display_name': 'Jupyter', 'endpoint_uri': 'https://mayani-gpu-ci.swedencentral.instances.azureml.ms/tree/'}, {'display_name': 'Jupyter Lab', 'endpoint_uri': 'https://mayani-gpu-ci.swedencentral.instances.azureml.ms/lab'}], 'type': 'computeinstance', 'created_on': '2024-07-11T19:57:53.325138+0000', 'provisioning_state': 'Succeeded', 'provisioning_errors': None, 'name': 'mayani-gpu-ci', 'description': None, 'tags': None, 'properties': {}, 'print_as_yaml': False, 'id': '/subscriptions/b7d41fc8-d35d-41db-92ed-1f7f1d32d4d9/resourceGroups/contchat-rg-mayani-7/providers/Microsoft.MachineLearningServices/workspaces/contoso-chat-sf-ai/computes/mayani-gpu-ci', 'Resource__source_path': '', 'base_path': '/Users/mayani/nim-on-azure/nim_azureml', 'creation_context': None, 'serialize': , 
'resource_id': None, 'location': 'swedencentral', 'size': 'Standard_NC24ads_A100_v4', 'ssh_public_access_enabled': False, 'create_on_behalf_of': None, 'network_settings': , 'ssh_settings': , 'schedules': None, 'identity': None, 'idle_time_before_shutdown': None, 'idle_time_before_shutdown_minutes': None, 'setup_scripts': None, 'enable_node_public_ip': True, 'enable_sso': True, 'enable_root_access': True, 'release_quota_on_stop': False, 'enable_os_patching': False, 'custom_applications': None, 'subnet': None})"
+     ]
+    },
+    "execution_count": 2,
+    "metadata": {},
+    "output_type": "execute_result"
+   }
+  ],
+  "source": [
+   "from azure.ai.ml import MLClient\n",
+   "from azure.identity import InteractiveBrowserCredential\n",
+   "from azure.ai.ml.entities import ComputeInstance, AmlCompute\n",
+   "import datetime\n",
+   "\n",
+   "# Set up the ML client\n",
+   "subscription_id = \"\"\n",
+   "resource_group = \"\"\n",
+   "workspace_name = \"\"\n",
+   "ml_client = MLClient(InteractiveBrowserCredential(), subscription_id, resource_group, workspace_name)\n",
+   "ci_gpu_name = \"nim-gpu-ci\"\n",
+   "# Define the compute instance with custom image\n",
+   "ci_gpu = ComputeInstance(\n",
+   "    name=ci_gpu_name,\n",
+   "    size=\"Standard_NC24ads_A100_v4\", # NC24Ads VM with Nvidia 1 A100 GPU\n",
+   ")\n",
+   "\n",
+   "# Create or update the compute instance\n",
+   "ml_client.compute.begin_create_or_update(ci_gpu).result()"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "### Deleting the resources"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 57,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "ml_client.compute.begin_delete(ci_gpu_name).wait()"
+  ]
+ }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": ".venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.9"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}