From f2084d63743fa8ff5d59e651c54128dfa55d79a1 Mon Sep 17 00:00:00 2001 From: XiangRao <131976564+ricardrao@users.noreply.github.com> Date: Mon, 24 Jun 2024 13:40:41 +0800 Subject: [PATCH] Modifying DBCopilot to support managed Identity (#3075) * Fix DBCopilot Vulnerabilities * fix * fix * fix * fix --- .../data_ingestion_db_to_acs/spec.yaml | 19 ++++++++++--------- .../data_ingestion_db_to_faiss/spec.yaml | 19 ++++++++++--------- .../spec.yaml | 19 ++++++++++--------- .../spec.yaml | 19 ++++++++++--------- .../llm_dbcopilot_create_promptflow/spec.yaml | 2 +- .../llm_dbcopilot_deploy_endpoint/spec.yaml | 2 +- .../llm_dbcopilot_grounding/spec.yaml | 2 +- .../spec.yaml | 2 +- .../components/src/endpoint_deployment.py | 4 ++-- .../components/src/prompt_flow_creation.py | 4 ++-- .../dbcopilot_embeddings/context/Dockerfile | 12 ++++++++---- .../context/requirements.txt | 2 +- .../dbcopilot_mir/context/Dockerfile | 13 ++----------- .../dbcopilot_mir/context/requirements.txt | 2 +- 14 files changed, 60 insertions(+), 61 deletions(-) diff --git a/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml index 6f3456bb21..57ae88c614 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml @@ -4,7 +4,7 @@ tags: Preview: "" name: llm_ingest_db_to_acs display_name: LLM - SQL Datastore to ACS Pipeline -version: 0.0.87 +version: 0.0.88 description: Single job pipeline to chunk data from AzureML sql data store, and create ACS embeddings index settings: default_compute: serverless @@ -109,10 +109,11 @@ inputs: type: string optional: true description: "The instruct template for the LLM." 
- user_managed_identity_client_id: - type: string + managed_identity_enabled: + type: boolean + default: false optional: true - description: "The user identity client_id for the deployment." + description: "Whether to connect using managed identity." outputs: grounding_index: type: uri_folder @@ -152,8 +153,8 @@ jobs: type: uri_folder output_grounding_context_file: ${{parent.outputs.db_context}} environment_variables: - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} - component: "azureml:llm_dbcopilot_grounding:0.0.61" + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} + component: "azureml:llm_dbcopilot_grounding:0.0.62" type: command generate_meta_embeddings: type: command @@ -220,7 +221,7 @@ jobs: ######################################### db_sample_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.36" + component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -301,7 +302,7 @@ jobs: environment_variables: AZUREML_WORKSPACE_CONNECTION_ID_AOAI_EMBEDDING: ${{parent.inputs.embedding_connection}} AZUREML_WORKSPACE_CONNECTION_ID_AOAI_CHAT: ${{parent.inputs.llm_connection}} - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -335,5 +336,5 @@ jobs: path: ${{parent.inputs.include_views}} instruct_template: path: ${{parent.inputs.instruct_template}} - component: "azureml:llm_dbcopilot_create_promptflow:0.0.61" + component: "azureml:llm_dbcopilot_create_promptflow:0.0.62" type: command diff --git a/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml
b/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml index 1d9208dd56..0117ca7ea0 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml @@ -4,7 +4,7 @@ tags: Preview: "" name: llm_ingest_db_to_faiss display_name: LLM - SQL Datastore to FAISS Pipeline -version: 0.0.87 +version: 0.0.88 description: Single job pipeline to chunk data from AzureML sql data store, and create FAISS embeddings index settings: default_compute: serverless @@ -99,10 +99,11 @@ inputs: type: string optional: true description: "The instruct template for the LLM." - user_managed_identity_client_id: - type: string + managed_identity_enabled: + type: boolean + default: false optional: true - description: "The user identity client_id for the deployment." + description: "Whether to connect using managed identity." outputs: grounding_index: type: uri_folder @@ -142,8 +143,8 @@ jobs: type: uri_folder output_grounding_context_file: ${{parent.outputs.db_context}} environment_variables: - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} - component: "azureml:llm_dbcopilot_grounding:0.0.61" + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} + component: "azureml:llm_dbcopilot_grounding:0.0.62" type: command generate_meta_embeddings: type: command @@ -208,7 +209,7 @@ jobs: ######################################### db_sample_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.36" + component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -285,7 +286,7 @@ jobs: environment_variables: AZUREML_WORKSPACE_CONNECTION_ID_AOAI_EMBEDDING: ${{parent.inputs.embedding_connection}}
AZUREML_WORKSPACE_CONNECTION_ID_AOAI_CHAT: ${{parent.inputs.llm_connection}} - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -319,5 +320,5 @@ jobs: path: ${{parent.inputs.include_views}} instruct_template: path: ${{parent.inputs.instruct_template}} - component: "azureml:llm_dbcopilot_create_promptflow:0.0.61" + component: "azureml:llm_dbcopilot_create_promptflow:0.0.62" type: command diff --git a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml index c6453cc7d4..6900aa64f4 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js type: pipeline name: llm_ingest_dbcopilot_acs_e2e -version: 0.0.55 +version: 0.0.56 display_name: Data Ingestion for DB Data Output to ACS E2E Deployment description: Single job pipeline to chunk data from AzureML DB Datastore and create acs embeddings index @@ -122,10 +122,11 @@ inputs: type: string optional: true description: "The instruct template for the LLM." - user_managed_identity_client_id: - type: string + managed_identity_enabled: + type: boolean + default: false optional: true - description: "The user identity client_id for the deployment." + description: "Whether to connect using managed identity."
outputs: grounding_index: type: uri_folder @@ -135,7 +136,7 @@ jobs: ######################################### db_meta_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding:0.0.61" + component: "azureml:llm_dbcopilot_grounding:0.0.62" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -160,7 +161,7 @@ jobs: type: uri_folder path: ${{parent.outputs.db_context}} environment_variables: - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} ######################################### generate_meta_embeddings: type: command @@ -207,7 +208,7 @@ jobs: ######################################### db_sample_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.36" + component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -269,7 +270,7 @@ jobs: ######################################### endpoint_deployment_job: type: command - component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.36" + component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -309,4 +310,4 @@ jobs: environment_variables: AZUREML_WORKSPACE_CONNECTION_ID_AOAI_EMBEDDING: ${{parent.inputs.embedding_connection}} AZUREML_WORKSPACE_CONNECTION_ID_AOAI_CHAT: ${{parent.inputs.llm_connection}} - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} diff --git a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml 
b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml index 41e8125b30..19a81f4d8c 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js type: pipeline name: llm_ingest_dbcopilot_faiss_e2e -version: 0.0.55 +version: 0.0.56 display_name: Data Ingestion for DB Data Output to FAISS E2E Deployment description: Single job pipeline to chunk data from AzureML DB Datastore and create faiss embeddings index @@ -112,10 +112,11 @@ inputs: type: string optional: true description: "The instruct template for the LLM." - user_managed_identity_client_id: - type: string + managed_identity_enabled: + type: boolean + default: false optional: true - description: "The user identity client_id for the deployment." + description: "Whether to connect using managed identity."
outputs: grounding_index: type: uri_folder @@ -125,7 +126,7 @@ jobs: ######################################### db_meta_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding:0.0.61" + component: "azureml:llm_dbcopilot_grounding:0.0.62" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -150,7 +151,7 @@ jobs: type: uri_folder path: ${{parent.outputs.db_context}} environment_variables: - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} ######################################### generate_meta_embeddings: type: command @@ -195,7 +196,7 @@ jobs: ######################################### db_sample_loading_generator: type: command - component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.36" + component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -253,7 +254,7 @@ jobs: ######################################### endpoint_deployment_job: type: command - component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.36" + component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.37" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -293,4 +294,4 @@ jobs: environment_variables: AZUREML_WORKSPACE_CONNECTION_ID_AOAI_EMBEDDING: ${{parent.inputs.embedding_connection}} AZUREML_WORKSPACE_CONNECTION_ID_AOAI_CHAT: ${{parent.inputs.llm_connection}} - USER_MANAGED_IDENTITY_CLIENT_ID: ${{parent.inputs.user_managed_identity_client_id}} + MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}} diff --git a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_create_promptflow/spec.yaml 
b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_create_promptflow/spec.yaml index f9b4cb2c46..63f44b0eda 100644 --- a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_create_promptflow/spec.yaml +++ b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_create_promptflow/spec.yaml @@ -4,7 +4,7 @@ tags: Preview: "" name: llm_dbcopilot_create_promptflow display_name: LLM - Create DBCopilot Prompt Flow -version: 0.0.61 +version: 0.0.62 inputs: index_name: type: string diff --git a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_deploy_endpoint/spec.yaml b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_deploy_endpoint/spec.yaml index 293953c749..1f51cd6925 100644 --- a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_deploy_endpoint/spec.yaml +++ b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_deploy_endpoint/spec.yaml @@ -3,7 +3,7 @@ type: command tags: {} name: llm_dbcopilot_deploy_endpoint display_name: LLM - DBCopilot Deploy Endpoint Component -version: 0.0.36 +version: 0.0.37 inputs: deployment_name: type: string diff --git a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding/spec.yaml b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding/spec.yaml index 3492371ca0..0a4d840cf5 100644 --- a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding/spec.yaml +++ b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding/spec.yaml @@ -3,7 +3,7 @@ type: command tags: Preview: "" name: llm_dbcopilot_grounding -version: 0.0.61 +version: 0.0.62 inputs: asset_uri: type: string diff --git a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding_ground_samples/spec.yaml b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding_ground_samples/spec.yaml index 549be8fd40..c0ab0c926f 100644 --- 
a/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding_ground_samples/spec.yaml +++ b/assets/large_language_models/dbcopilot/components/llm_dbcopilot_grounding_ground_samples/spec.yaml @@ -3,7 +3,7 @@ type: command tags: {} name: llm_dbcopilot_grounding_ground_samples display_name: LLM - DBCopilot Grounding Ground Samples Component -version: 0.0.36 +version: 0.0.37 inputs: grounding_context: type: uri_folder diff --git a/assets/large_language_models/dbcopilot/components/src/endpoint_deployment.py b/assets/large_language_models/dbcopilot/components/src/endpoint_deployment.py index 7875e83a21..1e014c08f9 100644 --- a/assets/large_language_models/dbcopilot/components/src/endpoint_deployment.py +++ b/assets/large_language_models/dbcopilot/components/src/endpoint_deployment.py @@ -104,7 +104,7 @@ def deploy( logging.info("dumped secrets to secrets.json") with open(os.path.join(code_dir, "configs.json"), "w") as f: json.dump([asdict(config)], f) - user_managed_identity_client_id = os.getenv("USER_MANAGED_IDENTITY_CLIENT_ID", None) + managed_identity_enabled = os.getenv("MANAGED_IDENTITY_ENABLED", None) self._deploy_endpoint( mir_environment, endpoint_name, @@ -113,7 +113,7 @@ def deploy( score_script="score_zero.py", extra_environment_variables={ "INSTRUCT_TEMPLATE": instruct_template, - "USER_MANAGED_IDENTITY_CLIENT_ID": user_managed_identity_client_id, + "MANAGED_IDENTITY_ENABLED": managed_identity_enabled, }, sku=sku, ) diff --git a/assets/large_language_models/dbcopilot/components/src/prompt_flow_creation.py b/assets/large_language_models/dbcopilot/components/src/prompt_flow_creation.py index 44949ec892..eb035939c2 100644 --- a/assets/large_language_models/dbcopilot/components/src/prompt_flow_creation.py +++ b/assets/large_language_models/dbcopilot/components/src/prompt_flow_creation.py @@ -59,7 +59,7 @@ def create( datastore_uri = get_datastore_uri(workspace, asset_uri) logging.info(f"Datastore uri: {datastore_uri}") - 
user_managed_identity_client_id = os.environ.get("USER_MANAGED_IDENTITY_CLIENT_ID", None) + managed_identity_enabled = os.environ.get("MANAGED_IDENTITY_ENABLED", None) embedding_connection_id = os.environ.get( "AZUREML_WORKSPACE_CONNECTION_ID_AOAI_EMBEDDING", None ) @@ -127,7 +127,7 @@ def create( "AZUREML_WORKSPACE_NAME": self.workspace.name, "AZUREML_SUBSCRIPTION_ID": self.workspace.subscription_id, "AZUREML_RESOURCE_GROUP": self.workspace.resource_group, - "USER_MANAGED_IDENTITY_CLIENT_ID": user_managed_identity_client_id, + "MANAGED_IDENTITY_ENABLED": managed_identity_enabled, } base_run = pf_client.run( flow=flow, diff --git a/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/Dockerfile b/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/Dockerfile index 4d2a281852..ee8c9ab2e0 100644 --- a/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/Dockerfile +++ b/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/Dockerfile @@ -8,8 +8,12 @@ RUN apt-get update && apt-get install -y \ curl \ gnupg \ unixodbc-dev \ - git=1:2.25.1-1ubuntu3.12\ - git-man=1:2.25.1-1ubuntu3.12 &&\ + libc-bin=2.31-0ubuntu9.16 \ + libc6-dev=2.31-0ubuntu9.16 \ + libc6=2.31-0ubuntu9.16 \ + libc-dev-bin=2.31-0ubuntu9.16 \ + git \ + git-man &&\ rm -rf /var/lib/apt/lists/* # Install MS SQL ODBC Driver @@ -35,8 +39,8 @@ RUN /bin/bash -c "source /opt/miniconda/etc/profile.d/conda.sh && \ conda activate $AZUREML_CONDA_ENVIRONMENT_PATH && \ pip install --upgrade pip && \ pip install -r requirements.txt && \ - pip install promptflow-vectordb==0.2.9 && \ - pip install https://ragsample.blob.core.windows.net/ragdata/wheels/dbcopilot/db_copilot_tool-0.1.22-py3-none-any.whl &&\ + pip install promptflow-vectordb==0.2.10 && \ + pip install https://ragsample.blob.core.windows.net/ragdata/wheels/dbcopilot/db_copilot_tool-0.1.23-py3-none-any.whl &&\ pip install cryptography==42.0.5 
langchain==0.1.11 idna==3.7 sqlparse==0.5.0 gunicorn==22.0.0 Werkzeug==3.0.3 requests==2.32.0" # Fix vunerabilities diff --git a/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/requirements.txt b/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/requirements.txt index c3656cd08a..e64fafebf5 100644 --- a/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/requirements.txt +++ b/assets/large_language_models/dbcopilot/environments/dbcopilot_embeddings/context/requirements.txt @@ -1,4 +1,4 @@ -azureml-rag[cognitive_search,data_generation]==0.2.29.1 +azureml-rag[cognitive_search,data_generation]==0.2.34 azureml-contrib-services azure-identity==1.14.0 azureml-core~=1.53.0 diff --git a/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/Dockerfile b/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/Dockerfile index d74df64e7f..2edfbd2f47 100644 --- a/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/Dockerfile +++ b/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/Dockerfile @@ -9,15 +9,6 @@ RUN apt-get update \ && curl https://packages.microsoft.com/config/ubuntu/22.04/prod.list | tee /etc/apt/sources.list.d/mssql-release.list \ && apt-get update \ && ACCEPT_EULA=Y apt-get install -y msodbcsql18=18.3.3.1-1 \ - && apt-get install -y libpam0g=1.4.0-11ubuntu2.4 \ - && apt-get install -y libexpat1=2.4.7-1ubuntu0.3 \ - && apt-get install -y bash=5.1-6ubuntu1.1 \ - && apt-get install -y libuuid1=2.37.2-4ubuntu3.4 \ - && apt-get install -y libblkid1=2.37.2-4ubuntu3.4\ - && apt-get install -y util-linux=2.37.2-4ubuntu3.4 \ - && apt-get install -y mount=2.37.2-4ubuntu3.4 \ - && apt-get install -y libsmartcols1=2.37.2-4ubuntu3.4 \ - && apt-get install -y libmount1=2.37.2-4ubuntu3.4 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* @@ -34,9 +25,9 @@ RUN python -m pip install --upgrade pip && \ 
pip install -r requirements.txt && \ # Install promptflow environment pip install promptflow-image-bundle[azure]==0.3.0 && \ - pip install promptflow-vectordb==0.2.9 && \ + pip install promptflow-vectordb==0.2.10 && \ ## Install dbcopilot - pip install https://ragsample.blob.core.windows.net/ragdata/wheels/dbcopilot/db_copilot_tool-0.1.22-py3-none-any.whl && \ + pip install https://ragsample.blob.core.windows.net/ragdata/wheels/dbcopilot/db_copilot_tool-0.1.23-py3-none-any.whl && \ ## Fix vulnerabilities pip install idna==3.7 diff --git a/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/requirements.txt b/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/requirements.txt index 2e0a8ab05b..0aaf99a0a7 100644 --- a/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/requirements.txt +++ b/assets/large_language_models/dbcopilot/environments/dbcopilot_mir/context/requirements.txt @@ -1,4 +1,4 @@ -azureml-rag[cognitive_search,data_generation]==0.2.29.1 +azureml-rag[cognitive_search,data_generation]==0.2.34 plotly~=5.13.1 azure-kusto-data==4.2.0 recognizers-text-suite~=1.0.2a2