diff --git a/.github/workflows/upload-pypi-dev.yml b/.github/workflows/upload-pypi-dev.yml index 1f92a5e2..de5fcaca 100644 --- a/.github/workflows/upload-pypi-dev.yml +++ b/.github/workflows/upload-pypi-dev.yml @@ -74,14 +74,23 @@ jobs: run: | echo "VERSION=$(poetry version --short)" >> $GITHUB_ENV - - name: Build and tag Docker image + # Build and tag Docker images with both :latest and :[NEW_VERSION] + - name: Proxy Build and tag Docker images + working-directory: ./deploy run: | - docker build \ - --build-arg LLMSTUDIO_VERSION=${{ env.VERSION }} \ - -t tensoropsai/llmstudio:${{ env.VERSION }} \ - . + make version=${{ env.VERSION }} build-proxy - - name: Push Docker image to Docker Hub + # Build and tag Docker images with both :latest and :[NEW_VERSION] + - name: Tracker Build and tag Docker images + working-directory: ./deploy run: | - docker push tensoropsai/llmstudio:${{ env.VERSION }} - \ No newline at end of file + make version=${{ env.VERSION }} build-tracker + + # Push both Docker images to Docker Hub + - name: Push Proxy Docker images to Docker Hub + run: | + docker push tensoropsai/llmstudio-proxy:${{ env.VERSION }} + # Push both Docker images to Docker Hub + - name: Push Tracker Docker images to Docker Hub + run: | + docker push tensoropsai/llmstudio-tracker:${{ env.VERSION }} diff --git a/.github/workflows/upload-pypi.yml b/.github/workflows/upload-pypi.yml index 07c7af36..5964128e 100644 --- a/.github/workflows/upload-pypi.yml +++ b/.github/workflows/upload-pypi.yml @@ -72,17 +72,24 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} # Build and tag Docker images with both :latest and :[NEW_VERSION] - - name: Build and tag Docker images + - name: Proxy Build and tag Docker images + working-directory: ./deploy run: | - docker build \ - --build-arg LLMSTUDIO_VERSION=${{ env.VERSION }} \ - -t tensoropsai/llmstudio:latest \ - -t tensoropsai/llmstudio:${{ env.VERSION }} \ - . 
+ make version=${{ env.VERSION }} build-proxy + # Build and tag Docker images with both :latest and :[NEW_VERSION] + - name: Tracker Build and tag Docker images + working-directory: ./deploy + run: | + make version=${{ env.VERSION }} build-tracker + + # Push both Docker images to Docker Hub + - name: Push Proxy Docker images to Docker Hub + run: | + docker push tensoropsai/llmstudio-proxy:${{ env.VERSION }} + docker push tensoropsai/llmstudio-proxy:latest # Push both Docker images to Docker Hub - - name: Push Docker images to Docker Hub + - name: Push Tracker Docker images to Docker Hub run: | - docker push tensoropsai/llmstudio:${{ env.VERSION }} - docker push tensoropsai/llmstudio:latest - \ No newline at end of file + docker push tensoropsai/llmstudio-tracker:${{ env.VERSION }} + docker push tensoropsai/llmstudio-tracker:latest diff --git a/README.md b/README.md index 11fc5a66..7204747d 100644 --- a/README.md +++ b/README.md @@ -26,14 +26,14 @@ Don't forget to check out [https://docs.llmstudio.ai](docs) page. Install the latest version of **LLMstudio** using `pip`. We suggest that you create and activate a new environment using `conda` +For full version: ```bash -pip install llmstudio +pip install 'llmstudio[proxy,tracker]' ``` -Install `bun` if you want to use the UI - +For lightweight (core) version: ```bash -curl -fsSL https://bun.sh/install | bash +pip install llmstudio ``` Create a `.env` file at the same path you'll run **LLMstudio** @@ -41,15 +41,18 @@ Create a `.env` file at the same path you'll run **LLMstudio** ```bash OPENAI_API_KEY="sk-api_key" ANTHROPIC_API_KEY="sk-api_key" +VERTEXAI_KEY="sk-api-key" ``` Now you should be able to run **LLMstudio** using the following command. 
```bash -llmstudio server --ui +llmstudio server --proxy --tracker ``` -When the `--ui` flag is set, you'll be able to access the UI at [http://localhost:3000](http://localhost:3000) +When the `--proxy` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50001/docs (default port)](http://0.0.0.0:50001/docs) + +When the `--tracker` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50002/docs (default port)](http://0.0.0.0:50002/docs) ## πŸ“– Documentation diff --git a/deploy/Makefile b/deploy/Makefile new file mode 100644 index 00000000..9eb6bc2e --- /dev/null +++ b/deploy/Makefile @@ -0,0 +1,18 @@ +build-proxy: + docker build --build-arg LLMSTUDIO_VERSION=$(version) \ + -t tensoropsai/llmstudio-proxy:latest \ + -t tensoropsai/llmstudio-proxy:$(version) \ + -f proxy.Dockerfile \ + . + +build-tracker: + docker build --build-arg LLMSTUDIO_VERSION=$(version) \ + -t tensoropsai/llmstudio-tracker:latest \ + -t tensoropsai/llmstudio-tracker:$(version) \ + -f tracker.Dockerfile \ + . 
+ +build: build-tracker build-proxy + +run: + docker compose -f docker-compose.yml up diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml new file mode 100644 index 00000000..bcb8a08d --- /dev/null +++ b/deploy/docker-compose.yml @@ -0,0 +1,17 @@ +version: "3.8" + +services: + llmstudio-proxy: + image: tensoropsai/llmstudio-proxy + restart: always + env_file: + - .env + ports: + - 8001:50001 + llmstudio-tracking: + image: tensoropsai/llmstudio-tracker + restart: always + env_file: + - .env + ports: + - 8002:50002 \ No newline at end of file diff --git a/deploy/proxy.Dockerfile b/deploy/proxy.Dockerfile new file mode 100644 index 00000000..535e2b75 --- /dev/null +++ b/deploy/proxy.Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.11-slim +ENV PYTHONUNBUFFERED=1 + +# Install tools +RUN apt-get clean && apt-get update + +# Install llmstudio +ARG LLMSTUDIO_VERSION +RUN pip install 'llmstudio[proxy]'==${LLMSTUDIO_VERSION} + +CMD ["llmstudio", "server", "--proxy"] diff --git a/deploy/tracker.Dockerfile b/deploy/tracker.Dockerfile new file mode 100644 index 00000000..c9f80678 --- /dev/null +++ b/deploy/tracker.Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.11-slim +ENV PYTHONUNBUFFERED=1 + +# Install tools +RUN apt-get clean && apt-get update + +# Install llmstudio +ARG LLMSTUDIO_VERSION +RUN pip install 'llmstudio[tracker]'==${LLMSTUDIO_VERSION} +RUN pip install psycopg2-binary + +CMD ["llmstudio", "server", "--tracker"] diff --git a/libs/llmstudio/README.md b/libs/llmstudio/README.md index 11fc5a66..7204747d 100644 --- a/libs/llmstudio/README.md +++ b/libs/llmstudio/README.md @@ -26,14 +26,14 @@ Don't forget to check out [https://docs.llmstudio.ai](docs) page. Install the latest version of **LLMstudio** using `pip`. 
We suggest that you create and activate a new environment using `conda` +For full version: ```bash -pip install llmstudio +pip install 'llmstudio[proxy,tracker]' ``` -Install `bun` if you want to use the UI - +For lightweight (core) version: ```bash -curl -fsSL https://bun.sh/install | bash +pip install llmstudio ``` Create a `.env` file at the same path you'll run **LLMstudio** @@ -41,15 +41,18 @@ Create a `.env` file at the same path you'll run **LLMstudio** ```bash OPENAI_API_KEY="sk-api_key" ANTHROPIC_API_KEY="sk-api_key" +VERTEXAI_KEY="sk-api-key" ``` Now you should be able to run **LLMstudio** using the following command. ```bash -llmstudio server --ui +llmstudio server --proxy --tracker ``` -When the `--ui` flag is set, you'll be able to access the UI at [http://localhost:3000](http://localhost:3000) +When the `--proxy` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50001/docs (default port)](http://0.0.0.0:50001/docs) + +When the `--tracker` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50002/docs (default port)](http://0.0.0.0:50002/docs) ## πŸ“– Documentation diff --git a/libs/llmstudio/pyproject.toml b/libs/llmstudio/pyproject.toml index 99c7b83e..a481e6fa 100644 --- a/libs/llmstudio/pyproject.toml +++ b/libs/llmstudio/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "llmstudio" -version = "1.0.0" +version = "1.0.1" description = "Prompt Perfection at Your Fingertips" authors = ["ClΓ‘udio Lemos "] license = "MIT" @@ -20,8 +20,8 @@ python-dotenv = "^0" langchain = "^0" langchain-experimental = "^0" llmstudio-core = "1.0.0" -llmstudio-tracker = { version = "1.0.0", optional = true } -llmstudio-proxy = { version = "1.0.0", optional = true } +llmstudio-tracker = { version = "1.0.1", optional = true } +llmstudio-proxy = { version = "1.0.1", optional = true } [tool.poetry.extras] proxy = ["llmstudio-proxy"] diff --git a/libs/proxy/README.md b/libs/proxy/README.md index 11fc5a66..6584d1ef 100644 --- 
a/libs/proxy/README.md +++ b/libs/proxy/README.md @@ -43,13 +43,13 @@ OPENAI_API_KEY="sk-api_key" ANTHROPIC_API_KEY="sk-api_key" ``` -Now you should be able to run **LLMstudio** using the following command. +Now you should be able to run **LLMstudio Proxy** using the following command. ```bash -llmstudio server --ui +llmstudio server --proxy ``` -When the `--ui` flag is set, you'll be able to access the UI at [http://localhost:3000](http://localhost:3000) +When the `--proxy` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50001/docs (default port)](http://0.0.0.0:50001/docs) ## πŸ“– Documentation diff --git a/libs/proxy/llmstudio_proxy/config.py b/libs/proxy/llmstudio_proxy/config.py index 9cb813f2..37abde70 100644 --- a/libs/proxy/llmstudio_proxy/config.py +++ b/libs/proxy/llmstudio_proxy/config.py @@ -21,7 +21,7 @@ def assign_port(default_port=None): defaults = { - "LLMSTUDIO_ENGINE_HOST": "localhost", + "LLMSTUDIO_ENGINE_HOST": "0.0.0.0", "LLMSTUDIO_ENGINE_PORT": str(assign_port(50001)), } diff --git a/libs/proxy/pyproject.toml b/libs/proxy/pyproject.toml index 9b8d6840..1688a42c 100644 --- a/libs/proxy/pyproject.toml +++ b/libs/proxy/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "llmstudio-proxy" -version = "1.0.0" +version = "1.0.1" description = "" authors = ["Diogo Goncalves "] readme = "README.md" diff --git a/libs/tracker/README.md b/libs/tracker/README.md index 11fc5a66..40e4726c 100644 --- a/libs/tracker/README.md +++ b/libs/tracker/README.md @@ -27,29 +27,22 @@ Don't forget to check out [https://docs.llmstudio.ai](docs) page. Install the latest version of **LLMstudio** using `pip`. 
We suggest that you create and activate a new environment using `conda` ```bash -pip install llmstudio -``` - -Install `bun` if you want to use the UI - -```bash -curl -fsSL https://bun.sh/install | bash +pip install 'llmstudio[tracker]' ``` Create a `.env` file at the same path you'll run **LLMstudio** ```bash OPENAI_API_KEY="sk-api_key" -ANTHROPIC_API_KEY="sk-api_key" ``` -Now you should be able to run **LLMstudio** using the following command. +Now you should be able to run **LLMstudio Tracker** using the following command. ```bash -llmstudio server --ui +llmstudio server --tracker ``` -When the `--ui` flag is set, you'll be able to access the UI at [http://localhost:3000](http://localhost:3000) +When the `--tracker` flag is set, you'll be able to access the [Swagger at http://0.0.0.0:50002/docs (default port)](http://0.0.0.0:50002/docs) ## πŸ“– Documentation diff --git a/libs/tracker/llmstudio_tracker/config.py b/libs/tracker/llmstudio_tracker/config.py index 0a98c5db..3cbb397f 100644 --- a/libs/tracker/llmstudio_tracker/config.py +++ b/libs/tracker/llmstudio_tracker/config.py @@ -21,7 +21,7 @@ def assign_port(default_port=None): defaults = { - "LLMSTUDIO_TRACKING_HOST": "localhost", + "LLMSTUDIO_TRACKING_HOST": "0.0.0.0", "LLMSTUDIO_TRACKING_PORT": str(assign_port(50002)), "LLMSTUDIO_TRACKING_URI": "sqlite:///./llmstudio_mgmt.db", } diff --git a/libs/tracker/pyproject.toml b/libs/tracker/pyproject.toml index dde6a8da..4eef9ef4 100644 --- a/libs/tracker/pyproject.toml +++ b/libs/tracker/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "llmstudio-tracker" -version = "1.0.0" +version = "1.0.1" description = "" authors = ["Diogo Goncalves "] readme = "README.md"