Commit

(WIP): #92, #83, #64, #89 migrating to OpenAI SDK, adding Azure provider
valentinfrlch committed Nov 17, 2024
1 parent bdba949 commit b532660
Showing 5 changed files with 200 additions and 53 deletions.
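For orientation before the per-file diffs: the commit replaces hand-rolled REST handshakes with the OpenAI Python SDK's async clients and wires in Azure OpenAI as an additional provider. A minimal sketch of the validation pattern the new config flow follows (illustrative only; the function names and placeholder values below are not taken from the commit):

from openai import AsyncOpenAI, AsyncAzureOpenAI

async def validate_openai(api_key: str) -> bool:
    # Cheap authenticated call; success means the key is usable.
    client = AsyncOpenAI(api_key=api_key)
    try:
        await client.models.list()
        return True
    except Exception:
        return False

async def validate_azure(api_key: str, endpoint: str, version: str) -> bool:
    # Azure needs the resource endpoint and an API version in addition to the key.
    client = AsyncAzureOpenAI(
        api_key=api_key,
        azure_endpoint=endpoint,   # e.g. "https://<resource>.openai.azure.com/"
        api_version=version,       # e.g. "2024-10-01-preview"
    )
    try:
        await client.models.list()
        return True
    except Exception:
        return False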
23 changes: 18 additions & 5 deletions custom_components/llmvision/__init__.py
@@ -2,6 +2,9 @@
from .const import (
DOMAIN,
CONF_OPENAI_API_KEY,
CONF_AZURE_API_KEY,
CONF_AZURE_ENDPOINT,
CONF_AZURE_VERSION,
CONF_ANTHROPIC_API_KEY,
CONF_GOOGLE_API_KEY,
CONF_GROQ_API_KEY,
@@ -34,13 +37,15 @@
SENSOR_ENTITY,
)
from .calendar import SemanticIndex
from .providers import Request
from .media_handlers import MediaProcessor
import os
from datetime import timedelta
from homeassistant.util import dt as dt_util
from homeassistant.config_entries import ConfigEntry
from .providers import Request
from .media_handlers import MediaProcessor
from homeassistant.core import SupportsResponse
from homeassistant.exceptions import ServiceValidationError
from functools import partial
import logging

_LOGGER = logging.getLogger(__name__)
@@ -53,6 +58,9 @@ async def async_setup_entry(hass, entry):

# Get all entries from config flow
openai_api_key = entry.data.get(CONF_OPENAI_API_KEY)
azure_api_key = entry.data.get(CONF_AZURE_API_KEY)
azure_endpoint = entry.data.get(CONF_AZURE_ENDPOINT)
azure_version = entry.data.get(CONF_AZURE_VERSION)
anthropic_api_key = entry.data.get(CONF_ANTHROPIC_API_KEY)
google_api_key = entry.data.get(CONF_GOOGLE_API_KEY)
groq_api_key = entry.data.get(CONF_GROQ_API_KEY)
@@ -73,6 +81,9 @@ async def async_setup_entry(hass, entry):
# Create a dictionary for the entry data
entry_data = {
CONF_OPENAI_API_KEY: openai_api_key,
CONF_AZURE_API_KEY: azure_api_key,
CONF_AZURE_ENDPOINT: azure_endpoint,
CONF_AZURE_VERSION: azure_version,
CONF_ANTHROPIC_API_KEY: anthropic_api_key,
CONF_GOOGLE_API_KEY: google_api_key,
CONF_GROQ_API_KEY: groq_api_key,
@@ -96,6 +107,8 @@ async def async_setup_entry(hass, entry):

# check if the entry is the calendar entry (has retention_time)
if filtered_entry_data.get(CONF_RETENTION_TIME) is not None:
# make sure the 'llmvision' directory exists in the Home Assistant config directory
await hass.loop.run_in_executor(None, partial(os.makedirs, hass.config.path("llmvision"), exist_ok=True))
# forward the calendar entity to the platform
await hass.config_entries.async_forward_entry_setups(entry, ["calendar"])

@@ -158,13 +171,13 @@ async def _remember(hass, call, start, response):
else:
camera_name = "Unknown"

camera_name = camera_name.replace("camera.", "").replace("image.", "")
camera_name = camera_name.replace("camera.", "").replace("image.", "").capitalize()

await semantic_index.remember(
start=start,
end=dt_util.now() + timedelta(minutes=1),
label=title,
camera_name=camera_name,
label=title + " near " + camera_name if camera_name != "Unknown" else title,
camera_name=camera_name if camera_name != "Unknown" else "Image Input",
summary=response["response_text"]
)

4 changes: 2 additions & 2 deletions custom_components/llmvision/calendar.py
@@ -37,10 +37,10 @@ def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry):
self._attr_supported_features = (CalendarEntityFeature.DELETE_EVENT)
# Path to the JSON file where events are stored
self._file_path = os.path.join(
self.hass.config.path("custom_components/llmvision"), "events.json"
self.hass.config.path("llmvision"), "events.json"
)
self.hass.loop.create_task(self.async_update())

def _ensure_datetime(self, dt):
"""Ensure the input is a datetime.datetime object."""
if isinstance(dt, datetime.date) and not isinstance(dt, datetime.datetime):
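The path change above moves events.json out of the integration's own folder into a dedicated llmvision directory under the Home Assistant config directory (created in __init__.py earlier in this diff), presumably so stored events survive integration updates. A small illustrative helper, not taken from the commit:

import os

def events_file(hass, use_old_location: bool = False) -> str:
    # Both locations resolve against the Home Assistant config directory.
    folder = "custom_components/llmvision" if use_old_location else "llmvision"
    return os.path.join(hass.config.path(folder), "events.json")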
95 changes: 84 additions & 11 deletions custom_components/llmvision/config_flow.py
@@ -1,11 +1,16 @@
from openai import AsyncOpenAI, AsyncAzureOpenAI
from homeassistant import config_entries
from homeassistant.helpers.selector import selector
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.httpx_client import get_async_client
import urllib.parse
from .const import (
DOMAIN,
CONF_OPENAI_API_KEY,
CONF_AZURE_API_KEY,
CONF_AZURE_ENDPOINT,
CONF_AZURE_VERSION,
CONF_ANTHROPIC_API_KEY,
CONF_GOOGLE_API_KEY,
CONF_GROQ_API_KEY,
@@ -36,12 +41,42 @@ async def _validate_api_key(self, api_key):
_LOGGER.error("You need to provide a valid API key.")
raise ServiceValidationError("empty_api_key")
elif self.user_input["provider"] == "OpenAI":
header = {'Content-type': 'application/json',
'Authorization': 'Bearer ' + api_key}
base_url = "api.openai.com"
endpoint = "/v1/models"
payload = {}
method = "GET"
# TODO: Implement OpenAI handshake with OpenAI SDK
client = AsyncOpenAI(
api_key=api_key,
http_client=get_async_client(self.hass),
)
try:
await client.models.list()
return True
except Exception as e:
_LOGGER.error(f"Could not connect to OpenAI: {e}")
return False
elif self.user_input["provider"] == "Custom OpenAI":
client = AsyncOpenAI(
api_key=api_key,
http_client=get_async_client(self.hass),
base_url=self.user_input[CONF_CUSTOM_OPENAI_ENDPOINT]  # the SDK expects base_url for a custom endpoint
)
try:
await client.models.list()
return True
except Exception as e:
_LOGGER.error(f"Could not connect to Custom OpenAI: {e}")
return False
elif self.user_input["provider"] == "Azure":
client = AsyncAzureOpenAI(
api_key=api_key,
api_version="2024-10-01-preview",
azure_endpoint="https://llmvision-test.openai.azure.com/",
http_client=get_async_client(self.hass),
)
try:
await client.models.list()
return True
except Exception as e:
_LOGGER.error(f"Could not connect to Azure: {e}")
return False
elif self.user_input["provider"] == "Anthropic":
header = {
'x-api-key': api_key,
@@ -136,6 +171,12 @@ async def openai(self):
_LOGGER.error("Could not connect to OpenAI server.")
raise ServiceValidationError("handshake_failed")

async def azure(self):
self._validate_provider()
if not await self._validate_api_key(self.user_input[CONF_AZURE_API_KEY]):
_LOGGER.error("Could not connect to Azure server.")
raise ServiceValidationError("handshake_failed")

async def custom_openai(self):
self._validate_provider()
try:
@@ -196,6 +237,8 @@ def get_configured_providers(self):
providers.append("OpenAI")
if CONF_ANTHROPIC_API_KEY in self.hass.data[DOMAIN]:
providers.append("Anthropic")
if CONF_AZURE_API_KEY in self.hass.data[DOMAIN]:
providers.append("Azure")
if CONF_GOOGLE_API_KEY in self.hass.data[DOMAIN]:
providers.append("Google")
if CONF_LOCALAI_IP_ADDRESS in self.hass.data[DOMAIN] and CONF_LOCALAI_PORT in self.hass.data[DOMAIN]:
@@ -215,14 +258,15 @@ class llmvisionConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

async def handle_provider(self, provider):
provider_steps = {
"Event Calendar": self.async_step_semantic_index,
"OpenAI": self.async_step_openai,
"Anthropic": self.async_step_anthropic,
"Azure": self.async_step_azure,
"Custom OpenAI": self.async_step_custom_openai,
"Event Calendar": self.async_step_semantic_index,
"Google": self.async_step_google,
"Groq": self.async_step_groq,
"Ollama": self.async_step_ollama,
"LocalAI": self.async_step_localai,
"Custom OpenAI": self.async_step_custom_openai,
"Ollama": self.async_step_ollama,
"OpenAI": self.async_step_openai,
}

step_method = provider_steps.get(provider)
@@ -236,7 +280,7 @@ async def async_step_user(self, user_input=None):
data_schema = vol.Schema({
vol.Required("provider", default="Event Calendar"): selector({
"select": {
"options": ["Event Calendar", "OpenAI", "Anthropic", "Google", "Groq", "Ollama", "LocalAI", "Custom OpenAI"],
"options": ["Anthropic", "Azure", "Google", "Groq", "LocalAI", "Ollama", "OpenAI", "Custom OpenAI", "Event Calendar"],
"mode": "dropdown",
"sort": False,
"custom_value": False
@@ -338,6 +382,35 @@ async def async_step_openai(self, user_input=None):
data_schema=data_schema,
)

async def async_step_azure(self, user_input=None):
data_schema = vol.Schema({
vol.Required(CONF_AZURE_API_KEY): str,
vol.Required(CONF_AZURE_ENDPOINT, default="https://domain.openai.azure.com/"): str,
vol.Required(CONF_AZURE_VERSION, default="2024-10-01-preview"): str,
})

if user_input is not None:
# save provider to user_input
user_input["provider"] = self.init_info["provider"]
validator = Validator(self.hass, user_input)
try:
await validator.azure()
# add the mode to user_input
user_input["provider"] = self.init_info["provider"]
return self.async_create_entry(title="Azure", data=user_input)
except ServiceValidationError as e:
_LOGGER.error(f"Validation failed: {e}")
return self.async_show_form(
step_id="azure",
data_schema=data_schema,
errors={"base": "handshake_failed"}
)

return self.async_show_form(
step_id="azure",
data_schema=data_schema,
)

async def async_step_anthropic(self, user_input=None):
data_schema = vol.Schema({
vol.Required(CONF_ANTHROPIC_API_KEY): str,
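The Azure values collected in the new config step (key, endpoint, API version) are stored in the config entry and would be consumed by the provider layer (providers.py, not expanded in this view). A hypothetical sketch of such a request wrapper, using only documented OpenAI SDK calls; the function name, image handling and deployment name are assumptions:

from openai import AsyncAzureOpenAI

async def azure_vision_request(entry_data: dict, prompt: str, image_b64: str) -> str:
    # Entry-data keys match the CONF_AZURE_* constants defined in const.py.
    client = AsyncAzureOpenAI(
        api_key=entry_data["azure_api_key"],
        azure_endpoint=entry_data["azure_endpoint"],
        api_version=entry_data["azure_version"],
    )
    response = await client.chat.completions.create(
        model="gpt-4o",  # for Azure this is the deployment name; placeholder
        messages=[{
            "role": "user",
            "content": [
                {"type": "text", "text": prompt},
                {"type": "image_url",
                 "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"}},
            ],
        }],
    )
    return response.choices[0].message.content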
11 changes: 4 additions & 7 deletions custom_components/llmvision/const.py
@@ -5,6 +5,9 @@

# Configuration values from setup
CONF_OPENAI_API_KEY = 'openai_api_key'
CONF_AZURE_API_KEY = 'azure_api_key'
CONF_AZURE_ENDPOINT = 'azure_endpoint'
CONF_AZURE_VERSION = 'azure_version'
CONF_ANTHROPIC_API_KEY = 'anthropic_api_key'
CONF_GOOGLE_API_KEY = 'google_api_key'
CONF_GROQ_API_KEY = 'groq_api_key'
@@ -39,14 +42,8 @@
SENSOR_ENTITY = 'sensor_entity'

# Error messages
ERROR_OPENAI_NOT_CONFIGURED = "OpenAI is not configured"
ERROR_ANTHROPIC_NOT_CONFIGURED = "Anthropic is not configured"
ERROR_GOOGLE_NOT_CONFIGURED = "Google is not configured"
ERROR_GROQ_NOT_CONFIGURED = "Groq is not configured"
ERROR_NOT_CONFIGURED = "{provider} is not configured"
ERROR_GROQ_MULTIPLE_IMAGES = "Groq does not support videos or streams"
ERROR_LOCALAI_NOT_CONFIGURED = "LocalAI is not configured"
ERROR_OLLAMA_NOT_CONFIGURED = "Ollama is not configured"
ERROR_CUSTOM_OPENAI_NOT_CONFIGURED = "Custom OpenAI provider is not configured"
ERROR_NO_IMAGE_INPUT = "No image input provided"
ERROR_HANDSHAKE_FAILED = "Connection could not be established"
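The per-provider ERROR_*_NOT_CONFIGURED constants collapse into one template; a hypothetical call site (not part of this commit) would format it per provider:

from homeassistant.exceptions import ServiceValidationError
from .const import ERROR_NOT_CONFIGURED

def require_configured(configured: bool, provider: str) -> None:
    # Raise the consolidated error with the provider name filled in.
    if not configured:
        raise ServiceValidationError(ERROR_NOT_CONFIGURED.format(provider=provider))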

Expand Down
