
Commit ac23ef9

o1 compatibility
piEsposito committed Dec 27, 2024
1 parent 03543a2 commit ac23ef9
Showing 2 changed files with 12 additions and 9 deletions.
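
This commit renames the `max_tokens` request parameter to `max_completion_tokens` in the OpenAI backend. OpenAI's o1-family reasoning models reject the legacy `max_tokens` parameter on the chat completions endpoint, while `max_completion_tokens` is accepted by both o1 and earlier chat models. A minimal sketch of the underlying call, assuming a recent openai v1.x Python SDK (the model name and prompt below are placeholders, not taken from this repo):

    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment

    # o1-family models reject the legacy `max_tokens` kwarg, so the
    # request caps output length with `max_completion_tokens` instead.
    response = client.chat.completions.create(
        model="o1-mini",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello."}],
        max_completion_tokens=256,
    )
    print(response.choices[0].message.content)
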
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "tiny-ai-client"
-version = "0.0.13"
+version = "0.0.14"
 description = "Tiny AI client for LLMs. As simple as it gets."
 authors = ["piEsposito <[email protected]>"]
 license = "Apache 2.0"
19 changes: 11 additions & 8 deletions tiny_ai_client/openai_.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
-from typing import Any, Callable, Dict, List, Union, Generator, AsyncGenerator
+from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Union
 
 from openai import AsyncOpenAI, OpenAI

@@ -74,7 +75,7 @@ def call_llm_provider(
         if temperature is not None:
             kwargs["temperature"] = temperature
         if max_new_tokens is not None:
-            kwargs["max_tokens"] = max_new_tokens
+            kwargs["max_completion_tokens"] = max_new_tokens
         if self.tools_json:
             kwargs["tools"] = self.tools_json
         response = self.client.with_options(timeout=timeout).chat.completions.create(
@@ -109,12 +110,12 @@ def stream(
         if temperature is not None:
             kwargs["temperature"] = temperature
         if max_new_tokens is not None:
-            kwargs["max_tokens"] = max_new_tokens
+            kwargs["max_completion_tokens"] = max_new_tokens
         if self.tools_json:
             kwargs["tools"] = self.tools_json
 
         model_input = self.build_model_input(chat)
 
         stream = self.client.with_options(timeout=timeout).chat.completions.create(
             model=self.model_name,
             messages=model_input,
@@ -137,7 +138,7 @@ async def async_call_llm_provider(
         if temperature is not None:
             kwargs["temperature"] = temperature
         if max_new_tokens is not None:
-            kwargs["max_tokens"] = max_new_tokens
+            kwargs["max_completion_tokens"] = max_new_tokens
         if self.tools_json:
             kwargs["tools"] = self.tools_json
         response = await self.async_client.with_options(
@@ -174,13 +175,15 @@ async def astream(
         if temperature is not None:
             kwargs["temperature"] = temperature
         if max_new_tokens is not None:
-            kwargs["max_tokens"] = max_new_tokens
+            kwargs["max_completion_tokens"] = max_new_tokens
         if self.tools_json:
             kwargs["tools"] = self.tools_json
 
         model_input = self.build_model_input(chat)
 
-        stream = await self.async_client.with_options(timeout=timeout).chat.completions.create(
+        stream = await self.async_client.with_options(
+            timeout=timeout
+        ).chat.completions.create(
             model=self.model_name,
             messages=model_input,
             stream=True,
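
For context, a hypothetical end-to-end usage sketch: the AI entry point and its keyword arguments are assumed from this diff's call sites (model_name, max_new_tokens) and are not verified against the package's README:

    from tiny_ai_client import AI

    # Hypothetical usage sketch: constructor kwargs inferred from this diff.
    # max_new_tokens is now forwarded to OpenAI as max_completion_tokens,
    # which o1-family models accept.
    ai = AI(model_name="o1-mini", max_new_tokens=256)
    print(ai("What is the meaning of life?"))
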
