Skip to content

Commit

Permalink
[CLEANUP] Bump version to 0.1.6; rename `max_completion_tokens` to `max_tokens` in LiteLLMModel.run and tidy formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
Your Name committed Nov 28, 2024
1 parent 494563e commit d82c47c
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 9 deletions.
2 changes: 1 addition & 1 deletion litellm_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

model = LiteLLMModel()
output = model.run("hey")
print(output)
print(output)
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "swarm-models"
version = "0.1.5"
version = "0.1.6"
description = "Swarm Models - Pytorch"
license = "MIT"
authors = ["Kye Gomez <[email protected]>"]
Expand Down
24 changes: 17 additions & 7 deletions swarm_models/lite_llm_model.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,20 @@
from litellm import completion, acompletion
from loguru import logger


class LiteLLMModel:
"""
This class represents a LiteLLMModel.
It is used to interact with the LLM model for various tasks.
"""

def __init__(
self,
model_name: str = "gpt-4o",
system_prompt: str = None,
stream: bool = False,
temperature: float = 0.5,
max_tokens: int = 4000,
):
"""
Initialize the LiteLLMModel with the given parameters.
Expand All @@ -20,35 +23,42 @@ def __init__(
self.system_prompt = system_prompt
self.stream = stream
self.temperature = temperature
self.max_tokens = max_tokens

def _prepare_messages(self, task: str) -> list:
"""
Prepare the messages for the given task.
"""
messages = []

if self.system_prompt: # Check if system_prompt is not None
messages.append({"role": "system", "content": self.system_prompt})

messages.append(
{"role": "system", "content": self.system_prompt}
)

messages.append({"role": "user", "content": task})

return messages

def run(self, task: str, *args, **kwargs):
    """
    Run the LLM model for the given task and return its text output.

    Args:
        task (str): The user prompt.
        *args: Extra positional arguments forwarded to ``completion``.
        **kwargs: Extra keyword arguments forwarded to ``completion``.

    Returns:
        str: The message content of the first choice in the response.
        (Presumably a string per the OpenAI-style response schema —
        not verifiable from this chunk.)
    """
    messages = self._prepare_messages(task)

    # The rendered diff shows both the removed `max_completion_tokens`
    # and the added `max_tokens` keyword; only the added-side
    # `max_tokens` is kept, matching this commit's intent.
    # *args is unpacked before the keyword arguments (same call
    # semantics as the original, but the conventional ordering).
    response = completion(
        *args,
        model=self.model_name,
        messages=messages,
        stream=self.stream,
        temperature=self.temperature,
        max_tokens=self.max_tokens,
        **kwargs,
    )

    # Extract the text of the first choice (single extraction; the
    # diff residue duplicated this line).
    content = response.choices[0].message.content
    return content

def __call__(self, task: str, *args, **kwargs):
Expand Down

0 comments on commit d82c47c

Please sign in to comment.