Skip to content

Commit

Permalink
feat(LLM): add official support for ChatOllama model.
Browse files Browse the repository at this point in the history
  • Loading branch information
RobRoyce committed Oct 10, 2024
1 parent df7fdeb commit 1134473
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 6 deletions.
3 changes: 2 additions & 1 deletion demo.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ DEVELOPMENT=${DEVELOPMENT:-false}
case "$(uname)" in
Linux*|Darwin*)
echo "Enabling X11 forwarding..."
export DISPLAY=host.docker.internal:0
xhost +
;;
MINGW*|CYGWIN*|MSYS*)
Expand Down Expand Up @@ -66,4 +67,4 @@ docker run -it --rm --name $CONTAINER_NAME \
# Disable X11 forwarding
xhost -

exit 0
exit 0
4 changes: 2 additions & 2 deletions src/rosa/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,6 @@
# limitations under the License.

from .prompts import RobotSystemPrompts
from .rosa import ROSA
from .rosa import ROSA, ChatModel

__all__ = ["ROSA", "RobotSystemPrompts"]
__all__ = ["ROSA", "RobotSystemPrompts", "ChatModel"]
7 changes: 5 additions & 2 deletions src/rosa/rosa.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,19 +23,22 @@
from langchain_community.callbacks import get_openai_callback
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama import ChatOllama
from langchain_openai import AzureChatOpenAI, ChatOpenAI

from .prompts import RobotSystemPrompts, system_prompts
from .tools import ROSATools

ChatModel = Union[ChatOpenAI, AzureChatOpenAI, ChatOllama]


class ROSA:
"""ROSA (Robot Operating System Agent) is a class that encapsulates the logic for interacting with ROS systems
using natural language.
Args:
ros_version (Literal[1, 2]): The version of ROS that the agent will interact with.
llm (Union[AzureChatOpenAI, ChatOpenAI]): The language model to use for generating responses.
        llm (ChatModel): The language model to use for generating responses. One of ChatOpenAI, AzureChatOpenAI, or ChatOllama.
tools (Optional[list]): A list of additional LangChain tool functions to use with the agent.
tool_packages (Optional[list]): A list of Python packages containing LangChain tool functions to use.
prompts (Optional[RobotSystemPrompts]): Custom prompts to use with the agent.
Expand Down Expand Up @@ -63,7 +66,7 @@ class ROSA:
def __init__(
self,
ros_version: Literal[1, 2],
llm: Union[AzureChatOpenAI, ChatOpenAI],
llm: ChatModel,
tools: Optional[list] = None,
tool_packages: Optional[list] = None,
prompts: Optional[RobotSystemPrompts] = None,
Expand Down
11 changes: 10 additions & 1 deletion src/turtle_agent/scripts/turtle_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@
import pyinputplus as pyip
import rospy
from langchain.agents import tool, Tool
from rich.console import Group # Add this import
# from langchain_ollama import ChatOllama
from rich.console import Console
from rich.console import Group
from rich.live import Live
from rich.markdown import Markdown
from rich.panel import Panel
Expand All @@ -48,6 +49,14 @@ def __init__(self, streaming: bool = False, verbose: bool = True):
self.__blacklist = ["master", "docker"]
self.__prompts = get_prompts()
self.__llm = get_llm(streaming=streaming)

# self.__llm = ChatOllama(
# base_url="host.docker.internal:11434",
# model="llama3.1",
# temperature=0,
# num_ctx=8192,
# )

self.__streaming = streaming

# Another method for adding tools
Expand Down

0 comments on commit 1134473

Please sign in to comment.