Skip to content

Commit

Permalink
build: bump langgraph version to 0.1.*
Browse files (browse the repository at this point in the history)
fix(voice_hmi): use HumanMessage instead of string
fix(text_hmi): typo in import
Loading branch information…
maciejmajek committed Sep 17, 2024
1 parent 748c478 commit 180b14d
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 10 deletions.
12 changes: 6 additions & 6 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ langchain-aws = "^0.1.7"
langchain-openai = "^0.1.8"
langchain-community = "^0.2.4"
transforms3d = "^0.4.1"
langgraph = "^0.0.66"
langgraph = "^0.1.0"
tabulate = "^0.9.0"
lark = "^1.1.9"
langfuse = "^2.36.1"
Expand Down
4 changes: 2 additions & 2 deletions src/rai_hmi/rai_hmi/text_hmi.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
from rai.agents.state_based import get_stored_artifacts
from rai.messages import HumanMultimodalMessage
from rai.node import RaiBaseNode
from rai_hmi.agent import initlialize_agent
from rai_hmi.agent import initialize_agent
from rai_hmi.base import BaseHMINode
from rai_hmi.chat_msgs import EMOJIS, MissionMessage
from rai_hmi.ros import initialize_ros_nodes
Expand Down Expand Up @@ -71,7 +71,7 @@ def initialize_memory() -> Memory:
def initialize_agent_streamlit(
_hmi_node: BaseHMINode, _rai_node: RaiBaseNode, _memory: Memory
):
return initlialize_agent(_hmi_node, _rai_node, _memory)
return initialize_agent(_hmi_node, _rai_node, _memory)


@st.cache_resource
Expand Down
3 changes: 2 additions & 1 deletion src/rai_hmi/rai_hmi/voice_hmi.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from typing import Optional

import rclpy
from langchain_core.messages import HumanMessage
from rclpy.callback_groups import ReentrantCallbackGroup
from rclpy.executors import MultiThreadedExecutor
from std_msgs.msg import String
Expand Down Expand Up @@ -107,7 +108,7 @@ def handle_human_message(self, msg: String):
self.get_logger().info("Processing started")

# handle human message
self.history.append(msg.data)
self.history.append(HumanMessage(content=msg.data))

for state in self.agent.stream(dict(messages=self.history)):
node_name = list(state.keys())[0]
Expand Down

0 comments on commit 180b14d

Please sign in to comment.