Skip to content

Commit

Permalink
Upgrade to latest Textual, make faster
Browse files · Browse the repository at this point in the history
  • Loading branch information
darrenburns committed Sep 8, 2024
1 parent 2e055df commit 7d21f7d
Show file tree
Hide file tree
Showing 4 changed files with 26 additions and 13 deletions.
11 changes: 10 additions & 1 deletion elia_chat/elia.scss
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,16 @@ Chatbox {
}

Footer {
background: $main-darken-1 0%;
background: transparent;

& FooterKey {
background: transparent;
}

.footer-key--key {
color: greenyellow;
background: transparent;
}
}

Footer > .footer--highlight {
Expand Down
17 changes: 10 additions & 7 deletions elia_chat/widgets/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,13 +195,16 @@ async def stream_agent_response(self) -> None:
"role": "assistant",
}
now = datetime.datetime.now(datetime.timezone.utc)
message = ChatMessage(message=ai_message, model=model, timestamp=now)

message = ChatMessage(message=ai_message, model=model, timestamp=now)
response_chatbox = Chatbox(
message=message,
model=self.chat_data.model,
classes="response-in-progress",
)
self.post_message(self.AgentResponseStarted())
self.app.call_from_thread(self.chat_container.mount, response_chatbox)

assert (
self.chat_container is not None
), "Textual has mounted container at this point in the lifecycle."
Expand All @@ -212,20 +215,20 @@ async def stream_agent_response(self) -> None:
chunk = cast(ModelResponse, chunk)
response_chatbox.border_title = "Agent is responding..."

if chunk_count == 0:
self.post_message(self.AgentResponseStarted())
await self.chat_container.mount(response_chatbox)

chunk_content = chunk.choices[0].delta.content
if isinstance(chunk_content, str):
response_chatbox.append_chunk(chunk_content)
self.app.call_from_thread(
response_chatbox.append_chunk, chunk_content
)
else:
break

scroll_y = self.chat_container.scroll_y
max_scroll_y = self.chat_container.max_scroll_y
if scroll_y in range(max_scroll_y - 3, max_scroll_y + 1):
self.chat_container.scroll_end(animate=False)
self.app.call_from_thread(
self.chat_container.scroll_end, animate=False
)

chunk_count += 1
except Exception:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ authors = [
{ name = "Darren Burns", email = "[email protected]" }
]
dependencies = [
"textual[syntax]==0.62",
"textual[syntax]==0.79.1",
"sqlmodel>=0.0.9",
"humanize>=4.6.0",
"click>=8.1.6",
Expand Down
9 changes: 5 additions & 4 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 7d21f7d

Please sign in to comment.