streamlit.py (forked from deedy/mac_computer_use) · 360 lines (308 loc) · 12.7 KB
"""
Entrypoint for streamlit, see https://docs.streamlit.io/
"""
import asyncio
import base64
import os
import subprocess
from datetime import datetime
from enum import StrEnum
from functools import partial
from pathlib import PosixPath
from typing import cast

import streamlit as st
from anthropic import APIResponse
from anthropic.types import (
    TextBlock,
)
from anthropic.types.beta import BetaMessage, BetaTextBlock, BetaToolUseBlock
from anthropic.types.tool_use_block import ToolUseBlock
from streamlit.delta_generator import DeltaGenerator

from loop import (
    PROVIDER_TO_DEFAULT_MODEL_NAME,
    APIProvider,
    sampling_loop,
)
from tools import ToolResult
from dotenv import load_dotenv

load_dotenv()
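
# Sidebar settings (API key, custom system prompt suffix) are persisted under ~/.anthropic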
CONFIG_DIR = PosixPath("~/.anthropic").expanduser()
API_KEY_FILE = CONFIG_DIR / "api_key"

STREAMLIT_STYLE = """
<style>
    /* Hide chat input while agent loop is running */
    .stApp[data-teststate=running] .stChatInput textarea,
    .stApp[data-test-script-state=running] .stChatInput textarea {
        display: none;
    }
    /* Hide the streamlit deploy button */
    .stDeployButton {
        visibility: hidden;
    }
</style>
"""

WARNING_TEXT = ""


class Sender(StrEnum):
    USER = "user"
    BOT = "assistant"
    TOOL = "tool"


def setup_state():
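    """Initialize every st.session_state key the app relies on, if not already set."""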
if "messages" not in st.session_state:
st.session_state.messages = []
if "api_key" not in st.session_state:
# Try to load API key from file first, then environment
st.session_state.api_key = load_from_storage("api_key") or os.getenv(
"ANTHROPIC_API_KEY", ""
)
if "provider" not in st.session_state:
st.session_state.provider = (
os.getenv("API_PROVIDER", "anthropic") or APIProvider.ANTHROPIC
)
if "provider_radio" not in st.session_state:
st.session_state.provider_radio = st.session_state.provider
if "model" not in st.session_state:
_reset_model()
if "auth_validated" not in st.session_state:
st.session_state.auth_validated = False
if "responses" not in st.session_state:
st.session_state.responses = {}
if "tools" not in st.session_state:
st.session_state.tools = {}
if "only_n_most_recent_images" not in st.session_state:
st.session_state.only_n_most_recent_images = 10
if "custom_system_prompt" not in st.session_state:
st.session_state.custom_system_prompt = load_from_storage("system_prompt") or ""
if "hide_images" not in st.session_state:
st.session_state.hide_images = False
def _reset_model():
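    """Reset the selected model to the default for the current provider."""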
    st.session_state.model = PROVIDER_TO_DEFAULT_MODEL_NAME[
        cast(APIProvider, st.session_state.provider)
    ]


async def main():
"""Render loop for streamlit"""
setup_state()
st.markdown(STREAMLIT_STYLE, unsafe_allow_html=True)
st.title("Claude Computer Use for Mac")
st.markdown("""This is from [Mac Computer Use](https://github.com/deedy/mac_computer_use), a fork of [Anthropic Computer Use](https://github.com/anthropics/anthropic-quickstarts/blob/main/computer-use-demo/README.md) to work natively on Mac.""")
with st.sidebar:
def _reset_api_provider():
if st.session_state.provider_radio != st.session_state.provider:
_reset_model()
st.session_state.provider = st.session_state.provider_radio
st.session_state.auth_validated = False
provider_options = [option.value for option in APIProvider]
st.radio(
"API Provider",
options=provider_options,
key="provider_radio",
format_func=lambda x: x.title(),
on_change=_reset_api_provider,
)
st.text_input("Model", key="model")
if st.session_state.provider == APIProvider.ANTHROPIC:
st.text_input(
"Anthropic API Key",
type="password",
key="api_key",
on_change=lambda: save_to_storage("api_key", st.session_state.api_key),
)
st.number_input(
"Only send N most recent images",
min_value=0,
key="only_n_most_recent_images",
help="To decrease the total tokens sent, remove older screenshots from the conversation",
)
st.text_area(
"Custom System Prompt Suffix",
key="custom_system_prompt",
help="Additional instructions to append to the system prompt. see computer_use_demo/loop.py for the base system prompt.",
on_change=lambda: save_to_storage(
"system_prompt", st.session_state.custom_system_prompt
),
)
st.checkbox("Hide screenshots", key="hide_images")
if st.button("Reset", type="primary"):
with st.spinner("Resetting..."):
st.session_state.clear()
setup_state()
subprocess.run("pkill Xvfb; pkill tint2", shell=True) # noqa: ASYNC221
await asyncio.sleep(1)
subprocess.run("./start_all.sh", shell=True) # noqa: ASYNC221
    if not st.session_state.auth_validated:
        if auth_error := validate_auth(
            st.session_state.provider, st.session_state.api_key
        ):
            st.warning(f"Please resolve the following auth issue:\n\n{auth_error}")
            return
        else:
            st.session_state.auth_validated = True

    chat, http_logs = st.tabs(["Chat", "HTTP Exchange Logs"])
    new_message = st.chat_input(
        "Type a message to send to Claude to control the computer..."
    )

    with chat:
        # render past chats
        for message in st.session_state.messages:
            if isinstance(message["content"], str):
                _render_message(message["role"], message["content"])
            elif isinstance(message["content"], list):
                for block in message["content"]:
                    # the tool result we send back to the Anthropic API isn't sufficient to render all details,
                    # so we store the tool use responses
                    if isinstance(block, dict) and block["type"] == "tool_result":
                        _render_message(
                            Sender.TOOL, st.session_state.tools[block["tool_use_id"]]
                        )
                    else:
                        _render_message(
                            message["role"],
                            cast(BetaTextBlock | BetaToolUseBlock, block),
                        )

        # render past http exchanges
        for identity, response in st.session_state.responses.items():
            _render_api_response(response, identity, http_logs)
        # render the new user message and append it to the conversation
        if new_message:
            st.session_state.messages.append(
                {
                    "role": Sender.USER,
                    "content": [TextBlock(type="text", text=new_message)],
                }
            )
            _render_message(Sender.USER, new_message)

        try:
            most_recent_message = st.session_state["messages"][-1]
        except IndexError:
            return

        if most_recent_message["role"] is not Sender.USER:
            # we don't have a user message to respond to, exit early
            return
        with st.spinner("Running Agent..."):
            # run the agent sampling loop with the newest message
            st.session_state.messages = await sampling_loop(
                system_prompt_suffix=st.session_state.custom_system_prompt,
                model=st.session_state.model,
                provider=st.session_state.provider,
                messages=st.session_state.messages,
                output_callback=partial(_render_message, Sender.BOT),
                tool_output_callback=partial(
                    _tool_output_callback, tool_state=st.session_state.tools
                ),
                api_response_callback=partial(
                    _api_response_callback,
                    tab=http_logs,
                    response_state=st.session_state.responses,
                ),
                api_key=st.session_state.api_key,
                only_n_most_recent_images=st.session_state.only_n_most_recent_images,
            )


def validate_auth(provider: APIProvider, api_key: str | None):
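    """Return a human-readable error message if credentials for the provider are missing, else None."""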
    if provider == APIProvider.ANTHROPIC:
        if not api_key:
            return "Enter your Anthropic API key in the sidebar to continue."
    if provider == APIProvider.BEDROCK:
        import boto3

        if not boto3.Session().get_credentials():
            return "You must have AWS credentials set up to use the Bedrock API."
    if provider == APIProvider.VERTEX:
        import google.auth
        from google.auth.exceptions import DefaultCredentialsError

        if not os.environ.get("CLOUD_ML_REGION"):
            return "Set the CLOUD_ML_REGION environment variable to use the Vertex API."
        try:
            google.auth.default(
                scopes=["https://www.googleapis.com/auth/cloud-platform"],
            )
        except DefaultCredentialsError:
            return "Your google cloud credentials are not set up correctly."


def load_from_storage(filename: str) -> str | None:
    """Load data from a file in the storage directory."""
    try:
        file_path = CONFIG_DIR / filename
        if file_path.exists():
            data = file_path.read_text().strip()
            if data:
                return data
    except Exception as e:
        st.write(f"Debug: Error loading {filename}: {e}")
    return None


def save_to_storage(filename: str, data: str) -> None:
    """Save data to a file in the storage directory."""
    try:
        CONFIG_DIR.mkdir(parents=True, exist_ok=True)
        file_path = CONFIG_DIR / filename
        file_path.write_text(data)
        # Ensure only user can read/write the file
        file_path.chmod(0o600)
    except Exception as e:
        st.write(f"Debug: Error saving {filename}: {e}")


def _api_response_callback(
    response: APIResponse[BetaMessage],
    tab: DeltaGenerator,
    response_state: dict[str, APIResponse[BetaMessage]],
):
    """
    Handle an API response by storing it to state and rendering it.
    """
    response_id = datetime.now().isoformat()
    response_state[response_id] = response
    _render_api_response(response, response_id, tab)


def _tool_output_callback(
    tool_output: ToolResult, tool_id: str, tool_state: dict[str, ToolResult]
):
    """Handle a tool output by storing it to state and rendering it."""
    tool_state[tool_id] = tool_output
    _render_message(Sender.TOOL, tool_output)


def _render_api_response(
    response: APIResponse[BetaMessage], response_id: str, tab: DeltaGenerator
):
    """Render an API response to a streamlit tab"""
    with tab:
        with st.expander(f"Request/Response ({response_id})"):
            newline = "\n\n"
            st.markdown(
                f"`{response.http_request.method} {response.http_request.url}`{newline}{newline.join(f'`{k}: {v}`' for k, v in response.http_request.headers.items())}"
            )
            st.json(response.http_request.read().decode())
            st.markdown(
                f"`{response.http_response.status_code}`{newline}{newline.join(f'`{k}: {v}`' for k, v in response.headers.items())}"
            )
            st.json(response.http_response.text)


def _render_message(
    sender: Sender,
    message: str | BetaTextBlock | BetaToolUseBlock | ToolResult,
):
    """Convert input from the user or output from the agent to a streamlit message."""
    # streamlit's hotreloading breaks isinstance checks, so we need to check for class names
    is_tool_result = not isinstance(message, str) and (
        isinstance(message, ToolResult)
        or message.__class__.__name__ == "ToolResult"
        or message.__class__.__name__ == "CLIResult"
    )
    if not message or (
        is_tool_result
        and st.session_state.hide_images
        and not hasattr(message, "error")
        and not hasattr(message, "output")
    ):
        return
    with st.chat_message(sender):
        if is_tool_result:
            message = cast(ToolResult, message)
            if message.output:
                if message.__class__.__name__ == "CLIResult":
                    st.code(message.output)
                else:
                    st.markdown(message.output)
            if message.error:
                st.error(message.error)
            if message.base64_image and not st.session_state.hide_images:
                st.image(base64.b64decode(message.base64_image))
        elif isinstance(message, BetaTextBlock) or isinstance(message, TextBlock):
            st.write(message.text)
        elif isinstance(message, BetaToolUseBlock) or isinstance(message, ToolUseBlock):
            st.code(f"Tool Use: {message.name}\nInput: {message.input}")
        else:
            st.markdown(message)


if __name__ == "__main__":
    asyncio.run(main())