diff --git a/examples/chat_with_openai.py b/examples/chat_with_openai.py
index d4f221b..fa143d8 100644
--- a/examples/chat_with_openai.py
+++ b/examples/chat_with_openai.py
@@ -1,15 +1,10 @@
 from openai import OpenAI
 
+# If running this service behind a proxy, you might need to unset `http(s)_proxy`.
 base_url = "http://localhost:22222"
 api_key = "sk-xxxxx"
-
 client = OpenAI(base_url=base_url, api_key=api_key)
-
-extra_body = {
-    "invocation_id": 1,
-}
-
 response = client.chat.completions.create(
     model="precise",
     messages=[
         {
@@ -19,15 +14,12 @@
         }
     ],
     stream=True,
-    extra_body=extra_body,
 )
 
-# print(response)
 for chunk in response:
     if chunk.choices[0].delta.content is not None:
         print(chunk.choices[0].delta.content, end="", flush=True)
     elif chunk.choices[0].finish_reason == "stop":
         print()
     else:
-        # print(chunk)
         pass
diff --git a/examples/chat_with_post.py b/examples/chat_with_post.py
index 8cac32c..15d665c 100644
--- a/examples/chat_with_post.py
+++ b/examples/chat_with_post.py
@@ -7,7 +7,6 @@
 chat_api = "http://localhost:22222"
 api_key = "sk-xxxxx"
 requests_headers = {}
-
 requests_payload = {
     "model": "precise",
     "messages": [
@@ -17,7 +16,6 @@
         }
     ],
     "stream": True,
-    "invocation_id": 1,
 }
 
 with httpx.stream(
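
Note: the last hunk above ends at the opening of the `with httpx.stream(` call. For reference only, here is a minimal, hypothetical sketch of how that call might be completed and its stream consumed, assuming the service returns OpenAI-style SSE chunks (`data: {...}` lines terminated by `data: [DONE]`). The endpoint path and the Authorization header below are assumptions, not taken from the repository.

    # Hypothetical sketch, not part of the diff: consuming a streamed chat
    # completion over a raw POST request with httpx.
    import json

    import httpx

    chat_api = "http://localhost:22222"
    api_key = "sk-xxxxx"
    requests_headers = {"Authorization": f"Bearer {api_key}"}  # assumed auth header
    requests_payload = {
        "model": "precise",
        "messages": [{"role": "user", "content": "Hello!"}],
        "stream": True,
    }

    with httpx.stream(
        "POST",
        f"{chat_api}/v1/chat/completions",  # assumed endpoint path
        headers=requests_headers,
        json=requests_payload,
        timeout=None,
    ) as response:
        for line in response.iter_lines():
            # SSE chunks arrive as lines of the form: "data: {...}"
            if not line.startswith("data: "):
                continue
            data = line[len("data: "):]
            if data == "[DONE]":
                print()
                break
            chunk = json.loads(data)
            delta = chunk["choices"][0]["delta"]
            if delta.get("content"):
                print(delta["content"], end="", flush=True)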