Commit

fix streaming chunk
braisedpork1964 committed Oct 24, 2024
1 parent 573c7b5 commit bc4ebf6
Showing 1 changed file with 2 additions and 2 deletions.
lagent/llms/openai.py: 2 additions & 2 deletions
@@ -267,7 +267,7 @@ def streaming(raw_response):
                     chunk_size=8192, decode_unicode=False, delimiter=b'\n'):
                 if chunk:
                     decoded = chunk.decode('utf-8')
-                    if decoded == 'data: [DONE]':
+                    if decoded.startswith('data: [DONE]'):
                         return
                     if decoded[:5] == 'data:':
                         decoded = decoded[5:]
@@ -716,7 +716,7 @@ async def streaming(raw_response):
             async for chunk in raw_response.content:
                 if chunk:
                     decoded = chunk.decode('utf-8')
-                    if decoded == 'data: [DONE]':
+                    if decoded.startswith('data: [DONE]'):
                         return
                     if decoded[:5] == 'data:':
                         decoded = decoded[5:]
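Why startswith instead of an exact comparison: the decoded chunk can carry trailing whitespace such as '\n' or '\r' depending on how the stream is split, in which case an exact equality check never matches the '[DONE]' terminator and the loop never returns. A minimal sketch of the difference, using a hypothetical raw chunk b'data: [DONE]\n' (the exact bytes a given server sends may differ):

    # Illustration only; assumes the terminator line keeps its trailing newline.
    chunk = b'data: [DONE]\n'
    decoded = chunk.decode('utf-8')

    print(decoded == 'data: [DONE]')            # False: the trailing '\n' defeats the exact match
    print(decoded.startswith('data: [DONE]'))   # True: the patched check still detects the terminator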
