mirror of
https://github.com/nexy7574/LCC-bot.git
synced 2024-09-19 18:16:34 +01:00
Fix ollama being unable to decode partial chunk
This commit is contained in:
parent
ed277d9f1f
commit
c531975569
1 changed files with 3 additions and 3 deletions
|
@@ -93,10 +93,10 @@ async def ollama_stream_reader(response: httpx.Response) -> typing.AsyncGenerato
     dict[str, str | int | bool], None
 ]:
     print("Starting to iterate over ollama response %r..." % response, file=sys.stderr)
-    async for chunk in response.aiter_bytes():
-        # Each chunk is a JSON string
+    async for chunk in response.aiter_lines():
+        # Each line is a JSON string
         try:
-            loaded = json.loads(chunk.strip().decode("utf-8", "replace"))
+            loaded = json.loads(chunk)
             print("Loaded chunk: %r" % loaded)
             yield loaded
         except json.JSONDecodeError as e:
|
|
Loading…
Reference in a new issue