Commit 98bb2af

feat: add chat_output_stream

1 parent b77aff2

1 file changed: +5 −2 lines


libs/core/llmstudio_core/providers/provider.py

Lines changed: 5 additions & 2 deletions
@@ -328,6 +328,7 @@ def handle_response(
             chunk = chunk[0] if isinstance(chunk, tuple) else chunk
             model = chunk.get("model")
             if chunk.get("choices")[0].get("finish_reason") != "stop":
+                chat_output = chunk.get("choices")[0].get("delta").get("content")
                 chunk = {
                     **chunk,
                     "id": str(uuid.uuid4()),
@@ -336,7 +337,8 @@ def handle_response(
                         if isinstance(request.chat_input, str)
                         else request.chat_input[-1]["content"]
                     ),
-                    "chat_output": chunk.get("choices")[0].get("delta").get("content"),
+                    "chat_output": None,
+                    "chat_output_stream": chat_output if chat_output else "",
                     "context": (
                         [{"role": "user", "content": request.chat_input}]
                         if isinstance(request.chat_input, str)
@@ -383,7 +385,8 @@ def handle_response(
                         if isinstance(request.chat_input, str)
                         else request.chat_input[-1]["content"]
                     ),
-                    "chat_output": "" if request.is_stream else output_string,
+                    "chat_output": output_string,
+                    "chat_output_stream": "",
                     "context": (
                         [{"role": "user", "content": request.chat_input}]
                         if isinstance(request.chat_input, str)
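
The net effect of the change: while streaming, each yielded chunk now reports the incremental delta text in "chat_output_stream" (with "chat_output" set to None), and the final chunk carries the complete output string in "chat_output" with an empty "chat_output_stream". A minimal consumer-side sketch follows; it assumes only the chunk fields visible in the diff above, and the collect_response helper plus the iterable of chunk dicts are illustrative, not part of the library API.

def collect_response(chunks):
    """Rebuild the full answer from an iterable of chunk dicts.

    Assumes the field layout introduced in this commit: streaming chunks set
    "chat_output_stream" and leave "chat_output" as None; the final chunk
    sets "chat_output" to the complete output string.
    """
    pieces = []
    final_output = None
    for chunk in chunks:
        piece = chunk.get("chat_output_stream")
        if piece:
            pieces.append(piece)
        if chunk.get("chat_output"):  # populated only on the final chunk
            final_output = chunk["chat_output"]
    # Prefer the provider's final string; fall back to joining the deltas.
    return final_output if final_output is not None else "".join(pieces)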
