Merge pull request #91 from devchat-ai/fix-stream-out

Use chat_completion_stream_out for streaming output
boob.yang 2024-04-24 21:23:29 +08:00 committed by GitHub
commit b7d36a3b7a

@@ -1,7 +1,7 @@
 from functools import partial
 from typing import List, Optional
-from devchat.llm.openai import chat_completion_stream
+from devchat.llm.chat import chat_completion_stream_out
 from find_context import Context
 from llm_conf import (
     CONTEXT_SIZE,
@@ -138,12 +138,10 @@ def write_and_print_tests(
     if USE_USER_MODEL:
         # Use the wrapped api
-        res = chat_completion_stream(
+        _ = chat_completion_stream_out(
             messages=[{"role": "user", "content": user_msg}],
             llm_config={"model": MODEL, "temperature": 0.1},
         )
-        if res:
-            print(res.get("content", ""))
     else:
         # Use the openai api parameters
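
For context, a short sketch of the before/after call patterns implied by this diff. The keyword arguments (messages, llm_config) are taken from the hunk above; the prompt text and MODEL value below are placeholders, and the behavior of chat_completion_stream_out (writing the streamed tokens out itself, so its return value can be discarded) is inferred from the commit title rather than verified against devchat's source.

# Before/after sketch, assuming the devchat wrappers accept the keyword
# arguments shown in the diff; user_msg and MODEL are placeholder values.
from devchat.llm.openai import chat_completion_stream      # old wrapper: returns the full result
from devchat.llm.chat import chat_completion_stream_out    # new wrapper: streams output itself

user_msg = "Write unit tests for the selected function."   # placeholder prompt
MODEL = "gpt-3.5-turbo"                                     # placeholder model name

# Old pattern: wait for the complete response, then print its content.
res = chat_completion_stream(
    messages=[{"role": "user", "content": user_msg}],
    llm_config={"model": MODEL, "temperature": 0.1},
)
if res:
    print(res.get("content", ""))

# New pattern: the wrapper streams tokens to the output as they arrive,
# so the return value is discarded and no explicit print is needed.
_ = chat_completion_stream_out(
    messages=[{"role": "user", "content": user_msg}],
    llm_config={"model": MODEL, "temperature": 0.1},
)

The practical effect, if this reading is right, is that generated text appears incrementally in the workflow's output rather than all at once after the request completes, which matches the branch name fix-stream-out.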