Merge pull request #91 from devchat-ai/fix-stream-out
Use chat_completion_stream_out for streaming output
commit b7d36a3b7a
@@ -1,7 +1,7 @@
 from functools import partial
 from typing import List, Optional
 
-from devchat.llm.openai import chat_completion_stream
+from devchat.llm.chat import chat_completion_stream_out
 from find_context import Context
 from llm_conf import (
     CONTEXT_SIZE,
@@ -138,12 +138,10 @@ def write_and_print_tests(
 
     if USE_USER_MODEL:
         # Use the wrapped api
-        res = chat_completion_stream(
+        _ = chat_completion_stream_out(
             messages=[{"role": "user", "content": user_msg}],
             llm_config={"model": MODEL, "temperature": 0.1},
         )
-        if res:
-            print(res.get("content", ""))
 
     else:
         # Use the openai api parameters
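For context, a minimal runnable sketch of the difference between the two helpers, with the model backend stubbed out. Only the helper names and keyword arguments come from the diff above; the function bodies and the _fake_stream stub are illustrative assumptions, not devchat's actual implementation.

# Illustrative sketch only: NOT devchat's real code. It mimics the calling
# conventions changed in this PR, with the LLM backend replaced by a stub.
from typing import Dict, Iterator, List, Optional


def _fake_stream(messages: List[dict], llm_config: dict) -> Iterator[str]:
    # Hypothetical stand-in for the model backend: yields text chunks.
    yield from ["Streamed ", "response ", "chunks."]


def chat_completion_stream(messages: List[dict], llm_config: dict) -> Optional[Dict[str, str]]:
    # Old-style helper: collect the whole response and return it. The caller
    # must print res.get("content", ""), so nothing appears until the end.
    content = "".join(_fake_stream(messages, llm_config))
    return {"content": content} if content else None


def chat_completion_stream_out(messages: List[dict], llm_config: dict) -> Optional[Dict[str, str]]:
    # New-style helper: write each chunk to stdout as it arrives, so the
    # caller can discard the return value (hence "_ = ...") and drop the
    # trailing print.
    content = ""
    for chunk in _fake_stream(messages, llm_config):
        print(chunk, end="", flush=True)
        content += chunk
    print()
    return {"content": content} if content else None


if __name__ == "__main__":
    # Old style: output is printed only after the full response is assembled.
    res = chat_completion_stream(
        messages=[{"role": "user", "content": "hi"}],
        llm_config={"model": "some-model", "temperature": 0.1},  # placeholder model name
    )
    if res:
        print(res.get("content", ""))
    # New style: tokens stream to stdout inside the call itself.
    _ = chat_completion_stream_out(
        messages=[{"role": "user", "content": "hi"}],
        llm_config={"model": "some-model", "temperature": 0.1},  # placeholder model name
    )

The net effect of the PR is that the generated output streams incrementally as tokens arrive, instead of being printed all at once after the completion finishes.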