From 686c355b98adc7287b2d00dd8fff02b9954278eb Mon Sep 17 00:00:00 2001
From: kagami
Date: Wed, 24 Apr 2024 21:03:50 +0800
Subject: [PATCH] Use chat_completion_stream_out for streaming output

---
 unit_tests/write_tests.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/unit_tests/write_tests.py b/unit_tests/write_tests.py
index ff7d7fb..058f606 100644
--- a/unit_tests/write_tests.py
+++ b/unit_tests/write_tests.py
@@ -1,7 +1,7 @@
 from functools import partial
 from typing import List, Optional
 
-from devchat.llm.openai import chat_completion_stream
+from devchat.llm.chat import chat_completion_stream_out
 from find_context import Context
 from llm_conf import (
     CONTEXT_SIZE,
@@ -138,12 +138,10 @@ def write_and_print_tests(
 
     if USE_USER_MODEL:
         # Use the wrapped api
-        res = chat_completion_stream(
+        _ = chat_completion_stream_out(
            messages=[{"role": "user", "content": user_msg}],
            llm_config={"model": MODEL, "temperature": 0.1},
        )
-        if res:
-            print(res.get("content", ""))
     else:
         # Use the openai api parameters