diff --git a/unit_tests/assistants/directory_structure/relevant_file_finder.py b/unit_tests/assistants/directory_structure/relevant_file_finder.py
index a4dbc52..1aa3acb 100644
--- a/unit_tests/assistants/directory_structure/relevant_file_finder.py
+++ b/unit_tests/assistants/directory_structure/relevant_file_finder.py
@@ -16,7 +16,7 @@ from openai_util import create_chat_completion_content
 from tools.directory_viewer import ListViewer
 from tools.tiktoken_util import get_encoding
 
-MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
+MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"  # "gpt-3.5-turbo"
 ENCODING = (
     get_encoding(DEFAULT_ENCODING)  # Use default encoding as an approximation
     if USE_USER_MODEL
diff --git a/unit_tests/propose_test.py b/unit_tests/propose_test.py
index 21347e4..f356ce4 100644
--- a/unit_tests/propose_test.py
+++ b/unit_tests/propose_test.py
@@ -16,7 +16,7 @@ from openai_util import create_chat_completion_content
 from prompts import PROPOSE_TEST_PROMPT
 from tools.tiktoken_util import get_encoding
 
-MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
+MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"  # "gpt-3.5-turbo"
 ENCODING = (
     get_encoding(DEFAULT_ENCODING)  # Use default encoding as an approximation
     if USE_USER_MODEL
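
For context, here is a minimal standalone sketch of how the patched MODEL / ENCODING selection is expected to resolve. The llm_conf constants and tools.tiktoken_util.get_encoding are stubbed with assumed values; the user-model name and the non-user-model encoding branch are illustrative assumptions, since the hunks end before that branch.

import tiktoken

# Assumed stand-ins for the llm_conf values imported by both patched modules.
USE_USER_MODEL = False             # True when a user-configured model is in use
USER_LLM_MODEL = "my-local-model"  # hypothetical user-configured model name
DEFAULT_ENCODING = "cl100k_base"


def get_encoding(name: str) -> tiktoken.Encoding:
    # Stand-in for tools.tiktoken_util.get_encoding.
    return tiktoken.get_encoding(name)


# Same selection pattern as the patched lines: fall back to the hosted
# "gpt-4-turbo-preview" (previously "gpt-3.5-turbo") when no user model is set.
MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
ENCODING = (
    get_encoding(DEFAULT_ENCODING)  # Use default encoding as an approximation
    if USE_USER_MODEL
    # Assumed else branch; the hunks in this patch cut off before it.
    else tiktoken.encoding_for_model(MODEL)
)

print(MODEL, ENCODING.name)  # gpt-4-turbo-preview cl100k_base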