Use gpt4-turbo for unit_tests as default
parent ccc1d97c90
commit 6a1fdc829e
@@ -16,7 +16,7 @@ from openai_util import create_chat_completion_content
 from tools.directory_viewer import ListViewer
 from tools.tiktoken_util import get_encoding
 
-MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-3.5-turbo"
+MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
 ENCODING = (
     get_encoding(DEFAULT_ENCODING)  # Use default encoding as an approximation
     if USE_USER_MODEL
@@ -8,7 +8,7 @@ from llm_conf import (
 )
 from openai_util import create_chat_completion_content
 
-MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-3.5-turbo"
+MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
 
 # ruff: noqa: E501
 
@@ -16,13 +16,13 @@ from openai_util import create_chat_completion_content
 from prompts import PROPOSE_TEST_PROMPT
 from tools.tiktoken_util import get_encoding
 
-MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-3.5-turbo"
+MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
 ENCODING = (
     get_encoding(DEFAULT_ENCODING)  # Use default encoding as an approximation
     if USE_USER_MODEL
     else get_encoding("cl100k_base")
 )
-TOKEN_BUDGET = int(CONTEXT_SIZE.get(MODEL, DEFAULT_CONTEXT_SIZE) * 0.9)
+TOKEN_BUDGET = int(CONTEXT_SIZE.get(MODEL, DEFAULT_CONTEXT_SIZE) * 0.95)
 
 
 def _mk_user_msg(
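In short, the default (non-user) model moves from gpt-3.5-turbo to gpt-4-turbo-preview in the three modules touched above, and the prompt token budget grows from 90% to 95% of the model's context size. Below is a minimal sketch of how these constants resolve after the change, assuming CONTEXT_SIZE maps model names to context-window sizes in tokens and DEFAULT_CONTEXT_SIZE is the fallback; the concrete numbers are illustrative, not taken from llm_conf.

# Sketch only: mirrors the constant resolution above, with illustrative values.
USE_USER_MODEL = False
USER_LLM_MODEL = "my-local-model"                  # hypothetical user-configured model
CONTEXT_SIZE = {"gpt-4-turbo-preview": 128_000}    # assumed mapping, in tokens
DEFAULT_CONTEXT_SIZE = 4_000                       # assumed fallback size

MODEL = USER_LLM_MODEL if USE_USER_MODEL else "gpt-4-turbo-preview"
# 95% of the model's context window is now reserved for the prompt.
TOKEN_BUDGET = int(CONTEXT_SIZE.get(MODEL, DEFAULT_CONTEXT_SIZE) * 0.95)

print(MODEL, TOKEN_BUDGET)  # gpt-4-turbo-preview 121600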