import json
from typing import List

import tiktoken

from openai_util import create_chat_completion_content
from prompts import PROPOSE_TEST_PROMPT
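
# PROPOSE_TEST_PROMPT (imported from prompts above) must expose the named
# placeholders used in propose_test below. A hypothetical sketch of its shape
# (the real template lives in prompts.py and may read differently):
#
#   PROPOSE_TEST_PROMPT = (
#       "Propose test cases for `{function_name}` in {file_path}.\n"
#       "Request: {user_prompt}\n"
#       "Source:\n{function_content}\n"
#       'Answer in {chat_language} as a JSON object: '
#       '{{"test_cases": [{{"description": "..."}}]}}'
#   )
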
MODEL = "gpt-3.5-turbo-1106"
|
|
|
|
# MODEL = "gpt-4-1106-preview"
|
2023-12-18 20:46:16 +08:00
|
|
|
|
|
|
|
|
|
|
|
def propose_test(
|
|
|
|
repo_root: str,
|
|
|
|
user_prompt: str,
|
|
|
|
function_name: str,
|
2023-12-18 22:21:12 +08:00
|
|
|
function_content: str,
|
2023-12-18 20:46:16 +08:00
|
|
|
file_path: str,
|
2023-12-24 17:31:35 +08:00
|
|
|
chat_language: str = "English",
|
2023-12-18 20:46:16 +08:00
|
|
|
) -> List[str]:
|
|
|
|
"""Propose test cases for a specified function based on a user prompt
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_prompt (str): The prompt or description for which test cases need to be generated.
|
|
|
|
function_name (str): The name of the function to generate test cases for.
|
|
|
|
file_path (str): The absolute path to the file containing the target function for which
|
|
|
|
test cases will be generated.
|
2023-12-18 22:21:12 +08:00
|
|
|
|
2023-12-18 20:46:16 +08:00
|
|
|
Returns:
|
|
|
|
List[str]: A list of test case descriptions.
|
|
|
|
"""

    encoding: tiktoken.Encoding = tiktoken.encoding_for_model(MODEL)
    # gpt-3.5-turbo-1106 has a 16K-token context window; budget only 90% of it
    # rather than filling the window completely.
    token_budget = 16000 * 0.9

    user_msg = PROPOSE_TEST_PROMPT.format(
        user_prompt=user_prompt,
        function_name=function_name,
        file_path=file_path,
        function_content=function_content,
        chat_language=chat_language,
    )

    tokens = len(encoding.encode(user_msg))
    if tokens > token_budget:
        # The function is annotated -> List[str], so raise instead of
        # returning a bare error string.
        raise ValueError(
            f"Token budget exceeded while generating test cases. ({tokens}/{token_budget})"
        )

    # create_chat_completion_content (from openai_util) is expected to return
    # the assistant message content as a JSON string.
    content = create_chat_completion_content(
        model=MODEL,
        messages=[{"role": "user", "content": user_msg}],
        response_format={"type": "json_object"},
        temperature=0.1,
    )

    # Expected response shape: {"test_cases": [{"description": "..."}]};
    # tolerate missing keys.
    cases = json.loads(content).get("test_cases", [])

    descriptions = []
    for case in cases:
        description = case.get("description")
        if description:
            descriptions.append(description)

    return descriptions
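
# A minimal usage sketch (hypothetical values: the repository path, prompt
# text, and target function below are illustrative, not part of this module):
if __name__ == "__main__":
    proposed = propose_test(
        repo_root="/path/to/repo",
        user_prompt="Cover empty input and unicode edge cases.",
        function_name="slugify",
        function_content="def slugify(text: str) -> str: ...",
        file_path="/path/to/repo/utils/strings.py",
    )
    for description in proposed:
        print("-", description)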