Refactor code to use llmApiBase for completion endpoint

bobo.yang 2024-06-04 13:23:42 +08:00
parent 4eea931db1
commit a05e6caee1
2 changed files with 17 additions and 2 deletions

@@ -186,7 +186,11 @@ export async function * ollamaDeepseekComplete(prompt: string) : AsyncGenerator<
 export async function * devchatComplete(prompt: string) : AsyncGenerator<CodeCompletionChunk> {
     const devchatEndpoint = DevChatConfig.getInstance().get("providers.devchat.api_base");
-    const completionApiBase = devchatEndpoint + "/completions";
+    const llmApiBase = DevChatConfig.getInstance().get("complete_api_base");
+    let completionApiBase = devchatEndpoint + "/completions";
+    if (llmApiBase) {
+        completionApiBase = llmApiBase + "/completions";
+    }
     let model = DevChatConfig.getInstance().get("complete_model");
     if (!model) {
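
The hunk above makes the completion endpoint configurable: a dedicated complete_api_base setting now takes precedence, and the DevChat provider endpoint is only the fallback. A minimal standalone sketch of that resolution order, assuming a plain key/value map in place of DevChatConfig (the resolveCompletionApiBase helper and the example URLs are hypothetical; the key names are taken from the diff):

// Sketch only: mirrors the fallback order introduced in the hunk above.
function resolveCompletionApiBase(config: Map<string, string | undefined>): string {
    const devchatEndpoint = config.get("providers.devchat.api_base");
    const llmApiBase = config.get("complete_api_base");
    // Prefer the dedicated completion API base when it is set...
    if (llmApiBase) {
        return llmApiBase + "/completions";
    }
    // ...otherwise fall back to the DevChat provider endpoint.
    return (devchatEndpoint ?? "") + "/completions";
}

// Example: with complete_api_base set, the provider endpoint is ignored.
const cfg = new Map<string, string | undefined>([
    ["providers.devchat.api_base", "https://devchat.example/api"],
    ["complete_api_base", "https://llm.example/v1"],
]);
console.log(resolveCompletionApiBase(cfg)); // -> https://llm.example/v1/completions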
@@ -221,10 +225,17 @@ export async function * devchatComplete(prompt: string) : AsyncGenerator<CodeCom
     const endTimeLLM = process.hrtime(startTimeLLM);
     const durationLLM = endTimeLLM[0] + endTimeLLM[1] / 1e9;
-    logger.channel()?.debug(`LLM first chunk took ${durationLLM} seconds`);
+    logger.channel()?.debug(`LLM api post took ${durationLLM} seconds`);
     let hasFirstLine = false;
+    let hasFirstChunk = false;
     for await (const chunk of stream) {
+        if (!hasFirstChunk) {
+            hasFirstChunk = true;
+            const endTimeFirstChunk = process.hrtime(startTimeLLM);
+            const durationFirstChunk = endTimeFirstChunk[0] + endTimeFirstChunk[1] / 1e9;
+            logger.channel()?.debug(`LLM first chunk took ${durationFirstChunk} seconds`);
+        }
         const chunkDataText = decoder.decode(chunk).trim();
         // split chunkText by "data: ", for example:
         // data: 123 data: 456 will split to ["", "data: 123 ", "data: 456"]
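
This hunk splits the timing into two measurements: the existing log line now reports how long the API post took to return a stream, and a new hasFirstChunk flag records time-to-first-chunk once iteration starts. A small self-contained sketch of the same pattern over an async iterable, using a fake stream in place of the real completion response (every name here is illustrative except process.hrtime):

// Sketch: measure time-to-first-chunk over an async iterable with process.hrtime,
// following the pattern added in the hunk above.
async function* fakeStream(): AsyncGenerator<string> {
    yield "hello";   // stand-in for streamed completion chunks
    yield " world";
}

async function consumeWithTiming(): Promise<void> {
    const start = process.hrtime();
    let hasFirstChunk = false;
    for await (const chunk of fakeStream()) {
        if (!hasFirstChunk) {
            hasFirstChunk = true;
            const elapsed = process.hrtime(start);
            const seconds = elapsed[0] + elapsed[1] / 1e9;
            console.log(`first chunk took ${seconds} seconds`);
        }
        // ...each chunk would be decoded and parsed here, as in devchatComplete...
        void chunk;
    }
}

consumeWithTiming();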

@@ -677,6 +677,10 @@ export async function createPrompt(filePath: string, fileContent: string, line:
     }
     if (completeModel.indexOf("deepseek") > -1) {
         prompt = "<fim▁begin>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<fim▁hole>" + suffix + "<fim▁end>";
     } else if (completeModel.indexOf("starcoder") > -1) {
         prompt = "<fim_prefix>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
+    } else if (completeModel.indexOf("codestral") > -1) {
+        prompt = "<s>[SUFFIX]" + suffix + "[PREFIX]" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix;
     } else {
         prompt = "<fim_prefix>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
     }
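
The added branch gives Codestral its own fill-in-the-middle layout, with the suffix placed before the prefix ("<s>[SUFFIX]...[PREFIX]..."), while starcoder and the default case share the <fim_prefix>/<fim_suffix>/<fim_middle> template. A hedged sketch of just that template selection, with the many context strings collapsed into a single context parameter (the buildFimPrompt helper is hypothetical; the token strings are copied from the diff):

// Sketch: choose a fill-in-the-middle template from the model name,
// mirroring the if/else chain above. All context strings are collapsed into `context`.
function buildFimPrompt(model: string, context: string, prefix: string, suffix: string): string {
    if (model.indexOf("deepseek") > -1) {
        return "<fim▁begin>" + context + prefix + "<fim▁hole>" + suffix + "<fim▁end>";
    } else if (model.indexOf("codestral") > -1) {
        // Codestral expects the suffix before the prefix.
        return "<s>[SUFFIX]" + suffix + "[PREFIX]" + context + prefix;
    } else {
        // starcoder and the default case use the same template.
        return "<fim_prefix>" + context + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
    }
}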