Merge pull request #527 from devchat-ai/log_completion_time
Chore: Optimize InlineCompletionProvider for Better Performance
Commit ee6baee369
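Summary of the change as reflected in the diff below: codeComplete gains timing instrumentation — a process.hrtime() timestamp is taken at the start of the method and again before the LLM streaming call, both elapsed times are logged through logger.channel()?.debug(...), and two reads of the complete_debug setting are removed.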
@@ -120,9 +120,9 @@ export class InlineCompletionProvider implements vscode.InlineCompletionItemProvider {
     // }

     async codeComplete(document: vscode.TextDocument, position: vscode.Position, context: vscode.InlineCompletionContext, token: vscode.CancellationToken): Promise<CodeCompleteResult | undefined> {
+        const startTime = process.hrtime();
         GitDiffWatcher.getInstance().tryRun();
-        const completeDebug = this.devchatConfig.get("complete_debug");

         // create prompt
         const fsPath = document.uri.fsPath;
         const fileContent = document.getText();
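For context, process.hrtime() returns a [seconds, nanoseconds] tuple, and process.hrtime(start) returns the time elapsed since start in the same form; that is why the later hunk computes endTime[0] + endTime[1] / 1e9 to get fractional seconds. A minimal sketch of the same measure-and-log pattern, assuming a generic async work function and plain console.debug in place of the extension's logger (the timed helper is hypothetical and not part of this commit):

// Hypothetical helper, not part of this commit: wraps an async operation
// with the same process.hrtime() measurement the diff performs inline.
async function timed<T>(label: string, work: () => Promise<T>): Promise<T> {
    const start = process.hrtime();
    const result = await work();
    const [sec, nanosec] = process.hrtime(start);      // elapsed [seconds, nanoseconds]
    const elapsed = sec + nanosec / 1e9;               // convert to fractional seconds
    console.debug(`${label} took ${elapsed} seconds`); // the commit uses logger.channel()?.debug(...)
    return result;
}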
@@ -154,12 +154,21 @@ export class InlineCompletionProvider implements vscode.InlineCompletionItemProvider {
            return undefined;
        }

+        const endTime = process.hrtime(startTime);
+        const duration = endTime[0] + endTime[1] / 1e9;
+        logger.channel()?.debug(`Make prompt took ${duration} seconds`);
+        const startTimeLLM = process.hrtime();
+
        const completor = new LLMStreamComplete(token, lines, position.line, position.character);
        const response = await completor.llmStreamComplete(prompt);
        if (!response || response.code.length === 0) {
            return undefined;
        }

+        const endTimeLLM = process.hrtime(startTimeLLM);
+        const durationLLM = endTimeLLM[0] + endTimeLLM[1] / 1e9;
+        logger.channel()?.debug(`LLMStreamComplete took ${durationLLM} seconds`);
+
        if (token.isCancellationRequested) {
            return undefined;
        }
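As a worked example (numbers illustrative, not from the commit): if process.hrtime(startTimeLLM) returned [1, 250000000], the logged duration would be 1 + 250000000 / 1e9 = 1.25 seconds.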
@@ -170,8 +179,6 @@ export class InlineCompletionProvider implements vscode.InlineCompletionItemProvider {
    }

    async provideInlineCompletionItems(document: vscode.TextDocument, position: vscode.Position, context: vscode.InlineCompletionContext, token: vscode.CancellationToken): Promise<vscode.InlineCompletionItem[] | null> {
-        const completeDebug = this.devchatConfig.get("complete_debug");
-
        const result = await this.debouncer.debounce();
        if (!result) {
            return [];