From ac4f9d68022a468ad863feecbd2da888bf7dad48 Mon Sep 17 00:00:00 2001 From: "bobo.yang" Date: Mon, 8 May 2023 20:21:57 +0800 Subject: [PATCH] add useHistoryPrompts config --- package.json | 6 ++++++ src/toolwrapper/devchat.ts | 8 +++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index cf57df7..72ca370 100644 --- a/package.json +++ b/package.json @@ -53,6 +53,12 @@ "description": "token for each prompt", "when": "DevChat.llmModel == 'OpenAI'" }, + "DevChat.OpenAI.useHistoryPrompt": { + "type": "boolean", + "default": true, + "description": "use history prompts as context", + "when": "DevChat.llmModel == 'OpenAI'" + }, "DevChat.OpenAI.apiKey": { "type": "string", "default": "", diff --git a/src/toolwrapper/devchat.ts b/src/toolwrapper/devchat.ts index 5184008..1be1053 100644 --- a/src/toolwrapper/devchat.ts +++ b/src/toolwrapper/devchat.ts @@ -70,9 +70,6 @@ class DevChat { async chat(content: string, options: ChatOptions = {}, onData: (data: string) => void): Promise { let args = ["prompt"]; - if (options.parent) { - args.push("-p", options.parent); - } if (options.reference) { for (const reference of options.reference) { args.push("-r", reference); } } @@ -102,6 +99,11 @@ const openaiStream = vscode.workspace.getConfiguration('DevChat').get('OpenAI.stream'); const llmModel = vscode.workspace.getConfiguration('DevChat').get('llmModel'); const tokensPerPrompt = vscode.workspace.getConfiguration('DevChat').get('OpenAI.tokensPerPrompt'); + const useHistoryPrompts = vscode.workspace.getConfiguration('DevChat').get('OpenAI.useHistoryPrompt'); + + if (useHistoryPrompts && options.parent) { + args.push("-p", options.parent); + } const devchatConfig = { model: openaiModel,