diff --git a/package.json b/package.json index cf57df7..72ca370 100644 --- a/package.json +++ b/package.json @@ -53,6 +53,12 @@ "description": "token for each prompt", "when": "DevChat.llmModel == 'OpenAI'" }, + "DevChat.OpenAI.useHistoryPrompt": { + "type": "boolean", + "default": true, + "description": "use history prompts as context", + "when": "DevChat.llmModel == 'OpenAI'" + }, "DevChat.OpenAI.apiKey": { "type": "string", "default": "", diff --git a/src/toolwrapper/devchat.ts b/src/toolwrapper/devchat.ts index 5184008..1be1053 100644 --- a/src/toolwrapper/devchat.ts +++ b/src/toolwrapper/devchat.ts @@ -70,9 +70,6 @@ class DevChat { async chat(content: string, options: ChatOptions = {}, onData: (data: string) => void): Promise { let args = ["prompt"]; - if (options.parent) { - args.push("-p", options.parent); - } if (options.reference) { for (const reference of options.reference) { args.push("-r", reference); } @@ -102,6 +99,11 @@ const openaiStream = vscode.workspace.getConfiguration('DevChat').get('OpenAI.stream'); const llmModel = vscode.workspace.getConfiguration('DevChat').get('llmModel'); const tokensPerPrompt = vscode.workspace.getConfiguration('DevChat').get('OpenAI.tokensPerPrompt'); + const useHistoryPrompts = vscode.workspace.getConfiguration('DevChat').get('OpenAI.useHistoryPrompt'); + + if (useHistoryPrompts && options.parent) { + args.push("-p", options.parent); + } const devchatConfig = { model: openaiModel,