From e586c1d5001bfabe76b4214d88c1bc5d4c9e46f8 Mon Sep 17 00:00:00 2001
From: "bobo.yang"
Date: Fri, 5 May 2023 14:58:38 +0800
Subject: [PATCH 1/2] add config

---
 package.json | 35 +++++++++++++++++++++++++++++++++++
 1 file changed, 35 insertions(+)

diff --git a/package.json b/package.json
index 38e6ed0..583d942 100644
--- a/package.json
+++ b/package.json
@@ -18,6 +18,41 @@
     "README.md"
   ],
   "contributes": {
+    "configuration": {
+      "title": "DevChat",
+      "properties": {
+        "DevChat.llmModel": {
+          "type": "string",
+          "default": "OpenAI",
+          "enum": ["OpenAI"],
+          "description": "Select which LLM provider to use."
+        },
+        "DevChat.OpenAI.model": {
+          "type": "string",
+          "default": "gpt-4",
+          "description": "Specify llm model",
+          "when": "DevChat.llmModel == 'OpenAI'"
+        },
+        "DevChat.OpenAI.temperature": {
+          "type": "number",
+          "default": 0.2,
+          "description": "Specify llm temperature",
+          "when": "DevChat.llmModel == 'OpenAI'"
+        },
+        "DevChat.OpenAI.stream": {
+          "type": "boolean",
+          "default": true,
+          "description": "Specify llm stream",
+          "when": "DevChat.llmModel == 'OpenAI'"
+        },
+        "DevChat.OpenAI.apiKey": {
+          "type": "string",
+          "default": "",
+          "description": "OpenAI API Key",
+          "when": "DevChat.llmModel == 'OpenAI'"
+        }
+      }
+    },
     "views": {
       "explorer": [
         {

From ac98b9674f8aa8f36e8d652aebd834107433eef9 Mon Sep 17 00:00:00 2001
From: "bobo.yang"
Date: Fri, 5 May 2023 15:00:46 +0800
Subject: [PATCH 2/2] use value in config

---
 src/devchat.ts | 24 +++++++++++++++++++++++-
 1 file changed, 23 insertions(+), 1 deletion(-)

diff --git a/src/devchat.ts b/src/devchat.ts
index a249a19..2a9b381 100644
--- a/src/devchat.ts
+++ b/src/devchat.ts
@@ -5,6 +5,7 @@ import { promisify } from "util";
 import * as vscode from 'vscode';
 import * as dotenv from 'dotenv';
 import * as path from 'path';
+import * as fs from 'fs';
 
 const spawnAsync = async (command: string, args: string[], options: any, onData: (data: string) => void): Promise<{code: number, stdout: string; stderr: string }> => {
 	return new Promise((resolve, reject) => {
@@ -82,7 +83,28 @@ class DevChat {
 		args.push(content)
 
 		const workspaceDir = vscode.workspace.workspaceFolders?.[0].uri.fsPath;
-		const openaiApiKey = process.env.OPENAI_API_KEY;
+		// const openaiApiKey = process.env.OPENAI_API_KEY;
+
+		const openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey');
+
+		const openaiModel = vscode.workspace.getConfiguration('DevChat').get('OpenAI.model');
+		const openaiTemperature = vscode.workspace.getConfiguration('DevChat').get('OpenAI.temperature');
+		const openaiStream = vscode.workspace.getConfiguration('DevChat').get('OpenAI.stream');
+		const llmModel = vscode.workspace.getConfiguration('DevChat').get('llmModel');
+
+		const devchatConfig = {
+			llm: llmModel,
+			OpenAI: {
+				model: openaiModel,
+				temperature: openaiTemperature,
+				stream: openaiStream
+			}
+		}
+		// write to config file
+		const configPath = path.join(workspaceDir!, '.chatconfig.json');
+		// write devchatConfig to configPath
+		const configJson = JSON.stringify(devchatConfig, null, 2);
+		fs.writeFileSync(configPath, configJson);
 
 		try {
 			const {code, stdout, stderr } = await spawnAsync('devchat', args, {
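
Note (not part of the patches above): a minimal sketch of how the settings contributed in PATCH 1/2 could be read and written to .chatconfig.json as PATCH 2/2 does, assuming the standard VS Code WorkspaceConfiguration.get<T>(section, defaultValue) overload. The helper name writeDevChatConfig and the typed-getter style are illustrative assumptions, not the author's code.

import * as vscode from 'vscode';
import * as fs from 'fs';
import * as path from 'path';

// Illustrative helper (assumption, not in the patches): gather the DevChat
// settings from PATCH 1/2 into the object shape that PATCH 2/2 writes to
// .chatconfig.json, using typed getters with explicit defaults that mirror
// the defaults declared in package.json.
function writeDevChatConfig(workspaceDir: string): void {
	const cfg = vscode.workspace.getConfiguration('DevChat');
	const devchatConfig = {
		llm: cfg.get<string>('llmModel', 'OpenAI'),
		OpenAI: {
			model: cfg.get<string>('OpenAI.model', 'gpt-4'),
			temperature: cfg.get<number>('OpenAI.temperature', 0.2),
			stream: cfg.get<boolean>('OpenAI.stream', true)
		}
	};
	// Serialize and write the config next to the workspace root.
	const configPath = path.join(workspaceDir, '.chatconfig.json');
	fs.writeFileSync(configPath, JSON.stringify(devchatConfig, null, 2));
}

Centralizing the reads in one helper keeps the fallback values in a single place, so the runtime defaults stay in step with those declared under "contributes.configuration".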