diff --git a/package-lock.json b/package-lock.json index 7cc33e1..7103b94 100644 --- a/package-lock.json +++ b/package-lock.json @@ -44,6 +44,7 @@ "@types/react-dom": "^18.2.3", "@types/react-syntax-highlighter": "^15.5.6", "@types/shell-escape": "^0.2.1", + "@types/uuid": "^9.0.1", "@types/vscode": "^1.77.0", "@typescript-eslint/eslint-plugin": "^5.56.0", "@typescript-eslint/parser": "^5.56.0", @@ -3942,6 +3943,12 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==", "dev": true }, + "node_modules/@types/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-rFT3ak0/2trgvp4yYZo5iKFEPsET7vKydKF+VRCxlQ9bpheehyAJH89dAkaLEq/j/RZXJIqcgsmPJKUP1Z28HA==", + "dev": true + }, "node_modules/@types/vscode": { "version": "1.77.0", "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.77.0.tgz", @@ -15921,6 +15928,12 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==", "dev": true }, + "@types/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-rFT3ak0/2trgvp4yYZo5iKFEPsET7vKydKF+VRCxlQ9bpheehyAJH89dAkaLEq/j/RZXJIqcgsmPJKUP1Z28HA==", + "dev": true + }, "@types/vscode": { "version": "1.77.0", "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.77.0.tgz", diff --git a/package.json b/package.json index 8a7aeaf..0cfd3b6 100644 --- a/package.json +++ b/package.json @@ -69,16 +69,16 @@ "description": "The max number of tokens of a prompt.", "when": "DevChat.llmModel == 'OpenAI'" }, - "DevChat.OpenAI.apiKey": { + "DevChat.API_KEY": { "type": "string", "default": "", - "description": "Open API Key", + "description": "API key for accessing the LLM model", "when": "DevChat.llmModel == 'OpenAI'" }, - "DevChat.OpenAI.EndPoint": { + "DevChat.API_ENDPOINT": { "type": "string", "default": "", - 
"description": "The OpenAI API endpoint URL.", + "description": "API endpoint URL", "when": "DevChat.llmModel == 'OpenAI'" }, "DevChat.DevChatPath": { @@ -155,7 +155,7 @@ }, { "command": "DevChat.OPENAI_API_KEY", - "title": "OPENAI_API_KEY", + "title": "DEVCHAT_API_KEY", "category": "DevChat" }, { diff --git a/src/contributes/commands.ts b/src/contributes/commands.ts index 7380d86..bfdc629 100644 --- a/src/contributes/commands.ts +++ b/src/contributes/commands.ts @@ -38,7 +38,7 @@ export async function checkOpenaiApiKey() { const secretStorage: vscode.SecretStorage = ExtensionContextHolder.context!.secrets; let openaiApiKey = await secretStorage.get("devchat_OPENAI_API_KEY"); if (!openaiApiKey) { - openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey'); + openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('API_KEY'); } if (!openaiApiKey) { openaiApiKey = process.env.OPENAI_API_KEY; @@ -50,7 +50,7 @@ export async function checkOpenaiApiKey() { } function checkOpenaiKey() { - let openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey'); + let openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('API_KEY'); if (!openaiApiKey) { openaiApiKey = process.env.OPENAI_API_KEY; } @@ -60,8 +60,8 @@ function checkOpenaiKey() { placeHolder: 'Please input your OpenAI API key (or DevChat access key)' }).then((value) => { if (value) { - // Set API Key - vscode.workspace.getConfiguration('DevChat').update('OpenAI.apiKey', value, true); + // 设置用户输入的API Key + vscode.workspace.getConfiguration('DevChat').update('API_KEY', value, true); } }); return false; diff --git a/src/toolwrapper/devchat.ts b/src/toolwrapper/devchat.ts index 26b2503..aca9dd6 100644 --- a/src/toolwrapper/devchat.ts +++ b/src/toolwrapper/devchat.ts @@ -88,7 +88,7 @@ class DevChat { const secretStorage: vscode.SecretStorage = ExtensionContextHolder.context!.secrets; let openaiApiKey = await secretStorage.get("devchat_OPENAI_API_KEY"); if 
(!openaiApiKey) { - openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey'); + openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('API_KEY'); } if (!openaiApiKey) { openaiApiKey = process.env.OPENAI_API_KEY; @@ -146,6 +146,22 @@ class DevChat { isError: false, }; } + + apiEndpoint(apiKey: string | undefined): any { + let openAiApiBase: string | undefined = undefined; + if (apiKey?.startsWith("DC.")) { + // TODO add devchat proxy + openAiApiBase = "https://xw4ymuy6qj.ap-southeast-1.awsapprunner.com/api/v1"; + } + + if (vscode.workspace.getConfiguration('DevChat').get('API_ENDPOINT')) { + openAiApiBase = vscode.workspace.getConfiguration('DevChat').get('API_ENDPOINT'); + } + + const openAiApiBaseObject = openAiApiBase ? { OPENAI_API_BASE: openAiApiBase } : {}; + return openAiApiBaseObject; + } + async chat(content: string, options: ChatOptions = {}, onData: (data: ChatResponse) => void): Promise { const args = await this.buildArgs(options); args.push(content); @@ -158,8 +174,8 @@ class DevChat { } - const openaiApiBase = vscode.workspace.getConfiguration('DevChat').get('OpenAI.EndPoint'); - const openaiApiBaseObject = openaiApiBase ? 
{ OPENAI_API_BASE: openaiApiBase } : {}; + // If a DevChat access key is configured, use the default DevChat proxy endpoint + let openAiApiBaseObject = this.apiEndpoint(openaiApiKey); const openaiModel = vscode.workspace.getConfiguration('DevChat').get('OpenAI.model'); const openaiTemperature = vscode.workspace.getConfiguration('DevChat').get('OpenAI.temperature'); @@ -196,16 +212,19 @@ class DevChat { onData(data); }; - logger.channel()?.info(`Running devchat with args: ${args.join(" ")}`); - const { exitCode: code, stdout, stderr } = await this.commandRun.spawnAsync(devChat, args, { + const spawnAsyncOptions = { maxBuffer: 10 * 1024 * 1024, // Set maxBuffer to 10 MB cwd: workspaceDir, env: { ...process.env, OPENAI_API_KEY: openaiApiKey, - ...openaiApiBaseObject + ...openAiApiBaseObject }, - }, onStdoutPartial, undefined, undefined, undefined); + }; + + logger.channel()?.info(`Running devchat with args: ${args.join(" ")}`); + logger.channel()?.info(`Running devchat with env: ${JSON.stringify(openAiApiBaseObject)}`); + const { exitCode: code, stdout, stderr } = await this.commandRun.spawnAsync(devChat, args, spawnAsyncOptions, onStdoutPartial, undefined, undefined, undefined); if (stderr) { const errorMessage = stderr.trim().match(/Error:(.+)/)?.[1];