Merge pull request #24 from covespace/update_devchat

update to latest devchat
boob.yang 2023-05-08 09:52:22 +08:00 committed by GitHub
commit 90c75f4d46
2 changed files with 202 additions and 182 deletions


@@ -47,6 +47,12 @@
       "description": "Specify llm stream",
       "when": "DevChat.llmModel == 'OpenAI'"
     },
+    "DevChat.OpenAI.tokensPerPrompt": {
+      "type": "number",
+      "default": 6000,
+      "description": "token for each prompt",
+      "when": "DevChat.llmModel == 'OpenAI'"
+    },
     "DevChat.OpenAI.apiKey": {
       "type": "string",
       "default": "",


@@ -7,220 +7,234 @@ import * as dotenv from 'dotenv';
 import * as path from 'path';
 import * as fs from 'fs';
 
-const spawnAsync = async (command: string, args: string[], options: any, onData: (data: string) => void): Promise<{code: number, stdout: string; stderr: string }> => {
+const spawnAsync = async (command: string, args: string[], options: any, onData: (data: string) => void): Promise<{ code: number, stdout: string; stderr: string }> => {
     return new Promise((resolve, reject) => {
         const child = spawn(command, args, options);
         let stdout = '';
         let stderr = '';
 
         child.stdout.on('data', (data) => {
             const dataStr = data.toString();
             onData(dataStr);
             stdout += dataStr;
         });
 
         child.stderr.on('data', (data) => {
             stderr += data;
         });
 
         child.on('close', (code) => {
             if (code === 0) {
-                resolve({code, stdout, stderr });
+                resolve({ code, stdout, stderr });
             } else {
-                reject({code, stdout, stderr });
+                reject({ code, stdout, stderr });
             }
         });
     });
 };
 
 const envPath = path.join(__dirname, '..', '.env');
 dotenv.config({ path: envPath });
 
 export interface ChatOptions {
     parent?: string;
     reference?: string[];
     header?: string[];
     context?: string[];
 }
 
 export interface LogOptions {
     skip?: number;
     maxCount?: number;
 }
 
 export interface LogEntry {
-    "prompt-hash": string;
+    hash: string;
     user: string;
     date: string;
-    message: string;
+    request: string;
     response: string;
+    context: string[];
 }
 
 export interface ChatResponse {
     "prompt-hash": string;
     user: string;
     date: string;
     response: string;
     isError: boolean;
 }
 
 class DevChat {
     async chat(content: string, options: ChatOptions = {}, onData: (data: string) => void): Promise<ChatResponse> {
         let args = ["prompt"];
 
         if (options.parent) {
-            args.push("-p", options.parent);
+            for (const parent of options.parent) {
+                args.push("-p", parent);
+            }
         }
         if (options.reference) {
-            args.push("-r", options.reference.join(","));
+            for (const reference of options.reference) {
+                args.push("-r", reference);
+            }
         }
         if (options.header) {
-            args.push("-i", options.header.join(","));
+            for (const header of options.header) {
+                args.push("-i", header);
+            }
         }
         if (options.context) {
             args.push("-c", options.context.join(","));
         }
         args.push(content)
 
         const workspaceDir = vscode.workspace.workspaceFolders?.[0].uri.fsPath;
         // const openaiApiKey = process.env.OPENAI_API_KEY;
-        const openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey');
+        let openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey');
+        if (!openaiApiKey) {
+            openaiApiKey = process.env.OPENAI_API_KEY;
+        }
 
         const openaiModel = vscode.workspace.getConfiguration('DevChat').get('OpenAI.model');
         const openaiTemperature = vscode.workspace.getConfiguration('DevChat').get('OpenAI.temperature');
         const openaiStream = vscode.workspace.getConfiguration('DevChat').get('OpenAI.stream');
         const llmModel = vscode.workspace.getConfiguration('DevChat').get('llmModel');
+        const tokensPerPrompt = vscode.workspace.getConfiguration('DevChat').get('OpenAI.tokensPerPrompt');
 
         const devchatConfig = {
-            llm: llmModel,
+            model: openaiModel,
+            provider: llmModel,
             OpenAI: {
-                model: openaiModel,
                 temperature: openaiTemperature,
-                stream: openaiStream
+                stream: openaiStream,
+                "tokens-per-prompt": tokensPerPrompt
             }
         }
         // write to config file
-        const configPath = path.join(workspaceDir!, '.chatconfig.json');
+        const configPath = path.join(workspaceDir!, '.chat', 'config.json');
         // write devchatConfig to configPath
         const configJson = JSON.stringify(devchatConfig, null, 2);
         fs.writeFileSync(configPath, configJson);
 
         try {
-            const {code, stdout, stderr } = await spawnAsync('devchat', args, {
+            const { code, stdout, stderr } = await spawnAsync('devchat', args, {
                 maxBuffer: 10 * 1024 * 1024, // Set maxBuffer to 10 MB
                 cwd: workspaceDir,
                 env: {
                     ...process.env,
                     OPENAI_API_KEY: openaiApiKey,
                 },
             }, onData);
 
             if (stderr) {
                 const errorMessage = stderr.trim().match(/Error(.+)/)?.[1];
                 return {
                     "prompt-hash": "",
                     user: "",
                     date: "",
                     response: errorMessage ? `Error: ${errorMessage}` : "Unknown error",
                     isError: true,
                 };
             }
 
             const responseLines = stdout.trim().split("\n");
             console.log(responseLines)
 
             if (responseLines.length === 0) {
                 return {
                     "prompt-hash": "",
                     user: "",
                     date: "",
                     response: "",
                     isError: true,
                 };
             }
 
             let promptHashLine = "";
             for (let i = responseLines.length - 1; i >= 0; i--) {
                 if (responseLines[i].startsWith("prompt")) {
                     promptHashLine = responseLines[i];
                     responseLines.splice(i, 1);
                     break;
                 }
             }
 
             if (!promptHashLine) {
                 return {
                     "prompt-hash": "",
                     user: "",
                     date: "",
                     response: responseLines.join("\n"),
                     isError: true,
                 };
             }
 
             const promptHash = promptHashLine.split(" ")[1];
 
             const userLine = responseLines.shift()!;
             const user = (userLine.match(/User: (.+)/)?.[1]) ?? "";
 
             const dateLine = responseLines.shift()!;
             const date = (dateLine.match(/Date: (.+)/)?.[1]) ?? "";
 
             const response = responseLines.join("\n");
 
             return {
                 "prompt-hash": promptHash,
                 user,
                 date,
                 response,
                 isError: false,
             };
         } catch (error: any) {
             return {
                 "prompt-hash": "",
                 user: "",
                 date: "",
                 response: `Error: ${error.stderr}\nExit code: ${error.code}`,
                 isError: true,
             };
         }
     }
 
     async log(options: LogOptions = {}): Promise<LogEntry[]> {
         let args = ["log"];
 
         if (options.skip) {
             args.push('--skip', `${options.skip}`);
         }
         if (options.maxCount) {
             args.push('--max-count', `${options.maxCount}`);
         }
 
         const workspaceDir = vscode.workspace.workspaceFolders?.[0].uri.fsPath;
         const openaiApiKey = process.env.OPENAI_API_KEY;
 
         try {
-            const {code, stdout, stderr } = await spawnAsync('devchat', args, {
+            const { code, stdout, stderr } = await spawnAsync('devchat', args, {
                 maxBuffer: 10 * 1024 * 1024, // Set maxBuffer to 10 MB
                 cwd: workspaceDir,
                 env: {
                     ...process.env,
                     OPENAI_API_KEY: openaiApiKey,
                 },
-            }, (partialResponse: string) => {});
+            }, (partialResponse: string) => { });
 
             if (stderr) {
                 console.error(stderr);
                 return [];
             }
 
             return JSON.parse(stdout.trim());
         } catch (error) {
             console.error(error)
             return [];
         }
     }
 }
 
 export default DevChat;
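
For reference, a minimal usage sketch of the updated wrapper. The './devchat' import path and the context file path are illustrative assumptions; only the chat/log API visible in this diff is relied on:

// Sketch (assumptions: the './devchat' module path and 'src/extension.ts'
// context path are illustrative and not taken from this commit).
import DevChat, { LogEntry } from './devchat';

async function example(): Promise<void> {
    const devChat = new DevChat();

    // Stream partial output from `devchat prompt` as it arrives.
    const response = await devChat.chat(
        'Explain this file',
        { context: ['src/extension.ts'] },
        (partial) => console.log(partial)
    );

    if (response.isError) {
        console.error(response.response);
        return;
    }
    console.log(`prompt-hash: ${response['prompt-hash']}`);

    // Fetch the most recent entries recorded by `devchat log`.
    const entries: LogEntry[] = await devChat.log({ maxCount: 10 });
    console.log(`loaded ${entries.length} log entries`);
}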