Merge pull request #322 from devchat-ai/workflow-message-step

Workflow message step
Commit 31d9dd1a9a by boob.yang, 2023-10-12 00:18:24 +08:00 (committed by GitHub)
19 changed files with 2124 additions and 499 deletions

package-lock.json (generated, 1590 changes): file diff suppressed because it is too large.


@ -8,6 +8,9 @@
"engines": {
"vscode": "^1.75.0"
},
"extensionDependencies": [
"merico.lang-bridge-vsc"
],
"repository": {
"type": "git",
"url": "https://github.com/devchat-ai/devchat-vscode.git"
@ -412,7 +415,6 @@
"order": 7,
"markdownDescription": "Specify the default llm model for DevChat. [Price of each model](https://devchat.ai/pricing)"
},
"DevChat.OpenAI.stream": {
"type": "boolean",
"default": true,
@ -794,6 +796,9 @@
"axios": "^1.3.6",
"dotenv": "^16.0.3",
"js-yaml": "^4.1.0",
"mdast": "^3.0.0",
"mdast-util-from-markdown": "^2.0.0",
"mdast-util-to-markdown": "^2.1.0",
"mobx": "^6.10.0",
"mobx-react": "^9.0.0",
"mobx-state-tree": "^5.1.8",
@ -807,6 +812,8 @@
"rehype-raw": "^6.1.1",
"shell-escape": "^0.2.0",
"string-argv": "^0.3.2",
"unified": "^11.0.3",
"unist-util-visit": "^5.0.0",
"uuid": "^9.0.0",
"yaml": "^2.3.2"
}


@ -26,7 +26,7 @@ class CommandManager {
CommandManager.instance.registerCommand({
name: 'ask-code',
pattern: 'ask-code',
description: 'ask code',
description: 'Ask anything about your codebase and get answers from our AI agent',
args: 0,
handler: async (commandName: string, userInput: string) => {
return '';


@ -284,9 +284,6 @@ export function registerAskCodeIndexStartCommand(context: vscode.ExtensionContex
if (!pythonVirtualEnv) {
progressBar.update("Installing devchat-ask. See OUTPUT for progress...", 0);
await installAskCode(supportedFileTypes, progressBar, indexCode);
} else {
progressBar.update("Index source files. See OUTPUT for progress...", 0);
await indexCode(pythonVirtualEnv, supportedFileTypes, progressBar);
}
updateIndexingStatus("stopped");
@ -310,7 +307,7 @@ async function installAskCode(supportedFileTypes, progressBar: any, callback: Fu
return;
}
UiUtilWrapper.updateConfiguration("DevChat", "PythonVirtualEnv", pythonEnvPath.trim());
await UiUtilWrapper.updateConfiguration("DevChat", "PythonVirtualEnv", pythonEnvPath.trim());
logger.channel()?.info(`Installation finished.`);
// Execute the callback function after the installation is finished
@ -318,68 +315,6 @@ async function installAskCode(supportedFileTypes, progressBar: any, callback: Fu
}
async function indexCode(pythonVirtualEnv, supportedFileTypes, progressBar: any) {
let envs = {};
const llmModelData = await ApiKeyManager.llmModel();
if (!llmModelData) {
logger.channel()?.error('No valid llm model is selected!');
logger.channel()?.show();
progressBar.endWithError("No valid llm model is selected!");
return;
}
let openaiApiKey = llmModelData.api_key;
if (!openaiApiKey) {
logger.channel()?.error('The OpenAI key is invalid!');
logger.channel()?.show();
progressBar.endWithError("The OpenAI key is invalid!");
return;
}
envs['OPENAI_API_KEY'] = openaiApiKey;
const openAiApiBase = llmModelData.api_base;
if (openAiApiBase) {
envs['OPENAI_API_BASE'] = openAiApiBase;
}
const workspaceDir = UiUtilWrapper.workspaceFoldersFirstPath();
const command = pythonVirtualEnv.trim();
const args = [UiUtilWrapper.extensionPath() + "/tools/askcode_index_query.py", "index", ".", supportedFileTypes];
const options = { env: envs, cwd: workspaceDir };
indexProcess = new CommandRun();
const result = await indexProcess.spawnAsync(command, args, options, (data) => {
if (data.includes('Skip file:')) {
return;
}
logger.channel()?.info(`${data}`);
}, (data) => {
if (data.includes('Skip file:')) {
return;
}
logger.channel()?.info(`${data}`);
}, undefined, undefined);
if (result.exitCode !== 0) {
if (result.exitCode === null) {
logger.channel()?.info(`Indexing stopped!`);
progressBar.endWithError(`Indexing stopped!`);
} else {
logger.channel()?.error(`Indexing failed: ${result.stderr}`);
logger.channel()?.show();
progressBar.endWithError(`Indexing failed: ${result.stderr}`);
}
return;
}
updateLastModifyTime();
logger.channel()?.info(`index finished.`);
progressBar.update("Indexing finished.");
progressBar.end();
}
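The notable change in this hunk is that the configuration write is now awaited before installation is reported as finished. A minimal sketch of why that matters, assuming UiUtilWrapper.updateConfiguration wraps VS Code's asynchronous workspace configuration update; the import paths and the helper function below are illustrative:

```typescript
import * as vscode from "vscode";
import { UiUtilWrapper } from "../util/uiUtil";

// A minimal sketch, assuming UiUtilWrapper.updateConfiguration wraps the
// asynchronous workspace.getConfiguration(...).update(...) call and resolves
// once the setting has been persisted.
async function savePythonEnv(pythonEnvPath: string): Promise<void> {
  await UiUtilWrapper.updateConfiguration("DevChat", "PythonVirtualEnv", pythonEnvPath.trim());

  // Without the await, this read (or the indexing callback that depends on the
  // setting) could still observe the old value.
  const saved = vscode.workspace.getConfiguration("DevChat").get<string>("PythonVirtualEnv");
  console.log(`PythonVirtualEnv is now: ${saved}`);
}
```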


@ -14,7 +14,7 @@ export function checkDevChatDependency(showError: boolean = true): boolean {
try {
// Check if DevChat is installed
const expectVersion = 'DevChat 0.2.8';
const expectVersion = 'DevChat 0.2.9';
const devchatVersion = runCommand(`"${devChat}" --version`).toString().trim();
if (devchatVersion < expectVersion) {
logger.channel()?.info(`devchat version: ${devchatVersion}, but expect version: ${expectVersion}`);
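Note that the check above compares plain strings, so the comparison is lexicographic rather than a semantic-version check; a quick illustration:

```typescript
// The comparison used above is lexicographic, not a semantic-version check.
const expectVersion = "DevChat 0.2.9";

console.log("DevChat 0.2.8" < expectVersion);  // true  – an older build triggers the notice
console.log("DevChat 0.2.10" < expectVersion); // true  – a newer build would trigger it too,
                                               //         because "1" sorts before "9"
```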


@ -2,7 +2,7 @@
import * as vscode from 'vscode';
import { MessageHandler } from './messageHandler';
import { regInMessage, regOutMessage } from '../util/reg_messages';
import { stopDevChatBase, sendMessageBase, deleteChatMessageBase } from './sendMessageBase';
import { stopDevChatBase, sendMessageBase, deleteChatMessageBase, insertDevChatLog, handleTopic } from './sendMessageBase';
import { UiUtilWrapper } from '../util/uiUtil';
import * as fs from 'fs';
import * as os from 'os';
@ -15,6 +15,9 @@ import { CommandRun, createTempSubdirectory } from '../util/commonUtil';
const exec = promisify(execCb);
let askcode_stop = true;
let askcode_runner : CommandRun | null = null;
let _lastMessage: any = undefined;
export function createTempFile(content: string): string {
@ -35,9 +38,15 @@ export function deleteTempFiles(fileName: string): void {
regInMessage({command: 'askCode', text: '', parent_hash: undefined});
regOutMessage({ command: 'receiveMessage', text: 'xxxx', hash: 'xxx', user: 'xxx', date: 'xxx'});
export async function askCode(message: any, panel: vscode.WebviewPanel|vscode.WebviewView): Promise<void> {
try {
askcode_stop = false;
askcode_runner = null;
_lastMessage = [message];
_lastMessage[0]['askCode'] = true;
const port = await UiUtilWrapper.getLSPBrigePort();
let pythonVirtualEnv: string|undefined = vscode.workspace.getConfiguration('DevChat').get('PythonVirtualEnv');
if (!pythonVirtualEnv) {
try {
@ -78,36 +87,50 @@ export async function askCode(message: any, panel: vscode.WebviewPanel|vscode.We
}
const workspaceDir = UiUtilWrapper.workspaceFoldersFirstPath();
try {
// create temp directory and file
const tempDir = await createTempSubdirectory('devchat/context');
const tempFile = path.join(tempDir, "doc_context.txt");
// If tempFile already exists, delete it
if (fs.existsSync(tempFile)) {
fs.unlinkSync(tempFile);
if (askcode_stop) {
return;
}
const commandRun = new CommandRun();
try {
let outputResult = "";
askcode_runner = new CommandRun();
const command = pythonVirtualEnv.trim();
const args = [UiUtilWrapper.extensionPath() + "/tools/askcode_index_query.py", "query", message.text, tempFile];
const result = await commandRun.spawnAsync(command, args, { env: envs, cwd: workspaceDir }, (data) => {
const args = [UiUtilWrapper.extensionPath() + "/tools/askcode_index_query.py", "query", message.text, `${port}`];
const result = await askcode_runner.spawnAsync(command, args, { env: envs, cwd: workspaceDir }, (data) => {
outputResult += data;
MessageHandler.sendMessage(panel, { command: 'receiveMessagePartial', text: outputResult, hash:"", user:"", isError: false });
logger.channel()?.info(data);
}, (data) => {
logger.channel()?.error(data);
}, undefined, undefined);
// Check if tempFile has been written to
if (!fs.existsSync(tempFile) || fs.readFileSync(tempFile, 'utf8') === '') {
logger.channel()?.error(`Did not get relevant context from AskCode.`);
if (result.exitCode === 0) {
// save askcode result to devchat
const stepIndex = result.stdout.lastIndexOf("```Step");
const stepEndIndex = result.stdout.lastIndexOf("```");
let resultOut = result.stdout;
if (stepIndex > 0 && stepEndIndex > 0) {
resultOut = result.stdout.substring(stepEndIndex+3, result.stdout.length);
}
let logHash = await insertDevChatLog(message, "/ask-code " + message.text, resultOut);
if (!logHash) {
logHash = "";
logger.channel()?.error(`Failed to insert devchat log.`);
logger.channel()?.show();
MessageHandler.sendMessage(panel, { command: 'receiveMessage', text: "Did not get relevant context from AskCode.", hash: "", user: "", date: 0, isError: true });
return;
}
// Send message
await sendMessage({command: "sendMessage", contextInfo: [{file: tempFile, context: ""}], text: message.text, parent_hash: message.hash}, panel);
MessageHandler.sendMessage(panel, { command: 'receiveMessagePartial', text: result.stdout, hash:logHash, user:"", isError: false });
MessageHandler.sendMessage(panel, { command: 'receiveMessage', text: result.stdout, hash:logHash, user:"", date:0, isError: false });
const dateStr = Math.floor(Date.now()/1000).toString();
await handleTopic(
message.parent_hash,
{"text": "/ask-code " + message.text},
{ response: result.stdout, "prompt-hash": logHash, user: "", "date": dateStr, finish_reason: "", isError: false });
} else {
logger.channel()?.info(`${result.stdout}`);
MessageHandler.sendMessage(panel, { command: 'receiveMessage', text: result.stderr, hash: "", user: "", date: 0, isError: true });
}
} catch (error) {
if (error instanceof Error) {
logger.channel()?.error(`error: ${error.message}`);
@ -117,6 +140,10 @@ export async function askCode(message: any, panel: vscode.WebviewPanel|vscode.We
logger.channel()?.show();
MessageHandler.sendMessage(panel, { command: 'receiveMessage', text: "Did not get relevant context from AskCode.", hash: "", user: "", date: 0, isError: true });
}
} finally {
askcode_stop = true;
askcode_runner = null;
}
}
@ -199,6 +226,15 @@ export async function regeneration(message: any, panel: vscode.WebviewPanel|vsco
regInMessage({command: 'stopDevChat'});
export async function stopDevChat(message: any, panel: vscode.WebviewPanel|vscode.WebviewView): Promise<void> {
stopDevChatBase(message);
if (askcode_stop === false) {
askcode_stop = true;
if (askcode_runner) {
askcode_runner.stop();
askcode_runner = null;
}
await vscode.commands.executeCommand('DevChat.AskCodeIndexStop');
}
}
regInMessage({command: 'deleteChatMessage', hash: 'xxx'});
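To make the new logging path in the askCode handler above concrete, here is a small sketch of how the trailing answer is separated from the Step fence before being written to the devchat log via insertDevChatLog; the sample stdout is made up.

```typescript
// Sketch of the trimming in askCode above: the full stdout (including the Step
// block) is streamed to the webview, but only the text after the last closing
// fence is stored via insertDevChatLog. The sample stdout is hypothetical.
const stdout = [
  "Loaded local index.",
  "```Step",
  "# Searching the codebase",
  "reading 12 files...",
  "```",
  "Here is the answer to your question.",
].join("\n");

const stepIndex = stdout.lastIndexOf("```Step");
const stepEndIndex = stdout.lastIndexOf("```");

let resultOut = stdout;
if (stepIndex > 0 && stepEndIndex > 0) {
  // Keep only the text after the closing fence of the last step block.
  resultOut = stdout.substring(stepEndIndex + 3, stdout.length);
}

console.log(resultOut.trim()); // "Here is the answer to your question."
```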


@ -178,6 +178,17 @@ export async function stopDevChatBase(message: any): Promise<void> {
devChat.stop();
}
export async function insertDevChatLog(message: any, request: string, response: string): Promise<string | undefined> {
logger.channel()?.info(`Inserting devchat log`);
await devChat.logInsert(request, response, message.parent_hash);
const logs = await devChat.log({"maxCount": 1});
if (logs && logs.length > 0) {
return logs[0]['hash'];
} else {
return undefined;
}
}
// delete a chat message
// each message is identified by hash
export async function deleteChatMessageBase(message:{'hash': string}): Promise<boolean> {


@ -301,6 +301,64 @@ class DevChat {
}
}
async logInsert(request: string, response: string, parent: string | undefined) {
let log_data = {
"model": "gpt-4",
"messages": [
{
"role": "user",
"content": request
},
{
"role": "assistant",
"content": response
}
],
"timestamp": Math.floor(Date.now()/1000),
"request_tokens": 1,
"response_tokens": 1
};
if (parent) {
log_data["parent"] = parent;
}
const args = ["log", "--insert", JSON.stringify(log_data)];
const devChat = this.getDevChatPath();
const workspaceDir = UiUtilWrapper.workspaceFoldersFirstPath();
const openaiApiKey = process.env.OPENAI_API_KEY;
logger.channel()?.info(`Running devchat with arguments: ${args.join(" ")}`);
const spawnOptions = {
maxBuffer: 10 * 1024 * 1024, // Set maxBuffer to 10 MB
cwd: workspaceDir,
env: {
...process.env,
OPENAI_API_KEY: openaiApiKey,
},
};
const { exitCode: code, stdout, stderr } = await this.commandRun.spawnAsync(devChat, args, spawnOptions, undefined, undefined, undefined, undefined);
logger.channel()?.info(`Finish devchat with arguments: ${args.join(" ")}`);
if (stderr) {
logger.channel()?.error(`Error: ${stderr}`);
logger.channel()?.show();
return false;
}
if (stdout.indexOf('Failed to insert log') >= 0) {
logger.channel()?.error(`Failed to insert log: ${log_data}`);
logger.channel()?.show();
return false;
}
if (code !== 0) {
logger.channel()?.error(`Exit code: ${code}`);
logger.channel()?.show();
return false;
}
return true;
}
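For reference, the JSON payload handed to `devchat log --insert` has the shape built above; in the sketch below, the content strings and the parent hash are illustrative only.

```typescript
// Illustrative payload for `devchat log --insert`; the content strings and the
// parent hash are made up.
const logData: Record<string, unknown> = {
  model: "gpt-4",
  messages: [
    { role: "user", content: "/ask-code How is the index built?" },
    { role: "assistant", content: "The index is built by ..." },
  ],
  timestamp: Math.floor(Date.now() / 1000),
  request_tokens: 1,
  response_tokens: 1,
};
// parent is only included when the message replies to an earlier log entry.
logData["parent"] = "a1b2c3d4";

// The CLI is then spawned roughly as: <devchat binary> log --insert '<json>'
const args = ["log", "--insert", JSON.stringify(logData)];
console.log(args.join(" "));
```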
async delete(hash: string): Promise<boolean> {
const args = ["log", "--delete", hash];
const devChat = this.getDevChatPath();


@ -17,6 +17,7 @@ export interface UiUtil {
// current selected text
selectText(): string | undefined;
showErrorMessage(message: string): void;
getLSPBrigePort(): Promise<number | undefined>;
}
@ -72,5 +73,9 @@ export class UiUtilWrapper {
public static showErrorMessage(message: string): void {
this._uiUtil?.showErrorMessage(message);
}
public static async getLSPBrigePort(): Promise<number | undefined> {
return await this._uiUtil?.getLSPBrigePort();
}
}


@ -122,4 +122,9 @@ export class UiUtilVscode implements UiUtil {
public showErrorMessage(message: string): void {
vscode.window.showErrorMessage(message);
}
public async getLSPBrigePort(): Promise<number | undefined> {
const port = await vscode.commands.executeCommand('LangBrige.getAddress') as number | undefined;
return port;
}
}


@ -1,12 +1,13 @@
import React, { useEffect } from "react";
import React, { useEffect, useState } from "react";
import { keyframes } from "@emotion/react";
import { Box, Container, Text } from "@mantine/core";
import MessageBody from "@/views/components/MessageBody";
import { observer } from "mobx-react-lite";
import { useMst } from "@/views/stores/RootStore";
import { Message } from "@/views/stores/ChatStore";
import {fromMarkdown} from 'mdast-util-from-markdown';
import {toMarkdown} from 'mdast-util-to-markdown';
const MessageBlink = observer(() => {
const { chat } = useMst();
@ -18,7 +19,7 @@ const MessageBlink = observer(() => {
return <Text sx={{
animation: `${blink} 0.5s infinite;`,
width: 5,
marginTop: chat.responsed ? 0 : '1em',
marginTop: '1em',
backgroundColor: 'black',
display: 'block'
}}>|</Text>;
@ -49,25 +50,19 @@ const CurrentMessage = observer((props: any) => {
const { width } = props;
const { chat } = useMst();
const { messages, currentMessage, generating, responsed, hasDone } = chat;
// split blocks
const messageBlocks = getBlocks(currentMessage);
const lastMessageBlocks = getBlocks(messages[messages.length - 1]?.message);
const fixedCount = lastMessageBlocks.length;
const receivedCount = messageBlocks.length;
const renderBlocks = messageBlocks.splice(-1);
useEffect(() => {
if (generating) {
// new a bot message
const messageItem = Message.create({ type: 'bot', message: currentMessage });
chat.newMessage(messageItem);
}
}, [generating]);
const messageBlocks = fromMarkdown(currentMessage);
const lastMessageBlocks = fromMarkdown(messages[messages.length - 1]?.message);
const fixedCount = lastMessageBlocks.children.length;
const receivedCount = messageBlocks.children.length;
const renderBlocks = messageBlocks.children.splice(-1);
useEffect(() => {
if (generating && (receivedCount - fixedCount >= 1 || !responsed)) {
chat.updateLastMessage(currentMessage);
chat.updateLastMessage(toMarkdown({
type: 'root',
children: messageBlocks.children
}));
}
}, [currentMessage, responsed, generating]);
@ -80,13 +75,18 @@ const CurrentMessage = observer((props: any) => {
return generating
? <Box
sx={{
padding: 0,
marginTop: -5,
marginBottom: 50,
width: width,
pre: {
margin: 0,
whiteSpace: 'break-spaces'
},
}}>
<MessageBody messageText={renderBlocks.join('\n\n')} messageType="bot" />
<MessageBody messageType="bot" temp={true} >
{renderBlocks.length>0?toMarkdown(renderBlocks[0]):''}
</MessageBody>
<MessageBlink />
</Box>
: <></>;
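The streaming view now parses the partial markdown into top-level mdast blocks: finished blocks are flushed to the last stored message, and only the trailing, possibly incomplete block is rendered live. A small sketch of that split, with a made-up streaming snapshot:

```typescript
import { fromMarkdown } from "mdast-util-from-markdown";
import { toMarkdown } from "mdast-util-to-markdown";

// Hypothetical snapshot of currentMessage while the model is still streaming.
const currentMessage =
  "First paragraph.\n\n```Step\n# Reading files\n```\n\nThe answer is still being t";

const messageBlocks = fromMarkdown(currentMessage);
console.log(messageBlocks.children.length); // 3 top-level blocks

// The last, possibly unfinished block is rendered by CurrentMessage...
const renderBlocks = messageBlocks.children.splice(-1);
console.log(toMarkdown(renderBlocks[0])); // "The answer is still being t\n"

// ...while the finished blocks are re-serialized and written back to the
// message list via chat.updateLastMessage.
console.log(toMarkdown({ type: "root", children: messageBlocks.children }));
```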


@ -60,27 +60,19 @@ const InputMessage = observer((props: any) => {
if (inputValue) {
if (inputValue.trim() === '/help') {
chat.helpMessage();
input.setValue('');
event.preventDefault();
} else {
const text = inputValue;
// Add the user's message to the chat UI
const chatContexts = contexts ? [...contexts].map((item) => ({ ...item })) : undefined;
const newMessage = Message.create({
type: 'user',
message: inputValue,
contexts: chatContexts
});
chat.newMessage(newMessage);
// start generating
chat.startGenerating(text, chatContexts);
if (inputValue.trim().startsWith('/ask-code')) {
chat.devchatAsk(text, chatContexts);
} else{
chat.commonMessage(text, chatContexts);
}
}
// Clear the input field
input.setValue('');
input.clearContexts();
setTimeout(() => {
chat.goScrollBottom();
}, 1000);
}
event.preventDefault();
}
};


@ -5,8 +5,9 @@ import MessageMarkdown from "@/views/components/MessageMarkdown";
import { useMst } from "@/views/stores/RootStore";
interface IProps {
messageText: string,
messageType: string
messageType: string,
children: string,
temp?: boolean
}
@ -17,14 +18,16 @@ const useStyles = createStyles((theme, options:any) => ({
}));
const MessageBody = observer((props: IProps) => {
const { messageText, messageType } = props;
const { children, messageType, temp=false } = props;
const { chat } = useMst();
const {classes} = useStyles({
chatPanelWidth:chat.chatPanelWidth
});
return (
messageType === 'bot'
? <MessageMarkdown className={classes.bodyWidth}>{messageText}</MessageMarkdown>
? <MessageMarkdown className={classes.bodyWidth} temp={temp}>
{children}
</MessageMarkdown>
: <Container
sx={{
margin: 0,
@ -35,7 +38,7 @@ const MessageBody = observer((props: IProps) => {
wordBreak: 'break-word',
},
}}>
<pre>{messageText}</pre>
<pre>{children}</pre>
</Container>
);
});
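MessageBody now receives the message text as children and uses temp to mark the live copy rendered by CurrentMessage. A hedged usage sketch; the sample strings are made up and the import path is the project's alias as used elsewhere in the diff:

```tsx
import React from "react";
import MessageBody from "@/views/components/MessageBody";

// Illustrative only: a streaming bot message (temp) next to a stored user message.
export const Example = () => (
  <>
    <MessageBody messageType="bot" temp={true}>
      {"```Step\n# Searching the codebase\nreading files...\n```"}
    </MessageBody>
    <MessageBody messageType="user">
      {"/ask-code where is the index stored?"}
    </MessageBody>
  </>
);
```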


@ -1,5 +1,5 @@
import { Stack, Container, Divider, Box } from "@mantine/core";
import { Stack, Container, Divider, Box, Group,Text, Button, createStyles } from "@mantine/core";
import React, { useEffect } from "react";
import MessageBody from "@/views/components/MessageBody";
import MessageAvatar from "@/views/components/MessageAvatar";
@ -8,14 +8,44 @@ import { useMst } from "@/views/stores/RootStore";
import { Message } from "@/views/stores/ChatStore";
import MessageContext from "@/views/components/MessageContext";
import CurrentMessage from "@/views/components/CurrentMessage";
import { Card } from '@mantine/core';
import { IconInfoSquareRounded } from "@tabler/icons-react";
const useStyles = createStyles((theme) => ({
card:{
backgroundColor: 'var(--vscode-menu-background)',
fontFamily: 'var(--vscode-editor-font-family)',
fontSize: 'var(--vscode-editor-font-size)',
color: 'var(--vscode-menu-foreground)',
borderColor: 'var(--vscode-menu-border)',
},
cardDescription:{
marginTop: 10,
marginBottom: 10,
},
button:{
backgroundColor:"#ED6A45",
fontFamily: 'var(--vscode-editor-font-family)',
fontSize: 'var(--vscode-editor-font-size)',
color:"#fff",
"&:hover":{
backgroundColor:"#ED6A45",
opacity: 0.8,
},
"&:focus":{
backgroundColor:"#ED6A45",
opacity: 0.8,
}
}
}));
const MessageList = observer((props: any) => {
const { chat } = useMst();
const {classes} = useStyles();
return (<Stack spacing={0} sx={{margin:'0 10px 10px 10px'}}>
{chat.messages.map((item, index: number) => {
const { message: messageText, type: messageType, hash: messageHash, contexts } = item;
const { message: messageText, type: messageType, hash: messageHash, contexts, confirm } = item;
// setMessage(messageText);
return <Stack
spacing={0}
@ -40,8 +70,28 @@ const MessageList = observer((props: any) => {
whiteSpace: 'break-spaces'
},
}}>
{ messageType === 'bot' && confirm && <Card shadow="sm" padding="xs" radius="md" withBorder className={classes.card}>
<Card.Section withBorder inheritPadding py="xs">
<Group position="left">
<IconInfoSquareRounded size={20} />
<Text fw={500}>Explore with /ask-code!</Text>
</Group>
</Card.Section>
<Text className={classes.cardDescription}>
/ask-code, your AI agent, navigates through your codebase to answer questions using GPT-4, analyzing up to 10 source files for approximately $0.4 USD per question. We're evolving: soon, we'll implement the more affordable LLama2-70b model.
<br/>
<br/>
Would you like to proceed?
</Text>
<Group position="right" >
<Button size="compact-xs" className={classes.button} onClick={()=> chat.sendLastUserMessage() }>Yes</Button>
<Button size="compact-xs" className={classes.button} onClick={()=> chat.cancelDevchatAsk()}>No</Button>
</Group>
</Card>}
<MessageContext key={`message-context-${index}`} contexts={contexts} />
<MessageBody key={`message-codeblock-${index}`} messageType={messageType} messageText={messageText} />
<MessageBody key={`message-codeblock-${index}`} messageType={messageType} >
{messageText}
</MessageBody>
</Box >
{index !== chat.messages.length - 1 && <Divider my={3} key={`message-divider-${index}`} />}
</Stack >;


@ -0,0 +1,24 @@
import React from 'react';
interface LanguageCornerProps {
language: string;
}
const LanguageCorner: React.FC<LanguageCornerProps> = ({ language }) => {
return (
<div style={{ position: 'absolute', top: 0, left: 0 }}>
{language && (
<div style={{
backgroundColor: '#333',
color: '#fff',
padding: '0.2rem 0.5rem',
borderRadius: '0.2rem',
fontSize: '0.8rem',
}}>
{language}
</div>
)}
</div>
);
};
export default LanguageCorner;


@ -0,0 +1,106 @@
import { Accordion, Box, Button, Collapse, Group,Loader,Text } from "@mantine/core";
import { useDisclosure } from "@mantine/hooks";
import React from "react";
import SyntaxHighlighter from "react-syntax-highlighter";
import LanguageCorner from "./LanguageCorner";
import { okaidia } from "react-syntax-highlighter/dist/esm/styles/prism";
import { IconCheck, IconChevronDown, IconFileDiff, IconLoader } from "@tabler/icons-react";
import { observer } from "mobx-react-lite";
import { useMst } from "@/views/stores/RootStore";
import { keyframes,css } from "@emotion/react";
interface StepProps {
language: string;
children: string;
done:boolean;
}
const Step = observer((props:StepProps) => {
const { chat } = useMst();
const {language,children,done} = props;
const [opened, { toggle }] = useDisclosure(false);
// extract first line with # as button label
const lines = children.split('\n');
const title = lines.length>0&&lines[0].indexOf('#')>=0?lines[0].split('#')[1]:'';
const contents = lines.slice(1,lines.length-1);
const spin = keyframes`
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
`;
return <Accordion
variant="contained"
chevronPosition="right"
sx={{
marginTop: 5,
borderRadius: 5,
backgroundColor: 'var(--vscode-menu-background)',
}}
styles={{
item: {
borderColor: 'var(--vscode-menu-border)',
backgroundColor: 'var(--vscode-menu-background)',
'&[data-active]': {
backgroundColor: 'var(--vscode-menu-background)',
}
},
control: {
height: 30,
borderRadius: 3,
backgroundColor: 'var(--vscode-menu-background)',
'&[aria-expanded="true"]': {
borderBottomLeftRadius: 0,
borderBottomRightRadius: 0,
},
'&:hover': {
backgroundColor: 'var(--vscode-menu-background)',
},
paddingLeft: '0.5rem',
paddingRight: '0.5rem',
fontFamily: 'var(--vscode-editor-font-family)',
fontSize: 'var(--vscode-editor-font-size)',
},
chevron: {
color: 'var(--vscode-menu-foreground)',
},
icon: {
color: 'var(--vscode-menu-foreground)',
},
label: {
color: 'var(--vscode-menu-foreground)',
},
panel: {
color: 'var(--vscode-menu-foreground)',
backgroundColor: 'var(--vscode-menu-background)',
},
content: {
borderRadius: 3,
backgroundColor: 'var(--vscode-menu-background)',
padding:'0.5rem'
}
}}
>
<Accordion.Item value={title} mah='200'>
<Accordion.Control icon={
done
?<IconCheck size="1.125rem"/>
:<Loader size="xs" color="#ED6A45" speed={1} />
}
>
<Text truncate='end' w={chat.chatPanelWidth-100}>{title}</Text>
</Accordion.Control>
<Accordion.Panel>
<SyntaxHighlighter {...props}
language="markdown"
style={okaidia}
PreTag="div">
{children}
</SyntaxHighlighter>
</Accordion.Panel>
</Accordion.Item>
</Accordion>;
});
export default Step;
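The accordion label is taken from the first heading-style line of the step block. A small sketch of that parsing, with made-up step content:

```typescript
// Sketch of the title extraction in Step above; the step text is made up.
const children = "# Searching the codebase\nreading foo.ts\nreading bar.ts\n";

const lines = children.split("\n");
const title =
  lines.length > 0 && lines[0].indexOf("#") >= 0 ? lines[0].split("#")[1] : "";
const contents = lines.slice(1, lines.length - 1);

console.log(title);    // " Searching the codebase" (used as the accordion label)
console.log(contents); // [ "reading foo.ts", "reading bar.ts" ]
```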


@ -1,42 +1,40 @@
import { Button, Anchor } from "@mantine/core";
import React from "react";
import { Button, Anchor, Stack, Group, Box } from "@mantine/core";
import React, { useEffect, useState } from "react";
import ReactMarkdown from "react-markdown";
import rehypeRaw from "rehype-raw";
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { okaidia } from "react-syntax-highlighter/dist/esm/styles/prism";
import CodeButtons from "./CodeButtons";
import Step from "./Step";
import LanguageCorner from "./LanguageCorner";
import { observer } from "mobx-react-lite";
import { useMst } from "@/views/stores/RootStore";
import { Message } from "@/views/stores/ChatStore";
import messageUtil from '@/util/MessageUtil';
import {fromMarkdown} from 'mdast-util-from-markdown';
import {visit} from 'unist-util-visit';
interface MessageMarkdownProps extends React.ComponentProps<typeof ReactMarkdown> {
children: string,
className: string
className: string,
temp?: boolean
}
const MessageMarkdown = observer((props: MessageMarkdownProps) => {
const { children } = props;
const { chat } = useMst();
const LanguageCorner = (props: any) => {
const { language } = props;
return (<div style={{ position: 'absolute', top: 0, left: 0 }}>
{language && (
<div style={{
backgroundColor: '#333',
color: '#fff',
padding: '0.2rem 0.5rem',
borderRadius: '0.2rem',
fontSize: '0.8rem',
}}>
{language}
</div>
)}
</div>);
type Step = {
index: number,
content: string;
endsWithTripleBacktick: boolean;
};
const MessageMarkdown = observer((props: MessageMarkdownProps) => {
const { children,temp=false } = props;
const { chat } = useMst();
const [steps, setSteps] = useState<Step[]>([]);
const tree = fromMarkdown(children);
const codes = tree.children.filter(node => node.type === 'code');
const lastNode = tree.children[tree.children.length-1];
let index = 1;
const handleExplain = (value: string | undefined) => {
console.log(value);
switch (value) {
@ -48,27 +46,9 @@ const MessageMarkdown = observer((props: MessageMarkdownProps) => {
}),
Message.create({
type: 'bot',
message: `***/ask_code***
message: `***/ask-code***
If you would like to ask questions related to your own codebase, you can enable and use the /ask_code feature of DevChat.
While /ask_code is being enabled, DevChat will need to index your codebase before you can use this feature. Indexing usually takes a while, depending on the size of your codebase, your computing power and the network. Once it's done, you can ask questions about your codebase by typing the /ask_code command, followed by your question.
Example questions:
(Here we only show example questions from a few popular open-source projects codebases.)
How do I access POST form fields in Express?
How do I pass command line arguments to a Node.js program?
How do I print the value of a tensor object in TensorFlow?
How do I force Kubernetes to re-pull an image in Kubernetes?
How do I set focus on an input field after rendering in React?
\`Please check DevChat.ask_code settings\` before enabling the feature, because once indexing has been started, changing the settings will not affect the process anymore, unless you terminate it and re-index.
To enable, you can enter \`DevChat:Start AskCode Index\` in the Command Palette or click on the button to start indexing now.
<button value="settings">Settings</button>
<button value="start_indexing">Start Indexing</button>
Your AI agent navigates through your codebase to answer questions using GPT-4, analyzing up to 10 source files for approximately $0.4 USD per question. We're evolving: soon, we'll implement the more affordable LLama2-70b model.
`
}),
]);
@ -135,9 +115,20 @@ Generate a professionally written and formatted release note in markdown with th
return <ReactMarkdown
{...props}
remarkPlugins={[()=> (tree) =>{
visit(tree, function (node) {
if (node.type === 'code' && (node.lang ==='step' || node.lang ==='Step')) {
node.data = {
hProperties:{
index: index++
}
};
}
});
}]}
rehypePlugins={[rehypeRaw]}
components={{
code({ node, inline, className, children, ...props }) {
code({ node, inline, className, children, index, ...props }) {
const match = /language-(\w+)/.exec(className || '');
const value = String(children).replace(/\n$/, '');
@ -150,6 +141,12 @@ Generate a professionally written and formatted release note in markdown with th
if (lanugage === 'markdown' || lanugage === 'text') {
wrapLongLines = true;
}
if (lanugage === 'step' || lanugage === 'Step') {
let done = Number(index) < codes.length? true : lastNode.type !== 'code';
return <Step language={lanugage} done={temp?done:true}>{value}</Step>;
}
return !inline && lanugage ? (
<div style={{ position: 'relative' }}>
<LanguageCorner language={lanugage} />
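The done flag for a streamed step block follows a simple rule: every step is considered finished except the last code block when the message still ends inside it. A sketch of that heuristic with a made-up streamed message; index is the 1-based counter the remark plugin above attaches to each step fence, and the flag only matters when temp is true.

```typescript
import { fromMarkdown } from "mdast-util-from-markdown";

// Sketch of the "done" heuristic for step blocks while streaming (temp === true).
// The streamed message below is made up.
const children = "```Step\n# Indexing\n```\n\n```Step\n# Querying\nstill runn";

const tree = fromMarkdown(children);
const codes = tree.children.filter((node) => node.type === "code");
const lastNode = tree.children[tree.children.length - 1];

// index is the 1-based counter assigned to each step fence by the remark plugin.
const doneFor = (index: number) =>
  index < codes.length ? true : lastNode.type !== "code";

console.log(doneFor(1)); // true  – the first step block is already closed
console.log(doneFor(2)); // false – the message still ends inside the last step block
```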


@ -61,6 +61,7 @@ export const Message = types.model({
type: types.enumeration(['user', 'bot', 'system']),
message: types.string,
contexts: types.maybe(types.array(ChatContext)),
confirm: types.maybe(types.boolean)
});
export const ChatStore = types.model('Chat', {
@ -82,6 +83,34 @@ export const ChatStore = types.model('Chat', {
})
.actions(self => {
const goScrollBottom = () => {
self.scrollBottom++;
};
const lastNonEmptyHash = () => {
let lastNonEmptyHash;
for (let i = self.messages.length - 1; i >= 0; i--) {
if (self.messages[i].hash) {
lastNonEmptyHash = self.messages[i].hash;
break;
}
}
return lastNonEmptyHash === 'message' ? null : lastNonEmptyHash;
};
// Process and send the message to the extension
const contextInfo = chatContexts => chatContexts.map((item, index: number) => {
const { file, path, content, command } = item;
return {
file,
context: {
path: path,
command: command,
content: content,
}
};
});
const helpMessage = (originalMessage = false) => {
let helps = `
@ -97,7 +126,7 @@ To get started, here are some of the things that I can do for you:
[/release_note: draft a release note based on your latest commits](#release_note)
${self.features['ask-code'] ? '[/ask-code: ask me questions about your codebase](#ask_code)' : ''}
${self.features['ask-code'] ? '[/ask-code: ask anything about your codebase and get answers from our AI agent](#ask_code)' : ''}
You can configure DevChat from [Settings](#settings).`;
@ -111,10 +140,83 @@ You can configure DevChat from [Settings](#settings).`;
type: 'bot',
message: helps
}));
// goto bottom
goScrollBottom();
};
const devchatAsk = (userMessage, chatContexts) => {
self.messages.push(
Message.create({
type: 'user',
contexts: chatContexts,
message: userMessage
}));
self.messages.push(
Message.create({
type: 'bot',
message: '',
confirm: true
}));
// goto bottom
goScrollBottom();
};
const startGenerating = (text: string, chatContexts) => {
self.generating = true;
self.responsed = false;
self.hasDone = false;
self.errorMessage = '';
self.currentMessage = '';
messageUtil.sendMessage({
command: 'sendMessage',
text: text,
contextInfo: contextInfo(chatContexts),
parent_hash: lastNonEmptyHash()
});
};
const sendLastUserMessage = () => {
const lastUserMessage = self.messages[self.messages.length - 2];
const lastBotMessage = self.messages[self.messages.length - 1];
if (lastUserMessage && lastUserMessage.type === 'user') {
lastBotMessage.confirm = false;
startGenerating(lastUserMessage.message, lastUserMessage.contexts);
}
};
const cancelDevchatAsk = () => {
const lastBotMessage = self.messages[self.messages.length - 1];
if (lastBotMessage && lastBotMessage.type === 'bot') {
lastBotMessage.confirm = false;
lastBotMessage.message = 'You\'ve cancelled the question. Please let me know if you have any other questions or if there\'s anything else I can assist with.';
}
};
const commonMessage = (text: string, chatContexts) => {
self.messages.push({
type: 'user',
message: text,
contexts: chatContexts
});
self.messages.push({
type: 'bot',
message: ''
});
// start generating
startGenerating(text, chatContexts);
// goto bottom
goScrollBottom();
};
return {
helpMessage,
devchatAsk,
sendLastUserMessage,
cancelDevchatAsk,
goScrollBottom,
startGenerating,
commonMessage,
updateChatPanelWidth: (width: number) => {
self.chatPanelWidth = width;
},
@ -124,38 +226,6 @@ You can configure DevChat from [Settings](#settings).`;
updateFeatures: (features: any) => {
self.features = features;
},
startGenerating: (text: string, chatContexts) => {
self.generating = true;
self.responsed = false;
self.hasDone = false;
self.errorMessage = '';
self.currentMessage = '';
let lastNonEmptyHash;
for (let i = self.messages.length - 1; i >= 0; i--) {
if (self.messages[i].hash) {
lastNonEmptyHash = self.messages[i].hash;
break;
}
}
// Process and send the message to the extension
const contextInfo = chatContexts.map((item, index: number) => {
const { file, path, content, command } = item;
return {
file,
context: {
path: path,
command: command,
content: content,
}
};
});
messageUtil.sendMessage({
command: 'sendMessage',
text: text,
contextInfo: contextInfo,
parent_hash: lastNonEmptyHash === 'message' ? null : lastNonEmptyHash
});
},
startSystemMessage: () => {
self.generating = true;
self.responsed = false;
@ -229,9 +299,6 @@ You can configure DevChat from [Settings](#settings).`;
self.isTop = false;
self.isBottom = false;
},
goScrollBottom: () => {
self.scrollBottom++;
},
fetchHistoryMessages: flow(function* (params: { pageIndex: number }) {
const { pageIndex, entries } = yield fetchHistoryMessages(params);
if (entries.length > 0) {
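To summarize the ChatStore changes above: devchatAsk pushes the user question plus an empty bot message flagged with confirm, MessageList renders that flag as the cost-confirmation card, and the card's buttons call sendLastUserMessage or cancelDevchatAsk. A hedged sketch of the two messages; the question text is made up and the import path is the project's alias as used elsewhere in the diff.

```typescript
import { Message } from "@/views/stores/ChatStore";

// The pair of messages pushed by devchatAsk; the question is made up.
const userMessage = Message.create({
  type: "user",
  message: "/ask-code How is the codebase indexed?",
});
const pendingBotMessage = Message.create({
  type: "bot",
  message: "",
  confirm: true, // triggers the "Explore with /ask-code!" card in MessageList
});

// "Yes" -> chat.sendLastUserMessage(): clears confirm and calls startGenerating(...)
// "No"  -> chat.cancelDevchatAsk(): clears confirm and swaps in a cancellation note
```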


@ -1,210 +1,35 @@
import os
import re
import sys
import json
import tempfile
import uuid
from chat.ask_codebase.chains.smart_qa import SmartQA
from chat.ask_codebase.store.qdrant import QdrantWrapper as Q, get_client
from chat.ask_codebase.indexing.embedding import EmbeddingWrapper as E
def query(question, lsp_brige_port):
root_path = os.getcwd()
from langchain.embeddings import HuggingFaceEmbeddings
from chat.ask_codebase.indexing.loader.file import (
FileLoader,
FileSource,
gen_local_reference_maker,
)
from chat.util.misc import is_source_code
from chat.ask_codebase.chains.simple_qa import SimpleQA
from chat.ask_codebase.chains.stuff_dc_qa import StuffDocumentCodeQa
# Create an instance of SmartQA
smart_qa = SmartQA(root_path)
# Use SmartQA to get the answer
answer = smart_qa.run(question=question, verbose=False, dfs_depth=3, dfs_max_visit=10, bridge_url=f'http://localhost:{lsp_brige_port}' )
def get_app_data_dir(app_name):
home = os.path.expanduser("~")
if os.name == "nt": # For Windows
appPath = os.path.join(home, "AppData", "Roaming", app_name)
else: # For Unix and Linux
appPath = os.path.join(home, ".local", "share", app_name)
if not os.path.exists(appPath):
os.makedirs(appPath)
return appPath
supportedFileTypes = []
STORAGE_FILE = os.path.join(get_app_data_dir("devchat"), "qdrant_storage2")
SOURCE_NAME = ""
# Record the last-modified time of files that have already been analyzed
g_file_last_modified_saved = {}
def load_file_last_modified(filePath: str):
# filePath is the file that stores the last-modified times; its contents are stored as JSON
# If the file does not exist, no analysis has been done yet; return an empty dict
if not os.path.exists(filePath):
return {}
# If the file exists, read it and parse the recorded last-modified time of each file
with open(filePath, 'r', encoding="utf-8") as f:
fileLastModified = json.load(f)
return fileLastModified
def save_file_last_modified(filePath: str, fileLastModified: dict):
# filePath is the file that stores the last-modified times; its contents are stored as JSON
with open(filePath, 'w+', encoding="utf-8") as f:
json.dump(fileLastModified, f)
return fileLastModified
def is_source_code_new(filePath: str):
# Use regular expressions to determine whether a file is a source-code file
for pattern in supportedFileTypes:
if re.match(pattern.strip(), filePath):
return True
return False
def is_file_modified(filePath: str) -> bool:
if not is_source_code_new(filePath):
return False
# Get the current working directory
currentPath = os.getcwd()
# Convert filePath to a relative path
relativePath = os.path.relpath(filePath, currentPath)
# Check whether the path contains a '.xxx'-style (hidden) directory
for part in relativePath.split(os.sep):
if part.startswith('.') or part in ["node_modules", "__pycache__"]:
return False
# Get the last-modified time recorded when the file was last analyzed
fileLastModified = g_file_last_modified_saved.get(relativePath, 0)
# Get the file's current last-modified time
fileCurrentModified = os.path.getmtime(filePath)
# If the times differ, update the recorded time and return True
if fileLastModified != fileCurrentModified:
g_file_last_modified_saved[relativePath] = fileCurrentModified
return True
return False
def index(repo_path: str):
try:
client = get_client(STORAGE_FILE)
source = FileSource(
path=repo_path,
rel_root=repo_path,
ref_maker=gen_local_reference_maker(repo_path),
file_filter=is_file_modified,
)
loader = FileLoader(sources=[source])
documents = loader.load()
e = E(embedding=HuggingFaceEmbeddings())
data = e.embed(documents)
q = Q.create(
source_name=SOURCE_NAME,
embedding_cls=HuggingFaceEmbeddings,
client=client,
)
q.insert(data)
except Exception as e:
print(e)
sys.exit(1)
import json
def query(question: str, doc_context: str):
try:
client = get_client(mode=STORAGE_FILE)
q = Q.reuse(
source_name=SOURCE_NAME,
embedding_cls=HuggingFaceEmbeddings,
client=client,
)
chain = StuffDocumentCodeQa(q)
ans, docs = chain.run(question)
print(f"\n# Question: \n{question}")
print(f"\n# Answer: \n{ans}")
print(f"\n# Relevant Documents: \n")
doc_dict = {"path": "AskCode Context","content": json.dumps([{"filepath": d.metadata.get('filepath'), "content": d.page_content} for d in docs])}
with open(doc_context, 'w') as f:
json.dump(doc_dict, f)
for d in docs:
print(f"- filepath: {d.metadata.get('filepath')}")
print(f" location: {d.metadata.get('reference')}\n")
print(f"Save doc context to {doc_context}")
except Exception as e:
print(e)
sys.exit(1)
# Print the answer
print(answer[0])
def main():
try:
global supportedFileTypes
if len(sys.argv) < 2:
print("Usage: python index_and_query.py [command] [args]")
print("Available commands: index, query")
sys.exit(1)
command = sys.argv[1]
if command == "index":
if len(sys.argv) < 4:
print("Usage: python index_and_query.py index [repo_path] [supportedFileTypes]")
sys.exit(1)
repo_path = sys.argv[2]
# Get the value of supportedFileTypes
supportedFileTypes = sys.argv[3].split(',')
index(repo_path)
elif command == "query":
if len(sys.argv) < 4:
print("Usage: python index_and_query.py query [question] [doc_context]")
print("Usage: python index_and_query.py query [question] [port]")
sys.exit(1)
question = sys.argv[2]
doc_context = sys.argv[3]
query(question, doc_context)
else:
print("Invalid command. Available commands: index, query")
sys.exit(1)
port = sys.argv[3]
query(question, port)
sys.exit(0)
except Exception as e:
print(e)
sys.exit(1)
if __name__ == "__main__":
try:
currentPath = os.getcwd()
g_file_last_modified_saved = load_file_last_modified('./.chat/.index_modified.json')
if os.path.exists(".chat/askcode.json"):
with open(".chat/askcode.json", "r") as f:
askcode_data = json.load(f)
SOURCE_NAME = askcode_data.get("SOURCE_NAME", str(uuid.uuid4()))
else:
SOURCE_NAME = str(uuid.uuid4())
currentPath = os.getcwd()
with open(".chat/askcode.json", "w+") as f:
json.dump({"SOURCE_NAME": SOURCE_NAME}, f)
main()
save_file_last_modified('./.chat/.index_modified.json', g_file_last_modified_saved)
sys.exit(0)
except Exception as e:
print(e)
sys.exit(1)