diff --git a/gui b/gui index 2af2f07..96f6f79 160000 --- a/gui +++ b/gui @@ -1 +1 @@ -Subproject commit 2af2f079c1d719c1ff0463ce89ea84397860e2ab +Subproject commit 96f6f79d3c41d2920adcd5bafab06aa95c93de99 diff --git a/package.json b/package.json index 6cf2f16..69b9164 100644 --- a/package.json +++ b/package.json @@ -1,840 +1,858 @@ { - "name": "devchat", - "displayName": "DevChat", - "description": "Write prompts, not code", - "version": "0.1.65", - "icon": "assets/devchat.png", - "publisher": "merico", - "engines": { - "vscode": "^1.75.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/devchat-ai/devchat-vscode.git" - }, - "categories": [ - "Programming Languages", - "Snippets", - "Machine Learning", - "Education" - ], - "keywords": [ - "ai", - "anthropic", - "assistant", - "autocomplete", - "bot", - "chat", - "chatbot", - "codegen", - "c#", - "c++", - "codex", - "co-pilot", - "devchat", - "documentation", - "go", - "golang", - "intellisense", - "java", - "javascript", - "keybindings", - "kotlin", - "llm", - "model", - "openai", - "php", - "pilot", - "python", - "refactor", - "ruby", - "sourcegraph", - "snippets", - "test", - "typescript" - ], - "activationEvents": [ - "*" - ], - "main": "./dist/extension.js", - "files": [ - "dist/*", - "bin/*", - "assets/*", - "tools/*", - "workflows/*", - "workflowsCommands/*", - "LICENSE", - "README.md" - ], - "contributes": { - "configuration": { - "title": "DevChat", - "properties": { - "devchat.Provider.devchat": { - "type": "object", - "properties": { - "access_key": { - "type": "string", - "default": "", - "description": "[required*] Specify access key for selected provider." - }, - "api_base": { - "type": "string", - "default": "", - "description": "[optional*] Specify the api base for selected provider. Leave it blank if you want to use default api base." 
- } - }, - "required": [ - "access_key" - ], - "additionalProperties": false, - "order": 0, - "markdownDescription": "Specify the properties for devchat provider." - }, - "devchat.Provider.openai": { - "type": "object", - "properties": { - "access_key": { - "type": "string", - "default": "", - "description": "[required*] Specify access key for selected provider." - }, - "api_base": { - "type": "string", - "default": "", - "description": "[optional*] Specify the api base for selected provider. Leave it blank if you want to use default api base." - } - }, - "required": [ - "access_key" - ], - "additionalProperties": false, - "order": 1, - "markdownDescription": "Specify the properties for openai provider." - }, - "devchat.Model.gpt-3-5": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat", - "openai" - ], - "description": "[required*] Specify which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.3, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 1000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 13000, - "description": "[optional*] Maximum text length for input to AI." - }, - "presence_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics." 
- }, - "frequency_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." - } - }, - "required": [ - "provider", - "key" - ], - "additionalProperties": false, - "order": 2, - "markdownDescription": "Specify the properties for gpt-3.5-turbo model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" - }, - "devchat.Model.gpt-4": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat", - "openai" - ], - "description": "[required*] Specify which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.3, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 1000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 6000, - "description": "[optional*] Maximum text length for input to AI." - }, - "presence_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics." - }, - "frequency_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." - } - }, - "additionalProperties": false, - "order": 5, - "markdownDescription": "properties for gpt-4 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" - }, - "devchat.Model.gpt-4-turbo": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat", - "openai" - ], - "description": "[required*] Specify which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.3, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 1000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 32000, - "description": "[optional*] Maximum text length for input to AI." - }, - "presence_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics." - }, - "frequency_penalty": { - "type": "number", - "default": 0, - "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." 
- } - }, - "additionalProperties": false, - "order": 6, - "markdownDescription": "properties for gpt-4-turbo model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" - }, - "devchat.Model.claude-2": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.3, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 1000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 32000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 7, - "markdownDescription": "properties for claude-2 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.xinghuo-2": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." 
- }, - "max_tokens": { - "type": "number", - "default": 2048, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 6000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 8, - "markdownDescription": "properties for xinghuo-2 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.chatglm_pro": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 8000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 9, - "markdownDescription": "properties for chatglm_pro model. Leave it blank if you won't use this llm model. 
[how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.ERNIE-Bot": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 8000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 10, - "markdownDescription": "properties for ERNIE-Bot model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.CodeLlama-70b": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." 
- }, - "max_tokens": { - "type": "number", - "default": 2000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 11, - "markdownDescription": "properties for togetherai/codellama/CodeLlama-70b-Instruct-hf. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.Mixtral-8x7B": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 2000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 11, - "markdownDescription": "properties for togetherai/mistralai/Mixtral-8x7B-Instruct-v0.1. Leave it blank if you won't use this llm model. 
[how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.llama-2-70b-chat": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." - }, - "max_tokens": { - "type": "number", - "default": 2000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 12, - "markdownDescription": "properties for llama-2-70b-chat. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.Model.Minimax-abab6": { - "type": "object", - "properties": { - "provider": { - "type": "string", - "default": "devchat", - "enum": [ - "devchat" - ], - "description": "[required*] which provider host this llm model" - }, - "temperature": { - "type": "number", - "default": 0.5, - "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." 
- }, - "max_tokens": { - "type": "number", - "default": 2000, - "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." - }, - "max_input_tokens": { - "type": "number", - "default": 4000, - "description": "[optional*] Maximum text length for input to AI." - } - }, - "additionalProperties": false, - "order": 12, - "markdownDescription": "properties for minimax/abab6-chat. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " - }, - "devchat.defaultModel": { - "oneOf": [ - { - "type": "string", - "default": "gpt-3.5-turbo", - "enum": [ - "gpt-3.5-turbo", - "gpt-4", - "gpt-4-turbo-preview", - "claude-2.1", - "xinghuo-3.5", - "GLM-4", - "ERNIE-Bot-4.0", - "togetherai/codellama/CodeLlama-70b-Instruct-hf", - "togetherai/mistralai/Mixtral-8x7B-Instruct-v0.1", - "minimax/abab6-chat", - "llama-2-70b-chat" - ] - }, - { - "type": "string", - "default": "gpt-3.5-turbo" - } - ], - "order": 13, - "markdownDescription": "Specify the default llm model for DevChat. [Price of each model](https://web.devchat.ai/pricing)" - }, - "DevChat.OpenAI.stream": { - "type": "boolean", - "default": true, - "order": 14, - "description": "Whether to stream a response." - }, - "DevChat.EnableFunctionCalling": { - "type": "boolean", - "default": false, - "order": 15, - "description": "Enable function calling for GPT." - }, - "DevChat.betaInvitationCode": { - "type": "string", - "default": "", - "order": 17, - "description": "The invitation code for beta testing." - }, - "DevChat.maxLogCount": { - "type": "number", - "default": 20, - "order": 18, - "description": "Limit the number of prompts in the chat view." 
- }, - "DevChat.PythonForChat": { - "type": "string", - "default": "", - "input": { - "type": "file", - "filter": { - "All files": [ - "python*" - ] - } - }, - "description": "Which Python interpreter to use with DevChat?", - "order": 19 - }, - "DevChat.PythonForCommands": { - "type": "string", - "default": "", - "description": "Path to the Python virtual environment for AskCode.", - "order": 20 - }, - "DevChat.Language": { - "type": "string", - "default": "", - "enum": [ - "en", - "zh" - ], - "enumDescriptions": [ - "English", - "Simplified Chinese" - ], - "description": "The language used for DevChat interface.", - "order": 21 - } - } - }, - "viewsContainers": { - "activitybar": [ - { - "id": "devchat-sidebar", - "title": "DevChat", - "icon": "assets/devchat_icon.svg" - } - ] - }, - "views": { - "devchat-sidebar": [ - { - "type": "webview", - "id": "devchat-view", - "name": "DevChat" - } - ] - }, - "commands": [ - { - "command": "devchat.applyDiffResult", - "title": "Apply Diff", - "icon": "assets/devchat_apply.svg" - }, - { - "command": "devchat.createEntry", - "title": "Create Entry" - }, - { - "command": "DevChat.AccessKey.OpenAI", - "title": "Input OpenAI API Key", - "category": "DevChat" - }, - { - "command": "DevChat.AccessKey.DevChat", - "title": "Input DevChat Access Key", - "category": "DevChat" - }, - { - "command": "DevChat.PythonPath", - "title": "Set Python Path", - "category": "DevChat" - }, - { - "command": "devchat.openChatPanel", - "title": "DevChat" - }, - { - "command": "devchat.addContext", - "title": "Add to DevChat" - }, - { - "command": "devchat.askForCode", - "title": "Add to DevChat" - }, - { - "command": "devchat.askForFile", - "title": "Add to DevChat" - }, - { - "command": "devchat.addConext_chinese", - "title": "添加到DevChat" - }, - { - "command": "devchat.askForCode_chinese", - "title": "添加到DevChat" - }, - { - "command": "devchat.askForFile_chinese", - "title": "添加到DevChat" - }, - { - "command": "DevChat.InstallCommands", - "title": 
"Install slash commands", - "category": "DevChat" - }, - { - "command": "DevChat.UpdataChatModels", - "title": "Update Chat Models", - "category": "DevChat" - }, - { - "command": "DevChat.InstallCommandPython", - "title": "Install Python for Commands", - "category": "DevChat" - }, - { - "command": "DevChat.Chat", - "title": "Chat with DevChat", - "category": "DevChat" - } - ], - "keybindings": [ - { - "command": "devchat.openChatPanel", - "key": "ctrl+shift+/", - "mac": "cmd+shift+/" - } - ], - "menus": { - "editor/title": [ - { - "command": "devchat.applyDiffResult", - "group": "navigation", - "when": "editorTextFocus && isInDiffEditor" - } - ], - "commandPalette": [ - { - "command": "devchat.applyDiffResult", - "when": "false" - }, - { - "command": "devchat.addContext", - "when": "false" - }, - { - "command": "devchat.askForCode", - "when": "false" - }, - { - "command": "devchat.askForFile", - "when": "false" - }, - { - "command": "devchat.addConext_chinese", - "when": "false" - }, - { - "command": "devchat.askForCode_chinese", - "when": "false" - }, - { - "command": "devchat.askForFile_chinese", - "when": "false" - }, - { - "command": "DevChat.Chat", - "when": "false" - } - ], - "explorer/context": [ - { - "when": "isChineseLocale && resourceLangId != 'git'", - "command": "devchat.addConext_chinese", - "group": "navigation" - }, - { - "when": "!isChineseLocale && resourceLangId != 'git'", - "command": "devchat.addContext", - "group": "navigation" - } - ], - "editor/context": [ - { - "command": "devchat.askForCode_chinese", - "when": "isChineseLocale && editorTextFocus && editorHasSelection", - "group": "navigation" - }, - { - "command": "devchat.askForCode", - "when": "!isChineseLocale && editorTextFocus && editorHasSelection", - "group": "navigation" - }, - { - "command": "devchat.askForFile", - "when": "!isChineseLocale && editorTextFocus && !editorHasSelection", - "group": "navigation" - }, - { - "command": "devchat.askForFile_chinese", - "when": 
"isChineseLocale && editorTextFocus && !editorHasSelection", - "group": "navigation" - } - ] - } - }, - "scripts": { - "build:gui": "cd ./gui && yarn && yarn vscode", - "vscode:uninstall": "node ./dist/uninstall", - "vscode:prepublish": "npm run package", - "compile": "webpack", - "watch": "webpack --watch", - "package": "webpack --mode production --devtool hidden-source-map", - "compile-tests": "tsc -p . --outDir out", - "watch-tests": "tsc -p . -w --outDir out", - "pretest": "npm run compile-tests && npm run compile && npm run lint", - "lint": "eslint src --ext ts", - "test": "mocha", - "build": "webpack --config webpack.config.js && cd ./gui && yarn && yarn vscode", - "dev": "webpack serve --config webpack.config.js --open", - "idea": "webpack --config webpack.idea.config.js && mv dist/main.js dist/main.html ../devchat-intellij/src/main/resources/static && echo '🎆done'" - }, - "devDependencies": { - "@babel/core": "^7.21.8", - "@babel/preset-env": "^7.23.6", - "@babel/preset-react": "^7.23.3", - "@babel/preset-typescript": "^7.21.5", - "@types/chai": "^4.3.5", - "@types/glob": "^8.1.0", - "@types/mocha": "^10.0.1", - "@types/mock-fs": "^4.13.1", - "@types/ncp": "^2.0.5", - "@types/node": "16.x", - "@types/proxyquire": "^1.3.28", - "@types/shell-escape": "^0.2.1", - "@types/sinon": "^10.0.15", - "@types/uuid": "^9.0.1", - "@types/vscode": "^1.75.0", - "@typescript-eslint/eslint-plugin": "^5.56.0", - "@typescript-eslint/parser": "^5.56.0", - "@vscode/test-electron": "^2.3.0", - "babel-loader": "^9.1.2", - "chai": "^4.3.7", - "copy-webpack-plugin": "^11.0.0", - "dotenv": "^16.0.3", - "eslint": "^8.36.0", - "file-loader": "^6.2.0", - "glob": "^8.1.0", - "html-webpack-plugin": "^5.5.1", - "jest": "^29.5.0", - "json-loader": "^0.5.7", - "mocha": "^10.2.0", - "mock-fs": "^5.2.0", - "proxyquire": "^2.1.3", - "sinon": "^15.1.0", - "ts-jest": "^29.1.0", - "ts-loader": "^9.4.2", - "ts-node": "^10.9.1", - "typescript": "^4.9.5", - "vscode-test": "^1.6.1", - "webpack": 
"^5.76.3", - "webpack-cli": "^5.0.1", - "webpack-dev-server": "^4.13.3" - }, - "dependencies": { - "@tiptap/extension-link": "^2.0.3", - "@tiptap/pm": "^2.0.0", - "@tiptap/starter-kit": "^2.0.3", - "axios": "^1.3.6", - "clean-webpack-plugin": "^4.0.0", - "dayjs": "^1.11.10", - "dotenv": "^16.0.3", - "js-yaml": "^4.1.0", - "mdast": "^3.0.0", - "mobx": "^6.12.0", - "ncp": "^2.0.0", - "node-fetch": "^3.3.1", - "nonce": "^1.0.4", - "quote": "^0.4.0", - "rehype-raw": "^6.1.1", - "shell-escape": "^0.2.0", - "string-argv": "^0.3.2", - "tree-kill": "^1.2.2", - "unified": "^11.0.3", - "unist-util-visit": "^5.0.0", - "uuid": "^9.0.0", - "yaml": "^2.3.2" - } -} \ No newline at end of file + "name": "devchat", + "displayName": "DevChat", + "description": "Write prompts, not code", + "version": "0.1.65", + "icon": "assets/devchat.png", + "publisher": "merico", + "engines": { + "vscode": "^1.75.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/devchat-ai/devchat-vscode.git" + }, + "categories": [ + "Programming Languages", + "Snippets", + "Machine Learning", + "Education" + ], + "keywords": [ + "ai", + "anthropic", + "assistant", + "autocomplete", + "bot", + "chat", + "chatbot", + "codegen", + "c#", + "c++", + "codex", + "co-pilot", + "devchat", + "documentation", + "go", + "golang", + "intellisense", + "java", + "javascript", + "keybindings", + "kotlin", + "llm", + "model", + "openai", + "php", + "pilot", + "python", + "refactor", + "ruby", + "sourcegraph", + "snippets", + "test", + "typescript" + ], + "activationEvents": [ + "*" + ], + "main": "./dist/extension.js", + "files": [ + "dist/*", + "bin/*", + "assets/*", + "tools/*", + "workflows/*", + "workflowsCommands/*", + "LICENSE", + "README.md" + ], + "contributes": { + "configuration": { + "title": "DevChat", + "properties": { + "devchat.Provider.devchat": { + "type": "object", + "properties": { + "access_key": { + "type": "string", + "default": "", + "description": "[required*] Specify access key for 
selected provider." + }, + "api_base": { + "type": "string", + "default": "", + "description": "[optional*] Specify the api base for selected provider. Leave it blank if you want to use default api base." + } + }, + "required": [ + "access_key" + ], + "additionalProperties": false, + "order": 0, + "markdownDescription": "Specify the properties for devchat provider." + }, + "devchat.Provider.openai": { + "type": "object", + "properties": { + "access_key": { + "type": "string", + "default": "", + "description": "[required*] Specify access key for selected provider." + }, + "api_base": { + "type": "string", + "default": "", + "description": "[optional*] Specify the api base for selected provider. Leave it blank if you want to use default api base." + } + }, + "required": [ + "access_key" + ], + "additionalProperties": false, + "order": 1, + "markdownDescription": "Specify the properties for openai provider." + }, + "devchat.Model.gpt-3-5": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat", + "openai" + ], + "description": "[required*] Specify which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.3, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 1000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 13000, + "description": "[optional*] Maximum text length for input to AI." + }, + "presence_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics." + }, + "frequency_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." + } + }, + "required": [ + "provider" + ], + + "additionalProperties": false, + "order": 2, + "markdownDescription": "Specify the properties for gpt-3.5-turbo model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" + }, + "devchat.Model.gpt-4": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat", + "openai" + ], + "description": "[required*] Specify which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.3, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 1000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 6000, + "description": "[optional*] Maximum text length for input to AI." + }, + "presence_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics."
+ }, + "frequency_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." + } + }, + "additionalProperties": false, + "order": 5, + "markdownDescription": "properties for gpt-4 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" + }, + "devchat.Model.gpt-4-turbo": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat", + "openai" + ], + "description": "[required*] Specify which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.3, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 1000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 32000, + "description": "[optional*] Maximum text length for input to AI." + }, + "presence_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics." + }, + "frequency_penalty": { + "type": "number", + "default": 0, + "description": "[optional*] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." + } + }, + "additionalProperties": false, + "order": 6, + "markdownDescription": "properties for gpt-4-turbo model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature)" + }, + "devchat.Model.claude-2": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.3, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 1000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 32000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 7, + "markdownDescription": "properties for claude-2 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.xinghuo-2": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 2048, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 6000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 8, + "markdownDescription": "properties for xinghuo-2 model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.chatglm_pro": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 8000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 9, + "markdownDescription": "properties for chatglm_pro model. Leave it blank if you won't use this llm model. 
[how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.ERNIE-Bot": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 8000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 10, + "markdownDescription": "properties for ERNIE-Bot model. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.CodeLlama-70b": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." 
+ }, + "max_tokens": { + "type": "number", + "default": 2000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 11, + "markdownDescription": "properties for togetherai/codellama/CodeLlama-70b-Instruct-hf. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.Mixtral-8x7B": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 2000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 11, + "markdownDescription": "properties for togetherai/mistralai/Mixtral-8x7B-Instruct-v0.1. Leave it blank if you won't use this llm model. 
[how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.llama-2-70b-chat": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." + }, + "max_tokens": { + "type": "number", + "default": 2000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 12, + "markdownDescription": "properties for llama-2-70b-chat. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.Model.Minimax-abab6": { + "type": "object", + "properties": { + "provider": { + "type": "string", + "default": "devchat", + "enum": [ + "devchat" + ], + "description": "[required*] which provider host this llm model" + }, + "temperature": { + "type": "number", + "default": 0.5, + "description": "[optional*] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic." 
+ }, + "max_tokens": { + "type": "number", + "default": 2000, + "description": "[optional*] The maximum number of tokens to generate in the chat completion.\nThe total length of input tokens and generated tokens is limited by the model's context length. Example Python code for counting tokens." + }, + "max_input_tokens": { + "type": "number", + "default": 4000, + "description": "[optional*] Maximum text length for input to AI." + } + }, + "additionalProperties": false, + "order": 12, + "markdownDescription": "properties for minimax/abab6-chat. Leave it blank if you won't use this llm model. [how to set?](https://platform.openai.com/docs/api-reference/chat/create#temperature) " + }, + "devchat.defaultModel": { + "oneOf": [ + { + "type": "string", + "default": "gpt-3.5-turbo", + "enum": [ + "gpt-3.5-turbo", + "gpt-4", + "gpt-4-turbo-preview", + "claude-2.1", + "xinghuo-3.5", + "GLM-4", + "ERNIE-Bot-4.0", + "togetherai/codellama/CodeLlama-70b-Instruct-hf", + "togetherai/mistralai/Mixtral-8x7B-Instruct-v0.1", + "minimax/abab6-chat", + "llama-2-70b-chat" + ] + }, + { + "type": "string", + "default": "gpt-3.5-turbo" + } + ], + "order": 13, + "markdownDescription": "Specify the default llm model for DevChat. [Price of each model](https://web.devchat.ai/pricing)" + }, + "DevChat.OpenAI.stream": { + "type": "boolean", + "default": true, + "order": 14, + "description": "Whether to stream a response." + }, + "DevChat.EnableFunctionCalling": { + "type": "boolean", + "default": false, + "order": 15, + "description": "Enable function calling for GPT." + }, + "DevChat.betaInvitationCode": { + "type": "string", + "default": "", + "order": 17, + "description": "The invitation code for beta testing." + }, + "DevChat.maxLogCount": { + "type": "number", + "default": 20, + "order": 18, + "description": "Limit the number of prompts in the chat view." 
+ }, + "DevChat.PythonForChat": { + "type": "string", + "default": "", + "input": { + "type": "file", + "filter": { + "All files": [ + "python*" + ] + } + }, + "description": "Which Python interpreter to use with DevChat?", + "order": 19 + }, + "DevChat.PythonForCommands": { + "type": "string", + "default": "", + "description": "Path to the Python virtual environment for AskCode.", + "order": 20 + }, + "DevChat.Language": { + "type": "string", + "default": "", + "enum": [ + "en", + "zh" + ], + "enumDescriptions": [ + "English", + "Simplified Chinese" + ], + "description": "The language used for DevChat interface.", + "order": 21 + } + } + }, + "viewsContainers": { + "activitybar": [ + { + "id": "devchat-sidebar", + "title": "DevChat", + "icon": "assets/devchat_icon.svg" + } + ] + }, + "views": { + "devchat-sidebar": [ + { + "type": "webview", + "id": "devchat-view", + "name": "DevChat" + } + ] + }, + "commands": [ + { + "command": "devchat.applyDiffResult", + "title": "Apply Diff", + "icon": "assets/devchat_apply.svg" + }, + { + "command": "devchat.createEntry", + "title": "Create Entry" + }, + { + "command": "DevChat.AccessKey.OpenAI", + "title": "Input OpenAI API Key", + "category": "DevChat" + }, + { + "command": "DevChat.AccessKey.DevChat", + "title": "Input DevChat Access Key", + "category": "DevChat" + }, + { + "command": "DevChat.PythonPath", + "title": "Set Python Path", + "category": "DevChat" + }, + { + "command": "devchat.openChatPanel", + "title": "DevChat" + }, + { + "command": "devchat.addContext", + "title": "Add to DevChat" + }, + { + "command": "devchat.askForCode", + "title": "Add to DevChat" + }, + { + "command": "devchat.askForFile", + "title": "Add to DevChat" + }, + { + "command": "devchat.addConext_chinese", + "title": "添加到DevChat" + }, + { + "command": "devchat.askForCode_chinese", + "title": "添加到DevChat" + }, + { + "command": "devchat.askForFile_chinese", + "title": "添加到DevChat" + }, + { + "command": "DevChat.InstallCommands", + "title": 
"Install slash commands", + "category": "DevChat" + }, + { + "command": "DevChat.UpdataChatModels", + "title": "Update Chat Models", + "category": "DevChat" + }, + { + "command": "DevChat.InstallCommandPython", + "title": "Install Python for Commands", + "category": "DevChat" + }, + { + "command": "DevChat.Chat", + "title": "Chat with DevChat", + "category": "DevChat" + }, + { + "command": "devchat.explain", + "title": "Explain" + }, + { + "command": "devchat.explain_chinese", + "title": "代码解释" + } + ], + "keybindings": [ + { + "command": "devchat.openChatPanel", + "key": "ctrl+shift+/", + "mac": "cmd+shift+/" + } + ], + "menus": { + "editor/title": [ + { + "command": "devchat.applyDiffResult", + "group": "navigation", + "when": "editorTextFocus && isInDiffEditor" + } + ], + "commandPalette": [ + { + "command": "devchat.applyDiffResult", + "when": "false" + }, + { + "command": "devchat.addContext", + "when": "false" + }, + { + "command": "devchat.askForCode", + "when": "false" + }, + { + "command": "devchat.askForFile", + "when": "false" + }, + { + "command": "devchat.addConext_chinese", + "when": "false" + }, + { + "command": "devchat.askForCode_chinese", + "when": "false" + }, + { + "command": "devchat.askForFile_chinese", + "when": "false" + }, + { + "command": "DevChat.Chat", + "when": "false" + } + ], + "explorer/context": [ + { + "when": "isChineseLocale && resourceLangId != 'git'", + "command": "devchat.addConext_chinese", + "group": "navigation" + }, + { + "when": "!isChineseLocale && resourceLangId != 'git'", + "command": "devchat.addContext", + "group": "navigation" + } + ], + "editor/context": [ + { + "command": "devchat.askForCode_chinese", + "when": "isChineseLocale && editorTextFocus && editorHasSelection", + "group": "navigation" + }, + { + "command": "devchat.askForCode", + "when": "!isChineseLocale && editorTextFocus && editorHasSelection", + "group": "navigation" + }, + { + "command": "devchat.askForFile", + "when": "!isChineseLocale && 
editorTextFocus && !editorHasSelection", + "group": "navigation" + }, + { + "command": "devchat.askForFile_chinese", + "when": "isChineseLocale && editorTextFocus && !editorHasSelection", + "group": "navigation" + }, + { + "command": "devchat.explain", + "when": "!isChineseLocale && editorTextFocus && editorHasSelection", + "group": "navigation" + }, + { + "command": "devchat.explain_chinese", + "when": "isChineseLocale && editorTextFocus && editorHasSelection", + "group": "navigation" + } + ] + } + }, + "scripts": { + "build:gui": "cd ./gui && yarn && yarn vscode", + "vscode:uninstall": "node ./dist/uninstall", + "vscode:prepublish": "npm run package", + "compile": "webpack", + "watch": "webpack --watch", + "package": "webpack --mode production --devtool hidden-source-map", + "compile-tests": "tsc -p . --outDir out", + "watch-tests": "tsc -p . -w --outDir out", + "pretest": "npm run compile-tests && npm run compile && npm run lint", + "lint": "eslint src --ext ts", + "test": "mocha", + "build": "webpack --config webpack.config.js && cd ./gui && yarn && yarn vscode", + "dev": "webpack serve --config webpack.config.js --open", + "idea": "webpack --config webpack.idea.config.js && mv dist/main.js dist/main.html ../devchat-intellij/src/main/resources/static && echo '🎆done'" + }, + "devDependencies": { + "@babel/core": "^7.21.8", + "@babel/preset-env": "^7.23.6", + "@babel/preset-react": "^7.23.3", + "@babel/preset-typescript": "^7.21.5", + "@types/chai": "^4.3.5", + "@types/glob": "^8.1.0", + "@types/mocha": "^10.0.1", + "@types/mock-fs": "^4.13.1", + "@types/ncp": "^2.0.5", + "@types/node": "16.x", + "@types/proxyquire": "^1.3.28", + "@types/shell-escape": "^0.2.1", + "@types/sinon": "^10.0.15", + "@types/uuid": "^9.0.1", + "@types/vscode": "^1.75.0", + "@typescript-eslint/eslint-plugin": "^5.56.0", + "@typescript-eslint/parser": "^5.56.0", + "@vscode/test-electron": "^2.3.0", + "babel-loader": "^9.1.2", + "chai": "^4.3.7", + "copy-webpack-plugin": "^11.0.0", + 
"dotenv": "^16.0.3", + "eslint": "^8.36.0", + "file-loader": "^6.2.0", + "glob": "^8.1.0", + "html-webpack-plugin": "^5.5.1", + "jest": "^29.5.0", + "json-loader": "^0.5.7", + "mocha": "^10.2.0", + "mock-fs": "^5.2.0", + "proxyquire": "^2.1.3", + "sinon": "^15.1.0", + "ts-jest": "^29.1.0", + "ts-loader": "^9.4.2", + "ts-node": "^10.9.1", + "typescript": "^4.9.5", + "vscode-test": "^1.6.1", + "webpack": "^5.76.3", + "webpack-cli": "^5.0.1", + "webpack-dev-server": "^4.13.3" + }, + "dependencies": { + "@tiptap/extension-link": "^2.0.3", + "@tiptap/pm": "^2.0.0", + "@tiptap/starter-kit": "^2.0.3", + "axios": "^1.3.6", + "clean-webpack-plugin": "^4.0.0", + "dayjs": "^1.11.10", + "dotenv": "^16.0.3", + "js-yaml": "^4.1.0", + "mdast": "^3.0.0", + "mobx": "^6.12.0", + "ncp": "^2.0.0", + "node-fetch": "^3.3.1", + "nonce": "^1.0.4", + "quote": "^0.4.0", + "rehype-raw": "^6.1.1", + "shell-escape": "^0.2.0", + "string-argv": "^0.3.2", + "tree-kill": "^1.2.2", + "unified": "^11.0.3", + "unist-util-visit": "^5.0.0", + "uuid": "^9.0.0", + "yaml": "^2.3.2" + } +} diff --git a/src/contributes/commands.ts b/src/contributes/commands.ts index 4de2f18..463f09a 100644 --- a/src/contributes/commands.ts +++ b/src/contributes/commands.ts @@ -1,23 +1,29 @@ -import * as vscode from 'vscode'; -import * as fs from 'fs'; -import * as os from 'os'; -import * as path from 'path'; -import * as util from 'util'; -import { sendFileSelectMessage, sendCodeSelectMessage } from './util'; -import { ExtensionContextHolder } from '../util/extensionContext'; -import { FilePairManager } from '../util/diffFilePairs'; -import { ApiKeyManager } from '../util/apiKey'; -import { UiUtilWrapper } from '../util/uiUtil'; -import { isValidApiKey } from '../handler/historyMessagesBase'; +import * as vscode from "vscode"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import * as util from "util"; +import { sendFileSelectMessage, sendCodeSelectMessage } from "./util"; +import { 
ExtensionContextHolder } from "../util/extensionContext"; +import { FilePairManager } from "../util/diffFilePairs"; +import { ApiKeyManager } from "../util/apiKey"; +import { UiUtilWrapper } from "../util/uiUtil"; +import { isValidApiKey } from "../handler/historyMessagesBase"; -import { logger } from '../util/logger'; +import { logger } from "../util/logger"; -import { sendCommandListByDevChatRun, updateChatModels } from '../handler/workflowCommandHandler'; +import { + sendCommandListByDevChatRun, + updateChatModels, +} from "../handler/workflowCommandHandler"; import DevChat from "../toolwrapper/devchat"; -import { createEnvByConda, createEnvByMamba } from '../util/python_installer/app_install'; -import { installRequirements } from '../util/python_installer/package_install'; -import { chatWithDevChat } from '../handler/chatHandler'; -import { focusDevChatInput } from '../handler/focusHandler'; +import { + createEnvByConda, + createEnvByMamba, +} from "../util/python_installer/app_install"; +import { installRequirements } from "../util/python_installer/package_install"; +import { chatWithDevChat } from "../handler/chatHandler"; +import { focusDevChatInput } from "../handler/focusHandler"; const readdir = util.promisify(fs.readdir); const stat = util.promisify(fs.stat); @@ -25,348 +31,529 @@ const mkdir = util.promisify(fs.mkdir); const copyFile = util.promisify(fs.copyFile); async function copyDirectory(src: string, dest: string): Promise { - await mkdir(dest, { recursive: true }); - const entries = await readdir(src, { withFileTypes: true }); + await mkdir(dest, { recursive: true }); + const entries = await readdir(src, { withFileTypes: true }); - for (let entry of entries) { - const srcPath = path.join(src, entry.name); - const destPath = path.join(dest, entry.name); + for (let entry of entries) { + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); - if (entry.isDirectory()) { - await copyDirectory(srcPath, destPath); - 
} else { - await copyFile(srcPath, destPath); - } + if (entry.isDirectory()) { + await copyDirectory(srcPath, destPath); + } else { + await copyFile(srcPath, destPath); } + } } function registerOpenChatPanelCommand(context: vscode.ExtensionContext) { - let disposable = vscode.commands.registerCommand('devchat.openChatPanel', async () => { - await vscode.commands.executeCommand('devchat-view.focus'); - await focusDevChatInput(ExtensionContextHolder.provider?.view()!); - }); - context.subscriptions.push(disposable); + let disposable = vscode.commands.registerCommand( + "devchat.openChatPanel", + async () => { + await vscode.commands.executeCommand("devchat-view.focus"); + await focusDevChatInput(ExtensionContextHolder.provider?.view()!); + } + ); + context.subscriptions.push(disposable); } -async function ensureChatPanel(context: vscode.ExtensionContext): Promise { - await vscode.commands.executeCommand('devchat-view.focus'); - return true; +async function ensureChatPanel( + context: vscode.ExtensionContext +): Promise { + await vscode.commands.executeCommand("devchat-view.focus"); + return true; } function registerAddContextCommand(context: vscode.ExtensionContext) { - const callback = async (uri: { fsPath: any; }) => { - if (!await ensureChatPanel(context)) { - return; - } + const callback = async (uri: { fsPath: any }) => { + if (!(await ensureChatPanel(context))) { + return; + } - await sendFileSelectMessage(ExtensionContextHolder.provider?.view()!, uri.fsPath); - }; - context.subscriptions.push(vscode.commands.registerCommand('devchat.addContext', callback)); - context.subscriptions.push(vscode.commands.registerCommand('devchat.addConext_chinese', callback)); + await sendFileSelectMessage( + ExtensionContextHolder.provider?.view()!, + uri.fsPath + ); + }; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.addContext", callback) + ); + context.subscriptions.push( + vscode.commands.registerCommand("devchat.addConext_chinese", callback) + ); } 
function registerAskForCodeCommand(context: vscode.ExtensionContext) { - const callback = async () => { - const editor = vscode.window.activeTextEditor; - if (editor) { - if (!await ensureChatPanel(context)) { - return; - } + const callback = async () => { + const editor = vscode.window.activeTextEditor; + if (editor) { + if (!(await ensureChatPanel(context))) { + return; + } - const selectedText = editor.document.getText(editor.selection); - await sendCodeSelectMessage(ExtensionContextHolder.provider?.view()!, editor.document.fileName, selectedText, editor.selection.start.line); - } - }; - context.subscriptions.push(vscode.commands.registerCommand('devchat.askForCode', callback)); - context.subscriptions.push(vscode.commands.registerCommand('devchat.askForCode_chinese', callback)); + const selectedText = editor.document.getText(editor.selection); + await sendCodeSelectMessage( + ExtensionContextHolder.provider?.view()!, + editor.document.fileName, + selectedText, + editor.selection.start.line + ); + } + }; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.askForCode", callback) + ); + context.subscriptions.push( + vscode.commands.registerCommand("devchat.askForCode_chinese", callback) + ); } function registerAskForFileCommand(context: vscode.ExtensionContext) { - const callback = async () => { - const editor = vscode.window.activeTextEditor; - if (editor) { - if (!await ensureChatPanel(context)) { - return; - } + const callback = async () => { + const editor = vscode.window.activeTextEditor; + if (editor) { + if (!(await ensureChatPanel(context))) { + return; + } - await sendFileSelectMessage(ExtensionContextHolder.provider?.view()!, editor.document.fileName); - } - }; - context.subscriptions.push(vscode.commands.registerCommand('devchat.askForFile', callback)); - context.subscriptions.push(vscode.commands.registerCommand('devchat.askForFile_chinese', callback)); + await sendFileSelectMessage( + ExtensionContextHolder.provider?.view()!, + 
editor.document.fileName + ); + } + }; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.askForFile", callback) + ); + context.subscriptions.push( + vscode.commands.registerCommand("devchat.askForFile_chinese", callback) + ); } -function regAccessKeyCommand(context: vscode.ExtensionContext, provider: string) { - context.subscriptions.push( - vscode.commands.registerCommand(`DevChat.AccessKey.${provider}`, async () => { - vscode.commands.executeCommand("devchat-view.focus"); - const passwordInput: string | undefined = await vscode.window.showInputBox({ - password: true, - title: `Set ${provider} Key`, - placeHolder: `Input your ${provider} key. (Leave blank to clear the stored key.)` - }) ?? undefined; - if (passwordInput === undefined) { - return; - } - if (provider === "DevChat" && passwordInput.trim() !== "") { - if (!passwordInput.trim().startsWith("DC.")) { - UiUtilWrapper.showErrorMessage("Your key is invalid! DevChat Access Key is: DC.xxxxx"); - return; - } - } +function regAccessKeyCommand( + context: vscode.ExtensionContext, + provider: string +) { + context.subscriptions.push( + vscode.commands.registerCommand( + `DevChat.AccessKey.${provider}`, + async () => { + vscode.commands.executeCommand("devchat-view.focus"); + const passwordInput: string | undefined = + (await vscode.window.showInputBox({ + password: true, + title: `Set ${provider} Key`, + placeHolder: `Input your ${provider} key. (Leave blank to clear the stored key.)`, + })) ?? undefined; + if (passwordInput === undefined) { + return; + } + if (provider === "DevChat" && passwordInput.trim() !== "") { + if (!passwordInput.trim().startsWith("DC.")) { + UiUtilWrapper.showErrorMessage( + "Your key is invalid! 
DevChat Access Key is: DC.xxxxx" + ); + return; + } + } - if (passwordInput.trim() !== "" && !isValidApiKey(passwordInput)) { - UiUtilWrapper.showErrorMessage("Your key is invalid!"); - return ; - } - await ApiKeyManager.writeApiKeySecret(passwordInput, provider); + if (passwordInput.trim() !== "" && !isValidApiKey(passwordInput)) { + UiUtilWrapper.showErrorMessage("Your key is invalid!"); + return; + } + await ApiKeyManager.writeApiKeySecret(passwordInput, provider); - // update default model - const accessKey = await ApiKeyManager.getApiKey(); - if (!accessKey) { - const modelList = await ApiKeyManager.getValidModels(); - if (modelList && modelList.length > 0) { - // update default llm model - await UiUtilWrapper.updateConfiguration('devchat', 'defaultModel', modelList[0]); - } - } + // update default model + const accessKey = await ApiKeyManager.getApiKey(); + if (!accessKey) { + const modelList = await ApiKeyManager.getValidModels(); + if (modelList && modelList.length > 0) { + // update default llm model + await UiUtilWrapper.updateConfiguration( + "devchat", + "defaultModel", + modelList[0] + ); + } + } - // reload webview - ExtensionContextHolder.provider?.reloadWebview(); - }) - ); + // reload webview + ExtensionContextHolder.provider?.reloadWebview(); + } + ) + ); } -export function registerAccessKeySettingCommand(context: vscode.ExtensionContext) { - regAccessKeyCommand(context, "OpenAI"); - regAccessKeyCommand(context, "Cohere"); - regAccessKeyCommand(context, "Anthropic"); - regAccessKeyCommand(context, "Replicate"); - regAccessKeyCommand(context, "HuggingFace"); - regAccessKeyCommand(context, "TogetherAI"); - regAccessKeyCommand(context, "OpenRouter"); - regAccessKeyCommand(context, "VertexAI"); - regAccessKeyCommand(context, "AI21"); - regAccessKeyCommand(context, "BaseTen"); - regAccessKeyCommand(context, "Azure"); - regAccessKeyCommand(context, "SageMaker"); - regAccessKeyCommand(context, "Bedrock"); - regAccessKeyCommand(context, "DevChat"); 
+export function registerAccessKeySettingCommand( + context: vscode.ExtensionContext +) { + regAccessKeyCommand(context, "OpenAI"); + regAccessKeyCommand(context, "Cohere"); + regAccessKeyCommand(context, "Anthropic"); + regAccessKeyCommand(context, "Replicate"); + regAccessKeyCommand(context, "HuggingFace"); + regAccessKeyCommand(context, "TogetherAI"); + regAccessKeyCommand(context, "OpenRouter"); + regAccessKeyCommand(context, "VertexAI"); + regAccessKeyCommand(context, "AI21"); + regAccessKeyCommand(context, "BaseTen"); + regAccessKeyCommand(context, "Azure"); + regAccessKeyCommand(context, "SageMaker"); + regAccessKeyCommand(context, "Bedrock"); + regAccessKeyCommand(context, "DevChat"); } -export function registerStatusBarItemClickCommand(context: vscode.ExtensionContext) { - context.subscriptions.push( - vscode.commands.registerCommand('devcaht.onStatusBarClick', async () => { - await vscode.commands.executeCommand('devchat-view.focus'); - }) - ); +export function registerStatusBarItemClickCommand( + context: vscode.ExtensionContext +) { + context.subscriptions.push( + vscode.commands.registerCommand("devcaht.onStatusBarClick", async () => { + await vscode.commands.executeCommand("devchat-view.focus"); + }) + ); } export function regPythonPathCommand(context: vscode.ExtensionContext) { - context.subscriptions.push( - vscode.commands.registerCommand('devchat.PythonPath', async () => { - const pythonPath = await vscode.window.showInputBox({ - title: "Set Python Path", - placeHolder: "Set Python Path" - }) ?? ''; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.PythonPath", async () => { + const pythonPath = + (await vscode.window.showInputBox({ + title: "Set Python Path", + placeHolder: "Set Python Path", + })) ?? 
""; - if (pythonPath) { - vscode.workspace.getConfiguration("DevChat").update("PythonForChat", pythonPath, vscode.ConfigurationTarget.Global); - } - }) - ); + if (pythonPath) { + vscode.workspace + .getConfiguration("DevChat") + .update( + "PythonForChat", + pythonPath, + vscode.ConfigurationTarget.Global + ); + } + }) + ); } export function regApplyDiffResultCommand(context: vscode.ExtensionContext) { - context.subscriptions.push( - vscode.commands.registerCommand('devchat.applyDiffResult', async () => { - const activeEditor = vscode.window.activeTextEditor; - const fileName = activeEditor!.document.fileName; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.applyDiffResult", async () => { + const activeEditor = vscode.window.activeTextEditor; + const fileName = activeEditor!.document.fileName; - const [leftUri, rightUri] = FilePairManager.getInstance().findPair(fileName) || [undefined, undefined]; - if (leftUri && rightUri) { - // 获取对比的两个文件 - const leftDoc = await vscode.workspace.openTextDocument(leftUri); - const rightDoc = await vscode.workspace.openTextDocument(rightUri); + const [leftUri, rightUri] = FilePairManager.getInstance().findPair( + fileName + ) || [undefined, undefined]; + if (leftUri && rightUri) { + // 获取对比的两个文件 + const leftDoc = await vscode.workspace.openTextDocument(leftUri); + const rightDoc = await vscode.workspace.openTextDocument(rightUri); - // close rightDoc - await vscode.commands.executeCommand('workbench.action.closeActiveEditor'); - // 将右边文档的内容替换到左边文档 - const leftEditor = await vscode.window.showTextDocument(leftDoc); - await leftEditor.edit(editBuilder => { - const fullRange = new vscode.Range(0, 0, leftDoc.lineCount, 0); - editBuilder.replace(fullRange, rightDoc.getText()); - }); + // close rightDoc + await vscode.commands.executeCommand( + "workbench.action.closeActiveEditor" + ); + // 将右边文档的内容替换到左边文档 + const leftEditor = await vscode.window.showTextDocument(leftDoc); + await leftEditor.edit((editBuilder) => 
{ + const fullRange = new vscode.Range(0, 0, leftDoc.lineCount, 0); + editBuilder.replace(fullRange, rightDoc.getText()); + }); - // 保存左边文档 - await leftDoc.save(); - } else { - vscode.window.showErrorMessage('No file to apply diff result.'); - } - }) - ); + // 保存左边文档 + await leftDoc.save(); + } else { + vscode.window.showErrorMessage("No file to apply diff result."); + } + }) + ); } +export function registerInstallCommandsCommand( + context: vscode.ExtensionContext +) { + let disposable = vscode.commands.registerCommand( + "DevChat.InstallCommands", + async () => { + const homePath = process.env.HOME || process.env.USERPROFILE || ""; + const sysDirPath = path.join(homePath, ".chat", "workflows", "sys"); + const pluginDirPath = path.join( + UiUtilWrapper.extensionPath(), + "workflowsCommands" + ); // Adjust this path as needed -export function registerInstallCommandsCommand(context: vscode.ExtensionContext) { - let disposable = vscode.commands.registerCommand('DevChat.InstallCommands', async () => { - const homePath = process.env.HOME || process.env.USERPROFILE || ''; - const sysDirPath = path.join(homePath, '.chat', 'workflows', 'sys'); - const pluginDirPath = path.join(UiUtilWrapper.extensionPath(), 'workflowsCommands'); // Adjust this path as needed + const devchat = new DevChat(); - const devchat = new DevChat(); + if (!fs.existsSync(sysDirPath)) { + await copyDirectory(pluginDirPath, sysDirPath); + } - if (!fs.existsSync(sysDirPath)) { - await copyDirectory(pluginDirPath, sysDirPath); + // Check if ~/.chat/workflows/sys directory exists + if (!fs.existsSync(sysDirPath)) { + // Directory does not exist, wait for updateSysCommand to finish + await devchat.updateSysCommand(); + sendCommandListByDevChatRun(); + } else { + // Directory exists, execute sendCommandListByDevChatRun immediately + await sendCommandListByDevChatRun(); + + // Then asynchronously execute updateSysCommand + await devchat.updateSysCommand(); + await sendCommandListByDevChatRun(); + } + } + ); 
+ + context.subscriptions.push(disposable); +} + +export function registerUpdateChatModelsCommand( + context: vscode.ExtensionContext +) { + let disposable = vscode.commands.registerCommand( + "DevChat.UpdataChatModels", + async () => { + updateChatModels(); + } + ); + + context.subscriptions.push(disposable); +} + +export function registerInstallCommandsPython( + context: vscode.ExtensionContext +) { + let disposable = vscode.commands.registerCommand( + "DevChat.InstallCommandPython", + async () => { + // steps of install command python + // 1. install python >= 3.11 + // 2. check requirements.txt in ~/.chat dir + // 3. install requirements.txt + + // 1. install python >= 3.11 + logger.channel()?.info(`create env for python ...`); + logger.channel()?.info(`try to create env by mamba ...`); + let pythonCommand = await createEnvByMamba( + "devchat-commands", + "", + "3.11.4" + ); + + if (!pythonCommand || pythonCommand === "") { + logger + .channel() + ?.info(`create env by mamba failed, try to create env by conda ...`); + pythonCommand = await createEnvByConda( + "devchat-commands", + "", + "3.11.4" + ); + } + + if (!pythonCommand || pythonCommand === "") { + logger + .channel() + ?.error( + `create virtual python env failed, you need create it by yourself with command: "conda create -n devchat-commands python=3.11.4"` + ); + logger.channel()?.show(); + + return; + } + + // 2. 
check requirements.txt in ~/.chat dir + // ~/.chat/requirements.txt + const usrRequirementsFile = path.join( + os.homedir(), + ".chat", + "workflows", + "usr", + "requirements.txt" + ); + const orgRequirementsFile = path.join( + os.homedir(), + ".chat", + "workflows", + "org", + "requirements.txt" + ); + const sysRequirementsFile = path.join( + os.homedir(), + ".chat", + "workflows", + "sys", + "requirements.txt" + ); + let requirementsFile = sysRequirementsFile; + if (fs.existsSync(orgRequirementsFile)) { + requirementsFile = orgRequirementsFile; + } + if (fs.existsSync(usrRequirementsFile)) { + requirementsFile = usrRequirementsFile; + } + + if (!fs.existsSync(requirementsFile)) { + // logger.channel()?.warn(`requirements.txt not found in ~/.chat/workflows dir.`); + // logger.channel()?.show(); + // vscode.window.showErrorMessage(`Error: see OUTPUT for more detail!`); + return; + } + + // 3. install requirements.txt + // run command: pip install -r {requirementsFile} + let isInstalled = false; + // try 3 times + for (let i = 0; i < 4; i++) { + let otherSource: string | undefined = undefined; + if (i > 1) { + otherSource = "https://pypi.tuna.tsinghua.edu.cn/simple/"; } - - // Check if ~/.chat/workflows/sys directory exists - if (!fs.existsSync(sysDirPath)) { - // Directory does not exist, wait for updateSysCommand to finish - await devchat.updateSysCommand(); - sendCommandListByDevChatRun(); - } else { - // Directory exists, execute sendCommandListByDevChatRun immediately - await sendCommandListByDevChatRun(); - - // Then asynchronously execute updateSysCommand - await devchat.updateSysCommand(); - await sendCommandListByDevChatRun(); + isInstalled = await installRequirements( + pythonCommand, + requirementsFile, + otherSource + ); + if (isInstalled) { + break; } - }); + logger.channel()?.info(`Install packages failed, try again: ${i + 1}`); + } + if (!isInstalled) { + logger + .channel() + ?.error( + `Install packages failed, you can install it with command: 
"${pythonCommand} -m pip install -r ~/.chat/requirements.txt"` + ); + logger.channel()?.show(); + vscode.window.showErrorMessage(`Error: see OUTPUT for more detail!`); + return ""; + } - context.subscriptions.push(disposable); -} + UiUtilWrapper.updateConfiguration( + "DevChat", + "PythonForCommands", + pythonCommand.trim() + ); + // vscode.window.showInformationMessage(`All slash Commands are ready to use! Please input / to try workflow commands!`); + } + ); -export function registerUpdateChatModelsCommand(context: vscode.ExtensionContext) { - let disposable = vscode.commands.registerCommand('DevChat.UpdataChatModels', async () => { - updateChatModels(); - }); - - context.subscriptions.push(disposable); -} - -export function registerInstallCommandsPython(context: vscode.ExtensionContext) { - let disposable = vscode.commands.registerCommand('DevChat.InstallCommandPython', async () => { - // steps of install command python - // 1. install python >= 3.11 - // 2. check requirements.txt in ~/.chat dir - // 3. install requirements.txt - - // 1. install python >= 3.11 - logger.channel()?.info(`create env for python ...`); - logger.channel()?.info(`try to create env by mamba ...`); - let pythonCommand = await createEnvByMamba("devchat-commands", "", "3.11.4"); - - if (!pythonCommand || pythonCommand === "") { - logger.channel()?.info(`create env by mamba failed, try to create env by conda ...`); - pythonCommand = await createEnvByConda("devchat-commands", "", "3.11.4"); - } - - if (!pythonCommand || pythonCommand === "") { - logger.channel()?.error(`create virtual python env failed, you need create it by yourself with command: "conda create -n devchat-commands python=3.11.4"`); - logger.channel()?.show(); - - return ; - } - - // 2. 
check requirements.txt in ~/.chat dir - // ~/.chat/requirements.txt - const usrRequirementsFile = path.join(os.homedir(), '.chat', 'workflows', 'usr', 'requirements.txt'); - const orgRequirementsFile = path.join(os.homedir(), '.chat', 'workflows', 'org', 'requirements.txt'); - const sysRequirementsFile = path.join(os.homedir(), '.chat', 'workflows', 'sys', 'requirements.txt'); - let requirementsFile = sysRequirementsFile; - if (fs.existsSync(orgRequirementsFile)) { - requirementsFile = orgRequirementsFile; - } - if (fs.existsSync(usrRequirementsFile)) { - requirementsFile = usrRequirementsFile; - } - - if (!fs.existsSync(requirementsFile)) { - // logger.channel()?.warn(`requirements.txt not found in ~/.chat/workflows dir.`); - // logger.channel()?.show(); - // vscode.window.showErrorMessage(`Error: see OUTPUT for more detail!`); - return ; - } - - // 3. install requirements.txt - // run command: pip install -r {requirementsFile} - let isInstalled = false; - // try 3 times - for (let i = 0; i < 4; i++) { - let otherSource: string | undefined = undefined; - if (i>1) { - otherSource = 'https://pypi.tuna.tsinghua.edu.cn/simple/'; - } - isInstalled = await installRequirements(pythonCommand, requirementsFile, otherSource); - if (isInstalled) { - break; - } - logger.channel()?.info(`Install packages failed, try again: ${i + 1}`); - } - if (!isInstalled) { - logger.channel()?.error(`Install packages failed, you can install it with command: "${pythonCommand} -m pip install -r ~/.chat/requirements.txt"`); - logger.channel()?.show(); - vscode.window.showErrorMessage(`Error: see OUTPUT for more detail!`); - return ''; - } - - UiUtilWrapper.updateConfiguration("DevChat", "PythonForCommands", pythonCommand.trim()); - // vscode.window.showInformationMessage(`All slash Commands are ready to use! 
Please input / to try workflow commands!`); - }); - - context.subscriptions.push(disposable); + context.subscriptions.push(disposable); } export function registerDevChatChatCommand(context: vscode.ExtensionContext) { - let disposable = vscode.commands.registerCommand('DevChat.Chat', async (message: string) => { - ensureChatPanel(context); - if (!ExtensionContextHolder.provider?.view()) { - // wait 2 seconds - await new Promise((resolve, reject) => { - setTimeout(() => { - resolve(true); - }, 2000); - }); - } - chatWithDevChat(ExtensionContextHolder.provider?.view()!, message); - }); + let disposable = vscode.commands.registerCommand( + "DevChat.Chat", + async (message: string) => { + ensureChatPanel(context); + if (!ExtensionContextHolder.provider?.view()) { + // wait 2 seconds + await new Promise((resolve, reject) => { + setTimeout(() => { + resolve(true); + }, 2000); + }); + } + chatWithDevChat(ExtensionContextHolder.provider?.view()!, message); + } + ); - context.subscriptions.push(disposable); + context.subscriptions.push(disposable); } -export function registerHandleUri(context: vscode.ExtensionContext){ - context.subscriptions.push(vscode.window.registerUriHandler({ - async handleUri(uri) { - // 解析 URI 并执行相应的操作 - if (uri.path.includes('accesskey')) { - const accessKey = uri.path.split('/')[2]; - const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", 'Provider.devchat'); - const providerConfigNew:any = {} - if (Object.keys(modelConfig).length !== 0){ - for (const key of Object.keys(modelConfig || {})) { - const property = modelConfig![key]; - providerConfigNew[key] = property; - } - } - providerConfigNew.access_key = accessKey; - vscode.workspace.getConfiguration("devchat").update("Provider.devchat", providerConfigNew, vscode.ConfigurationTarget.Global); - ensureChatPanel(context); - await new Promise((resolve, reject) => { - setTimeout(() => { - resolve(true); - }, 1000); - }); - ExtensionContextHolder.provider?.reloadWebview(); - } - } - })); 
+export function registerCodeLensExplainCommand( + context: vscode.ExtensionContext +) { + let disposable = vscode.commands.registerCommand( + "CodeLens.Explain", + async (message: string, pos: { start: number; end: number }) => { + const editor = vscode.window.activeTextEditor; + if (editor) { + const range = new vscode.Range( + new vscode.Position(pos.start, 0), + new vscode.Position(pos.end, 0) + ); + editor.selection = new vscode.Selection(range.start, range.end); + } + ensureChatPanel(context); + if (!ExtensionContextHolder.provider?.view()) { + // wait 2 seconds + await new Promise((resolve, reject) => { + setTimeout(() => { + resolve(true); + }, 2000); + }); + } + chatWithDevChat(ExtensionContextHolder.provider?.view()!, message); + } + ); + + context.subscriptions.push(disposable); +} + +export function registerHandleUri(context: vscode.ExtensionContext) { + context.subscriptions.push( + vscode.window.registerUriHandler({ + async handleUri(uri) { + // 解析 URI 并执行相应的操作 + if (uri.path.includes("accesskey")) { + const accessKey = uri.path.split("/")[2]; + const modelConfig: any = UiUtilWrapper.getConfiguration( + "devchat", + "Provider.devchat" + ); + const providerConfigNew: any = {}; + if (Object.keys(modelConfig).length !== 0) { + for (const key of Object.keys(modelConfig || {})) { + const property = modelConfig![key]; + providerConfigNew[key] = property; + } + } + providerConfigNew.access_key = accessKey; + vscode.workspace + .getConfiguration("devchat") + .update( + "Provider.devchat", + providerConfigNew, + vscode.ConfigurationTarget.Global + ); + ensureChatPanel(context); + await new Promise((resolve, reject) => { + setTimeout(() => { + resolve(true); + }, 1000); + }); + ExtensionContextHolder.provider?.reloadWebview(); + } + }, + }) + ); +} + +function registerExplainCommand(context: vscode.ExtensionContext) { + const callback = async () => { + const editor = vscode.window.activeTextEditor; + if (editor) { + if (!(await ensureChatPanel(context))) { + 
return; + } + + chatWithDevChat(ExtensionContextHolder.provider?.view()!, "/explain"); + } + }; + context.subscriptions.push( + vscode.commands.registerCommand("devchat.explain", callback) + ); + context.subscriptions.push( + vscode.commands.registerCommand("devchat.explain_chinese", callback) + ); } export { - registerOpenChatPanelCommand, - registerAddContextCommand, - registerAskForCodeCommand, - registerAskForFileCommand, + registerOpenChatPanelCommand, + registerAddContextCommand, + registerAskForCodeCommand, + registerAskForFileCommand, + registerExplainCommand, }; diff --git a/src/extension.ts b/src/extension.ts index bb6a79e..bfe8f08 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -1,404 +1,469 @@ -import * as vscode from 'vscode'; +import * as vscode from "vscode"; import { - registerOpenChatPanelCommand, - registerAddContextCommand, - registerAskForCodeCommand, - registerAskForFileCommand, - registerAccessKeySettingCommand, - regApplyDiffResultCommand, - registerStatusBarItemClickCommand, - regPythonPathCommand, - registerInstallCommandsCommand, - registerUpdateChatModelsCommand, - registerInstallCommandsPython, - registerDevChatChatCommand, - registerHandleUri, -} from './contributes/commands'; -import { regLanguageContext } from './contributes/context'; -import { regDevChatView } from './contributes/views'; + registerOpenChatPanelCommand, + registerAddContextCommand, + registerAskForCodeCommand, + registerAskForFileCommand, + registerAccessKeySettingCommand, + regApplyDiffResultCommand, + registerStatusBarItemClickCommand, + regPythonPathCommand, + registerInstallCommandsCommand, + registerUpdateChatModelsCommand, + registerInstallCommandsPython, + registerDevChatChatCommand, + registerHandleUri, + registerExplainCommand, + registerCodeLensExplainCommand, +} from "./contributes/commands"; +import { regLanguageContext } from "./contributes/context"; +import { regDevChatView } from "./contributes/views"; -import { ExtensionContextHolder } from 
'./util/extensionContext'; -import { logger } from './util/logger'; -import { LoggerChannelVscode } from './util/logger_vscode'; -import { createStatusBarItem } from './panel/statusBarView'; -import { UiUtilWrapper } from './util/uiUtil'; -import { UiUtilVscode } from './util/uiUtil_vscode'; -import { ApiKeyManager } from './util/apiKey'; -import { startRpcServer } from './ide_services/services'; -import { registerCodeLensProvider } from './panel/codeLens'; -import { stopDevChatBase } from './handler/sendMessageBase'; -import exp from 'constants'; +import { ExtensionContextHolder } from "./util/extensionContext"; +import { logger } from "./util/logger"; +import { LoggerChannelVscode } from "./util/logger_vscode"; +import { createStatusBarItem } from "./panel/statusBarView"; +import { UiUtilWrapper } from "./util/uiUtil"; +import { UiUtilVscode } from "./util/uiUtil_vscode"; +import { ApiKeyManager } from "./util/apiKey"; +import { startRpcServer } from "./ide_services/services"; +import { registerCodeLensProvider } from "./panel/codeLens"; +import { stopDevChatBase } from "./handler/sendMessageBase"; +import exp from "constants"; /** * ABC isProviderHasSetted - * @returns + * @returns */ async function isProviderHasSetted() { - try { - const providerProperty = "Provider.devchat"; - const providerConfig: any = UiUtilWrapper.getConfiguration("devchat", providerProperty); - if (providerConfig && Object.keys(providerConfig).length > 0) { - return true; - } + try { + const providerProperty = "Provider.devchat"; + const providerConfig: any = UiUtilWrapper.getConfiguration( + "devchat", + providerProperty + ); + if (providerConfig && Object.keys(providerConfig).length > 0) { + return true; + } - const providerPropertyOpenAI = "Provider.openai"; - const providerConfigOpenAI: any = UiUtilWrapper.getConfiguration("devchat", providerPropertyOpenAI); - if (providerConfigOpenAI && Object.keys(providerConfigOpenAI).length > 0) { - return true; - } + const providerPropertyOpenAI 
= "Provider.openai"; + const providerConfigOpenAI: any = UiUtilWrapper.getConfiguration( + "devchat", + providerPropertyOpenAI + ); + if (providerConfigOpenAI && Object.keys(providerConfigOpenAI).length > 0) { + return true; + } - const apiOpenaiKey = await ApiKeyManager.getProviderApiKey("openai"); - if (apiOpenaiKey) { - return true; - } - const devchatKey = await ApiKeyManager.getProviderApiKey("devchat"); - if (devchatKey) { - return true; - } + const apiOpenaiKey = await ApiKeyManager.getProviderApiKey("openai"); + if (apiOpenaiKey) { + return true; + } + const devchatKey = await ApiKeyManager.getProviderApiKey("devchat"); + if (devchatKey) { + return true; + } - return false; - } catch (error) { - return false; - } - + return false; + } catch (error) { + return false; + } } -async function configUpdateTo1115() { - const supportModels = [ - "Model.gpt-3-5-1106", - "Model.gpt-4-turbo", - ]; +async function configUpdateTo1115() { + const supportModels = ["Model.gpt-3-5-1106", "Model.gpt-4-turbo"]; - for (const model of supportModels) { - const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); - if (modelConfig1 && Object.keys(modelConfig1).length === 0) { - let modelConfigNew = {}; - modelConfigNew = {"provider": "devchat"}; - if (model.startsWith("Model.gpt-")) { - modelConfigNew = {"provider": "openai"}; - } + for (const model of supportModels) { + const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); + if (modelConfig1 && Object.keys(modelConfig1).length === 0) { + let modelConfigNew = {}; + modelConfigNew = { provider: "devchat" }; + if (model.startsWith("Model.gpt-")) { + modelConfigNew = { provider: "openai" }; + } - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelConfigNew, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); - } - } - } + try { + await vscode.workspace + .getConfiguration("devchat") + 
.update(model, modelConfigNew, vscode.ConfigurationTarget.Global); + } catch (error) { + logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); + } + } + } } async function configUpdateTo0924() { - if (await isProviderHasSetted()) { - return ; - } - const defaultModel: any = UiUtilWrapper.getConfiguration("devchat", "defaultModel"); - - let devchatKey = UiUtilWrapper.getConfiguration('DevChat', 'Access_Key_DevChat'); - let openaiKey = UiUtilWrapper.getConfiguration('DevChat', 'Api_Key_OpenAI'); - const endpointKey = UiUtilWrapper.getConfiguration('DevChat', 'API_ENDPOINT'); - - devchatKey = undefined; - openaiKey = undefined; - if (!devchatKey && !openaiKey) { - openaiKey = await UiUtilWrapper.secretStorageGet("openai_OPENAI_API_KEY"); - devchatKey = await UiUtilWrapper.secretStorageGet("devchat_OPENAI_API_KEY"); - await UiUtilWrapper.storeSecret("openai_OPENAI_API_KEY", ""); - await UiUtilWrapper.storeSecret("devchat_OPENAI_API_KEY", ""); - } - if (!devchatKey && !openaiKey) { - openaiKey = process.env.OPENAI_API_KEY; - } - - let modelConfigNew = {}; - let providerConfigNew = {}; - if (openaiKey) { - providerConfigNew["access_key"] = openaiKey; - if (endpointKey) { - providerConfigNew["api_base"] = endpointKey; - } - - await vscode.workspace.getConfiguration("devchat").update("Provider.openai", providerConfigNew, vscode.ConfigurationTarget.Global); - } + if (await isProviderHasSetted()) { + return; + } + const defaultModel: any = UiUtilWrapper.getConfiguration( + "devchat", + "defaultModel" + ); - if (devchatKey) { - providerConfigNew["access_key"] = devchatKey; - if (endpointKey) { - providerConfigNew["api_base"] = endpointKey; - } - - await vscode.workspace.getConfiguration("devchat").update("Provider.devchat", providerConfigNew, vscode.ConfigurationTarget.Global); - } - - const supportModels = [ - "Model.gpt-3-5", - "Model.gpt-3-5-1106", - "Model.gpt-3-5-16k", - "Model.gpt-4", - "Model.gpt-4-turbo", - "Model.claude-2", - "Model.xinghuo-2", - 
"Model.chatglm_pro", - "Model.ERNIE-Bot", - "Model.CodeLlama-34b-Instruct", - "Model.llama-2-70b-chat" - ]; - - for (const model of supportModels) { - const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); - if (modelConfig1 && Object.keys(modelConfig1).length === 0) { - modelConfigNew = {"provider": "devchat"}; - if (model.startsWith("Model.gpt-")) { - modelConfigNew = {"provider": "openai"}; - } - - await vscode.workspace.getConfiguration("devchat").update(model, modelConfigNew, vscode.ConfigurationTarget.Global); - } - } - - if (!defaultModel) { - await vscode.workspace.getConfiguration("devchat").update("defaultModel", "claude-2.1", vscode.ConfigurationTarget.Global); - } + let devchatKey = UiUtilWrapper.getConfiguration( + "DevChat", + "Access_Key_DevChat" + ); + let openaiKey = UiUtilWrapper.getConfiguration("DevChat", "Api_Key_OpenAI"); + const endpointKey = UiUtilWrapper.getConfiguration("DevChat", "API_ENDPOINT"); + + devchatKey = undefined; + openaiKey = undefined; + if (!devchatKey && !openaiKey) { + openaiKey = await UiUtilWrapper.secretStorageGet("openai_OPENAI_API_KEY"); + devchatKey = await UiUtilWrapper.secretStorageGet("devchat_OPENAI_API_KEY"); + await UiUtilWrapper.storeSecret("openai_OPENAI_API_KEY", ""); + await UiUtilWrapper.storeSecret("devchat_OPENAI_API_KEY", ""); + } + if (!devchatKey && !openaiKey) { + openaiKey = process.env.OPENAI_API_KEY; + } + + let modelConfigNew = {}; + let providerConfigNew = {}; + if (openaiKey) { + providerConfigNew["access_key"] = openaiKey; + if (endpointKey) { + providerConfigNew["api_base"] = endpointKey; + } + + await vscode.workspace + .getConfiguration("devchat") + .update( + "Provider.openai", + providerConfigNew, + vscode.ConfigurationTarget.Global + ); + } + + if (devchatKey) { + providerConfigNew["access_key"] = devchatKey; + if (endpointKey) { + providerConfigNew["api_base"] = endpointKey; + } + + await vscode.workspace + .getConfiguration("devchat") + .update( + 
"Provider.devchat", + providerConfigNew, + vscode.ConfigurationTarget.Global + ); + } + + const supportModels = [ + "Model.gpt-3-5", + "Model.gpt-3-5-1106", + "Model.gpt-3-5-16k", + "Model.gpt-4", + "Model.gpt-4-turbo", + "Model.claude-2", + "Model.xinghuo-2", + "Model.chatglm_pro", + "Model.ERNIE-Bot", + "Model.CodeLlama-34b-Instruct", + "Model.llama-2-70b-chat", + ]; + + for (const model of supportModels) { + const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); + if (modelConfig1 && Object.keys(modelConfig1).length === 0) { + modelConfigNew = { provider: "devchat" }; + if (model.startsWith("Model.gpt-")) { + modelConfigNew = { provider: "openai" }; + } + + await vscode.workspace + .getConfiguration("devchat") + .update(model, modelConfigNew, vscode.ConfigurationTarget.Global); + } + } + + if (!defaultModel) { + await vscode.workspace + .getConfiguration("devchat") + .update("defaultModel", "claude-2.1", vscode.ConfigurationTarget.Global); + } } - async function configUpdate0912To0924() { - if (await isProviderHasSetted()) { - return ; - } - - const oldModels = [ - "Model.gpt-3-5", - "Model.gpt-3-5-16k", - "Model.gpt-4", - "Model.claude-2" - ]; + if (await isProviderHasSetted()) { + return; + } - for (const model of oldModels) { - const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", model); - if (modelConfig && Object.keys(modelConfig).length !== 0) { - let modelProperties: any = {}; - for (const key of Object.keys(modelConfig || {})) { - const property = modelConfig![key]; - modelProperties[key] = property; - } + const oldModels = [ + "Model.gpt-3-5", + "Model.gpt-3-5-16k", + "Model.gpt-4", + "Model.claude-2", + ]; - if (modelConfig["api_key"]) { - let providerConfigNew = {}; - providerConfigNew["access_key"] = modelConfig["api_key"]; - if (modelConfig["api_base"]) { - providerConfigNew["api_base"] = modelConfig["api_base"]; - } + for (const model of oldModels) { + const modelConfig: any = 
UiUtilWrapper.getConfiguration("devchat", model); + if (modelConfig && Object.keys(modelConfig).length !== 0) { + let modelProperties: any = {}; + for (const key of Object.keys(modelConfig || {})) { + const property = modelConfig![key]; + modelProperties[key] = property; + } - if (modelConfig["api_key"].startsWith("DC.")) { - modelProperties["provider"] = "devchat"; - await vscode.workspace.getConfiguration("devchat").update("Provider.devchat", providerConfigNew, vscode.ConfigurationTarget.Global); - } else { - modelProperties["provider"] = "openai"; - await vscode.workspace.getConfiguration("devchat").update("Provider.openai", providerConfigNew, vscode.ConfigurationTarget.Global); - } - - delete modelProperties["api_key"]; - delete modelProperties["api_base"]; - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelProperties, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`error: ${error}`); - } - } else { - if (!modelProperties["provider"]) { - delete modelProperties["api_base"]; - modelProperties["provider"] = "devchat"; - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelProperties, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`error: ${error}`); - } - } - } - } - } + if (modelConfig["api_key"]) { + let providerConfigNew = {}; + providerConfigNew["access_key"] = modelConfig["api_key"]; + if (modelConfig["api_base"]) { + providerConfigNew["api_base"] = modelConfig["api_base"]; + } + + if (modelConfig["api_key"].startsWith("DC.")) { + modelProperties["provider"] = "devchat"; + await vscode.workspace + .getConfiguration("devchat") + .update( + "Provider.devchat", + providerConfigNew, + vscode.ConfigurationTarget.Global + ); + } else { + modelProperties["provider"] = "openai"; + await vscode.workspace + .getConfiguration("devchat") + .update( + "Provider.openai", + providerConfigNew, + vscode.ConfigurationTarget.Global + ); + } + + delete 
modelProperties["api_key"]; + delete modelProperties["api_base"]; + try { + await vscode.workspace + .getConfiguration("devchat") + .update(model, modelProperties, vscode.ConfigurationTarget.Global); + } catch (error) { + logger.channel()?.error(`error: ${error}`); + } + } else { + if (!modelProperties["provider"]) { + delete modelProperties["api_base"]; + modelProperties["provider"] = "devchat"; + try { + await vscode.workspace + .getConfiguration("devchat") + .update( + model, + modelProperties, + vscode.ConfigurationTarget.Global + ); + } catch (error) { + logger.channel()?.error(`error: ${error}`); + } + } + } + } + } } - async function configUpdateto240205() { - // rename Model.CodeLlama-34b-Instruct to Model.CodeLlama-70b - // add new Model.Mixtral-8x7B - // add new Model.Minimax-abab6 - const supportModels = [ - "Model.CodeLlama-70b", - "Model.Mixtral-8x7B", - "Model.Minimax-abab6" - ]; + // rename Model.CodeLlama-34b-Instruct to Model.CodeLlama-70b + // add new Model.Mixtral-8x7B + // add new Model.Minimax-abab6 + const supportModels = [ + "Model.CodeLlama-70b", + "Model.Mixtral-8x7B", + "Model.Minimax-abab6", + ]; - for (const model of supportModels) { - const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); - if (modelConfig1 && Object.keys(modelConfig1).length === 0) { - let modelConfigNew = {}; - modelConfigNew = {"provider": "devchat"}; - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelConfigNew, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`error: ${error}`); - } - } - } + for (const model of supportModels) { + const modelConfig1: any = UiUtilWrapper.getConfiguration("devchat", model); + if (modelConfig1 && Object.keys(modelConfig1).length === 0) { + let modelConfigNew = {}; + modelConfigNew = { provider: "devchat" }; + try { + await vscode.workspace + .getConfiguration("devchat") + .update(model, modelConfigNew, vscode.ConfigurationTarget.Global); + } catch 
(error) { + logger.channel()?.error(`error: ${error}`); + } + } + } } - async function setLangDefaultValue() { - const lang = vscode.env.language; - if (!UiUtilWrapper.getConfiguration("DevChat", "Language")) { - if (lang.startsWith("zh-")) { - UiUtilWrapper.updateConfiguration("DevChat", "Language", "zh"); - } else { - UiUtilWrapper.updateConfiguration("DevChat", "Language", "en"); - } - } + const lang = vscode.env.language; + if (!UiUtilWrapper.getConfiguration("DevChat", "Language")) { + if (lang.startsWith("zh-")) { + UiUtilWrapper.updateConfiguration("DevChat", "Language", "zh"); + } else { + UiUtilWrapper.updateConfiguration("DevChat", "Language", "en"); + } + } } async function updateInvalidSettings() { - const oldModels = [ - "Model.gpt-3-5", - "Model.gpt-3-5-16k", - "Model.gpt-4", - "Model.claude-2" - ]; + const oldModels = [ + "Model.gpt-3-5", + "Model.gpt-3-5-16k", + "Model.gpt-4", + "Model.claude-2", + ]; - for (const model of oldModels) { - const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", model); - if (modelConfig && Object.keys(modelConfig).length !== 0) { - let modelProperties: any = {}; - for (const key of Object.keys(modelConfig || {})) { - const property = modelConfig![key]; - modelProperties[key] = property; - } + for (const model of oldModels) { + const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", model); + if (modelConfig && Object.keys(modelConfig).length !== 0) { + let modelProperties: any = {}; + for (const key of Object.keys(modelConfig || {})) { + const property = modelConfig![key]; + modelProperties[key] = property; + } - if (modelConfig["api_key"]) { - delete modelProperties["api_key"]; - delete modelProperties["api_base"]; - modelProperties["provider"] = "devchat"; - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelProperties, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`error: ${error}`); - } - } - } - } + if (modelConfig["api_key"]) { 
+ delete modelProperties["api_key"]; + delete modelProperties["api_base"]; + modelProperties["provider"] = "devchat"; + try { + await vscode.workspace + .getConfiguration("devchat") + .update(model, modelProperties, vscode.ConfigurationTarget.Global); + } catch (error) { + logger.channel()?.error(`error: ${error}`); + } + } + } + } } async function updateInvalidDefaultModel() { - const defaultModel: any = UiUtilWrapper.getConfiguration("devchat", "defaultModel"); - if (defaultModel === "gpt-3.5-turbo-1106" || defaultModel === "gpt-3.5-turbo-16k") { - try { - await vscode.workspace.getConfiguration("devchat").update("defaultModel", "gpt-3.5-turbo", vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); - } - } + const defaultModel: any = UiUtilWrapper.getConfiguration( + "devchat", + "defaultModel" + ); + if ( + defaultModel === "gpt-3.5-turbo-1106" || + defaultModel === "gpt-3.5-turbo-16k" + ) { + try { + await vscode.workspace + .getConfiguration("devchat") + .update( + "defaultModel", + "gpt-3.5-turbo", + vscode.ConfigurationTarget.Global + ); + } catch (error) { + logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); + } + } } // "gpt-3.5-turbo-1106", // "gpt-3.5-turbo-16k", async function configSetModelDefaultParams() { - const modelParams = { - "Model.gpt-3-5": { - "max_input_tokens": 13000 - }, - "Model.gpt-4": { - "max_input_tokens": 6000 - }, - "Model.gpt-4-turbo": { - "max_input_tokens": 32000 - }, - "Model.claude-2": { - "max_input_tokens": 32000 - }, - "Model.xinghuo-2": { - "max_input_tokens": 6000 - }, - "Model.chatglm_pro": { - "max_input_tokens": 8000 - }, - "Model.ERNIE-Bot": { - "max_input_tokens": 8000 - }, - "Model.CodeLlama-70b": { - "max_input_tokens": 4000 - }, - "Model.Mixtral-8x7B": { - "max_input_tokens": 4000 - }, - "Model.Minimax-abab6": { - "max_input_tokens": 4000 - }, - "Model.llama-2-70b-chat": { - "max_input_tokens": 4000 - } - }; + const modelParams = 
{ + "Model.gpt-3-5": { + max_input_tokens: 13000, + }, + "Model.gpt-4": { + max_input_tokens: 6000, + }, + "Model.gpt-4-turbo": { + max_input_tokens: 32000, + }, + "Model.claude-2": { + max_input_tokens: 32000, + }, + "Model.xinghuo-2": { + max_input_tokens: 6000, + }, + "Model.chatglm_pro": { + max_input_tokens: 8000, + }, + "Model.ERNIE-Bot": { + max_input_tokens: 8000, + }, + "Model.CodeLlama-70b": { + max_input_tokens: 4000, + }, + "Model.Mixtral-8x7B": { + max_input_tokens: 4000, + }, + "Model.Minimax-abab6": { + max_input_tokens: 4000, + }, + "Model.llama-2-70b-chat": { + max_input_tokens: 4000, + }, + }; - // set default params - for (const model of Object.keys(modelParams)) { - const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", model); - if (!modelConfig["max_input_tokens"]) { - modelConfig["max_input_tokens"] = modelParams[model]["max_input_tokens"]; - try { - await vscode.workspace.getConfiguration("devchat").update(model, modelConfig, vscode.ConfigurationTarget.Global); - } catch (error) { - logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); - } - } - } + // set default params + for (const model of Object.keys(modelParams)) { + const modelConfig: any = UiUtilWrapper.getConfiguration("devchat", model); + if (!modelConfig["max_input_tokens"]) { + modelConfig["max_input_tokens"] = modelParams[model]["max_input_tokens"]; + try { + await vscode.workspace + .getConfiguration("devchat") + .update(model, modelConfig, vscode.ConfigurationTarget.Global); + } catch (error) { + logger.channel()?.error(`update Model.ERNIE-Bot error: ${error}`); + } + } + } } async function activate(context: vscode.ExtensionContext) { - ExtensionContextHolder.context = context; + ExtensionContextHolder.context = context; - logger.init(LoggerChannelVscode.getInstance()); - UiUtilWrapper.init(new UiUtilVscode()); - - await configUpdateTo0924(); - await configUpdate0912To0924(); - await configUpdateTo1115(); - await setLangDefaultValue(); - await 
updateInvalidSettings(); - await updateInvalidDefaultModel(); - await configUpdateto240205(); - await configSetModelDefaultParams(); - - regLanguageContext(); + logger.init(LoggerChannelVscode.getInstance()); + UiUtilWrapper.init(new UiUtilVscode()); - regDevChatView(context); + await configUpdateTo0924(); + await configUpdate0912To0924(); + await configUpdateTo1115(); + await setLangDefaultValue(); + await updateInvalidSettings(); + await updateInvalidDefaultModel(); + await configUpdateto240205(); + await configSetModelDefaultParams(); - registerAccessKeySettingCommand(context); - registerOpenChatPanelCommand(context); - registerAddContextCommand(context); - registerAskForCodeCommand(context); - registerAskForFileCommand(context); - registerStatusBarItemClickCommand(context); + regLanguageContext(); - registerInstallCommandsCommand(context); - registerUpdateChatModelsCommand(context); - registerInstallCommandsPython(context); + regDevChatView(context); - createStatusBarItem(context); + registerAccessKeySettingCommand(context); + registerOpenChatPanelCommand(context); + registerAddContextCommand(context); + registerAskForCodeCommand(context); + registerAskForFileCommand(context); + registerExplainCommand(context); + registerStatusBarItemClickCommand(context); - regApplyDiffResultCommand(context); + registerInstallCommandsCommand(context); + registerUpdateChatModelsCommand(context); + registerInstallCommandsPython(context); - regPythonPathCommand(context); - registerDevChatChatCommand(context); - registerCodeLensProvider(context); + createStatusBarItem(context); - startRpcServer(); - logger.channel()?.info(`registerHandleUri:`); - registerHandleUri(context) + regApplyDiffResultCommand(context); + + regPythonPathCommand(context); + registerDevChatChatCommand(context); + registerCodeLensExplainCommand(context); + registerCodeLensProvider(context); + + startRpcServer(); + logger.channel()?.info(`registerHandleUri:`); + registerHandleUri(context); } async function 
deactivate() { - // stop devchat - await stopDevChatBase({}); + // stop devchat + await stopDevChatBase({}); } exports.activate = activate; -exports.deactivate = deactivate; \ No newline at end of file +exports.deactivate = deactivate; diff --git a/src/panel/codeLens.ts b/src/panel/codeLens.ts index 0f47233..b4be559 100644 --- a/src/panel/codeLens.ts +++ b/src/panel/codeLens.ts @@ -1,20 +1,20 @@ -import * as vscode from 'vscode'; -import * as fs from 'fs'; -import * as path from 'path'; -import { logger } from '../util/logger'; -import { log } from 'console'; +import * as vscode from "vscode"; +import * as fs from "fs"; +import * as path from "path"; +import { logger } from "../util/logger"; +import { log } from "console"; interface FunctionDefinition { - name: string; - containerName: string | null; - containerRange: vscode.Range | null; - range: vscode.Range; + name: string; + containerName: string | null; + containerRange: vscode.Range | null; + range: vscode.Range; } type CodeLensRegistration = { - elementType: string; - objectName: string; - promptGenerator: string; + elementType: string; + objectName: string; + promptGenerator: string; }; export class CodeLensManager { @@ -23,7 +23,10 @@ export class CodeLensManager { private configFilePath: string; private constructor() { - this.configFilePath = path.join(process.env.HOME || process.env.USERPROFILE || '.', '.chat/ideconfig.json'); + this.configFilePath = path.join( + process.env.HOME || process.env.USERPROFILE || ".", + ".chat/ideconfig.json" + ); this.loadConfig(); } @@ -35,41 +38,47 @@ export class CodeLensManager { } private loadConfig(): void { - if (!fs.existsSync(this.configFilePath)) { - this.initializeConfig(); + if (fs.existsSync(this.configFilePath)) { + this.initializeConfig(); } else { - const data = fs.readFileSync(this.configFilePath, "utf-8"); - this.registrations = JSON.parse(data); + const data = fs.readFileSync(this.configFilePath, "utf-8"); + this.registrations = JSON.parse(data); - if 
(this.registrations.length === 0) { - this.initializeConfig(); - } + if (this.registrations.length === 0) { + this.initializeConfig(); + } } } private initializeConfig(): void { this.registrations = [ - { - "elementType": "function", - "objectName": "Add unit tests", - "promptGenerator": "/unit_tests {__filename__}:::{__functionName__}:::{__functionStartLine__}:::{__functionEndLine__}:::{__containerStartLine__}:::{__containerEndLine__}" - } - // { - // elementType: 'function', - // objectName: 'generate unit tests', - // promptGenerator: '/test generate unit tests for {__filename__} {__functionName__}' - // }, - // { - // elementType: 'inner_function', - // objectName: 'generate comment', - // promptGenerator: 'generate comment for \n ```code\n{__functionCode__}\n```\n' - // }, - // { - // elementType: 'function', - // objectName: 'generate comment', - // promptGenerator: 'generate comment for \n ```code\n{__functionCode__}\n```\n' - // } - ]; + { + elementType: "function", + objectName: "Add unit tests", + promptGenerator: + "/unit_tests {__filename__}:::{__functionName__}:::{__functionStartLine__}:::{__functionEndLine__}:::{__containerStartLine__}:::{__containerEndLine__}", + }, + { + elementType: "function", + objectName: "Explain", + promptGenerator: "/explain", + }, + // { + // elementType: 'function', + // objectName: 'generate unit tests', + // promptGenerator: '/test generate unit tests for {__filename__} {__functionName__}' + // }, + // { + // elementType: 'inner_function', + // objectName: 'generate comment', + // promptGenerator: 'generate comment for \n ```code\n{__functionCode__}\n```\n' + // }, + // { + // elementType: 'function', + // objectName: 'generate comment', + // promptGenerator: 'generate comment for \n ```code\n{__functionCode__}\n```\n' + // } + ]; this.saveConfig(); } @@ -78,127 +87,170 @@ export class CodeLensManager { if (!fs.existsSync(configDir)) { fs.mkdirSync(configDir, { recursive: true }); } - fs.writeFileSync(this.configFilePath, 
JSON.stringify(this.registrations, null, 2), 'utf8'); + fs.writeFileSync( + this.configFilePath, + JSON.stringify(this.registrations, null, 2), + "utf8" + ); } public getRegistrations(): CodeLensRegistration[] { - return this.registrations; + return this.registrations; } } +async function getFunctionDefinitions( + document: vscode.TextDocument, + inner_function: boolean = false +): Promise { + const symbols: vscode.DocumentSymbol[] | undefined = + await vscode.commands.executeCommand( + "vscode.executeDocumentSymbolProvider", + document.uri + ); -async function getFunctionDefinitions(document: vscode.TextDocument, inner_function: boolean = false): Promise { - const symbols: vscode.DocumentSymbol[] | undefined = await vscode.commands.executeCommand( - 'vscode.executeDocumentSymbolProvider', - document.uri - ); + if (!symbols) { + return []; + } - if (!symbols) { - return []; - } + function extractFunctions( + symbol: vscode.DocumentSymbol, + containerSymbol: vscode.DocumentSymbol | null, + hasInFunction: boolean = false + ): FunctionDefinition[] { + let functions: FunctionDefinition[] = []; + const isFunction = + symbol.kind === vscode.SymbolKind.Function || + symbol.kind === vscode.SymbolKind.Method; + if (isFunction) { + if (!inner_function || (inner_function && hasInFunction)) { + functions.push({ + name: symbol.name, + containerName: containerSymbol ? containerSymbol.name : null, + containerRange: containerSymbol ? 
containerSymbol.range : null, + range: symbol.range, + }); + } + hasInFunction = true; + } - function extractFunctions(symbol: vscode.DocumentSymbol, containerSymbol: vscode.DocumentSymbol | null, hasInFunction: boolean = false): FunctionDefinition[] { - let functions: FunctionDefinition[] = []; - const isFunction = symbol.kind === vscode.SymbolKind.Function || symbol.kind === vscode.SymbolKind.Method; - if (isFunction) { - if (!inner_function || (inner_function && hasInFunction)) { - functions.push({ - name: symbol.name, - containerName: containerSymbol? containerSymbol.name : null, - containerRange: containerSymbol? containerSymbol.range : null, - range: symbol.range - }); - } - hasInFunction = true; - } + if (inner_function || !isFunction) { + if (symbol.children && symbol.children.length > 0) { + symbol.children.forEach((child) => { + functions = functions.concat( + extractFunctions(child, symbol, hasInFunction) + ); + }); + } + } - if (inner_function || !isFunction) { - if (symbol.children && symbol.children.length > 0) { - symbol.children.forEach(child => { - functions = functions.concat(extractFunctions(child, symbol, hasInFunction)); - }); - } - } - - return functions; - } + return functions; + } - let functionSymbols: FunctionDefinition[] = []; - symbols.forEach(symbol => { - functionSymbols = functionSymbols.concat(extractFunctions(symbol, null)); - }); + let functionSymbols: FunctionDefinition[] = []; + symbols.forEach((symbol) => { + functionSymbols = functionSymbols.concat(extractFunctions(symbol, null)); + }); - return functionSymbols; + return functionSymbols; } - - class FunctionTestCodeLensProvider implements vscode.CodeLensProvider { - // The provideCodeLenses method should have the correct signature - async provideCodeLenses(document: vscode.TextDocument, token: vscode.CancellationToken): Promise { - // check whether document is a source file - if (document.languageId === 'log') { - return []; - } - const lenses: vscode.CodeLens[] = []; - const 
functionDefinitions = await getFunctionDefinitions(document); - const innerFunctionDefinitions = await getFunctionDefinitions(document, true); - - const matchElements = { - 'function': functionDefinitions, - 'inner_function': innerFunctionDefinitions - }; - - for (const [elementType, elements] of Object.entries(matchElements)) { - elements.forEach((funcDef) => { - const range = new vscode.Range( - new vscode.Position(funcDef.range.start.line, 0), - new vscode.Position(funcDef.range.end.line, 10000) - ); - - const codelenRegisters: CodeLensRegistration[] = CodeLensManager.getInstance().getRegistrations(); - // Iterate over codelenRegisters with 'of' instead of 'in' - for (const codelenRegister of codelenRegisters) { - if (codelenRegister.elementType !== elementType) { - continue; - } - - // Read range content in document - const functionCode = document.getText(range); - const parentRange = funcDef.containerRange; - - // Fix the string replacement syntax and closing parentheses - const prompt = codelenRegister.promptGenerator - .replace(/{__filename__}/g, document.uri.fsPath) - .replace(/{__functionName__}/g, funcDef.name) - .replace(/{__functionStartLine__}/g, `${range.start.line}`) - .replace(/{__functionEndLine__}/g, `${range.end.line}`) - .replace(/{__containerName__}/g, funcDef.containerName || '') - .replace(/{__containerStartLine__}/g, `${parentRange ? parentRange.start.line : -1}`) - .replace(/{__containerEndLine__}/g, `${parentRange ? 
parentRange.end.line : -1}`) - .replace(/{__functionCode__}/g, functionCode); // Fixed syntax to replace all occurrences - const lens = new vscode.CodeLens(range, { - title: codelenRegister.objectName, - command: "DevChat.Chat", - // arguments: [document.uri.fsPath, range, funcDef.name] // Commented out as it's not used - arguments: [prompt] - }); - - lenses.push(lens); - } - }); - } - - // log info find how many functionDefinitions, innerFunctionDefinitions, lenses - logger.channel()?.info(`found ${functionDefinitions.length} functions, ${innerFunctionDefinitions.length} inner functions, ${lenses.length} registered codeLenses in document: ${document.fileName}`); - return lenses; + // The provideCodeLenses method should have the correct signature + async provideCodeLenses( + document: vscode.TextDocument, + token: vscode.CancellationToken + ): Promise { + // check whether document is a source file + if (document.languageId === "log") { + return []; } -} + const lenses: vscode.CodeLens[] = []; + const functionDefinitions = await getFunctionDefinitions(document); + const innerFunctionDefinitions = await getFunctionDefinitions( + document, + true + ); + const matchElements = { + function: functionDefinitions, + inner_function: innerFunctionDefinitions, + }; + + for (const [elementType, elements] of Object.entries(matchElements)) { + elements.forEach((funcDef) => { + const range = new vscode.Range( + new vscode.Position(funcDef.range.start.line, 0), + new vscode.Position(funcDef.range.end.line, 10000) + ); + + const codelenRegisters: CodeLensRegistration[] = + CodeLensManager.getInstance().getRegistrations(); + // Iterate over codelenRegisters with 'of' instead of 'in' + for (const codelenRegister of codelenRegisters) { + if (codelenRegister.elementType !== elementType) { + continue; + } + + // Read range content in document + const functionCode = document.getText(range); + const parentRange = funcDef.containerRange; + + // Fix the string replacement syntax and closing 
parentheses + const prompt = codelenRegister.promptGenerator + .replace(/{__filename__}/g, document.uri.fsPath) + .replace(/{__functionName__}/g, funcDef.name) + .replace(/{__functionStartLine__}/g, `${range.start.line}`) + .replace(/{__functionEndLine__}/g, `${range.end.line}`) + .replace(/{__containerName__}/g, funcDef.containerName || "") + .replace( + /{__containerStartLine__}/g, + `${parentRange ? parentRange.start.line : -1}` + ) + .replace( + /{__containerEndLine__}/g, + `${parentRange ? parentRange.end.line : -1}` + ) + .replace(/{__functionCode__}/g, functionCode); // Fixed syntax to replace all occurrences + if (codelenRegister.objectName === "Explain") { + const lens = new vscode.CodeLens(range, { + title: codelenRegister.objectName, + command: "CodeLens.Explain", + // arguments: [document.uri.fsPath, range, funcDef.name] // Commented out as it's not used + arguments: [ + prompt, + { start: range.start.line, end: range.end.line }, + ], + }); + + lenses.push(lens); + } else { + const lens = new vscode.CodeLens(range, { + title: codelenRegister.objectName, + command: "DevChat.Chat", + // arguments: [document.uri.fsPath, range, funcDef.name] // Commented out as it's not used + arguments: [prompt], + }); + + lenses.push(lens); + } + } + }); + } + + // log info find how many functionDefinitions, innerFunctionDefinitions, lenses + logger + .channel() + ?.info( + `found ${functionDefinitions.length} functions, ${innerFunctionDefinitions.length} inner functions, ${lenses.length} registered codeLenses in document: ${document.fileName}` + ); + return lenses; + } +} export function registerCodeLensProvider(context) { - const provider = new FunctionTestCodeLensProvider(); - const disposable = vscode.languages.registerCodeLensProvider("*", provider); + const provider = new FunctionTestCodeLensProvider(); + const disposable = vscode.languages.registerCodeLensProvider("*", provider); - context.subscriptions.push(disposable); + context.subscriptions.push(disposable); } 
diff --git a/workflowsCommands b/workflowsCommands index d66ada9..dbdb1ba 160000 --- a/workflowsCommands +++ b/workflowsCommands @@ -1 +1 @@ -Subproject commit d66ada970b9379a13f5d4c8e1473725690969260 +Subproject commit dbdb1ba29db3536e4ae358113f4251b78845c879 diff --git a/yarn.lock b/yarn.lock index 4ce826e..86053b3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -23,7 +23,7 @@ resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz" integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== -"@babel/core@^7.0.0", "@babel/core@^7.0.0-0", "@babel/core@^7.0.0-0 || ^8.0.0-0 <8.0.0", "@babel/core@^7.11.6", "@babel/core@^7.12.0", "@babel/core@^7.12.3", "@babel/core@^7.13.0", "@babel/core@^7.20.2", "@babel/core@^7.21.8", "@babel/core@^7.4.0 || ^8.0.0-0 <8.0.0", "@babel/core@^7.8.0", "@babel/core@>=7.0.0-beta.0 <8": +"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.20.2", "@babel/core@^7.21.8": version "7.21.8" resolved "https://registry.npmjs.org/@babel/core/-/core-7.21.8.tgz" integrity sha512-YeM22Sondbo523Sz0+CirSPnbj9bG3P0CdHcBZdqUuaeOaYEFbOLoGU7lebvGP6P5J/WE9wOn7u7C4J9HvS1xQ== @@ -1302,7 +1302,7 @@ slash "^3.0.0" write-file-atomic "^4.0.2" -"@jest/types@^29.0.0", "@jest/types@^29.5.0": +"@jest/types@^29.5.0": version "29.5.0" resolved "https://registry.npmjs.org/@jest/types/-/types-29.5.0.tgz" integrity sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog== @@ -1323,7 +1323,7 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@3.1.0": +"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== @@ -1341,19 +1341,11 @@ "@jridgewell/gen-mapping" 
"^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@1.4.14": +"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== -"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.15", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz" - integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== - dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" - "@jridgewell/trace-mapping@0.3.9": version "0.3.9" resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz" @@ -1362,6 +1354,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.15", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.18" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz" @@ -1415,7 +1415,7 @@ "@nodelib/fs.stat" "2.0.5" run-parallel "^1.1.9" -"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity 
sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== @@ -1503,7 +1503,7 @@ resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz" integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== -"@tiptap/core@^2.0.0", "@tiptap/core@^2.0.3": +"@tiptap/core@^2.0.3": version "2.0.3" resolved "https://registry.npmjs.org/@tiptap/core/-/core-2.0.3.tgz" integrity sha512-jLyVIWAdjjlNzrsRhSE2lVL/7N8228/1R1QtaVU85UlMIwHFAcdzhD8FeiKkqxpTnGpaDVaTy7VNEtEgaYdCyA== @@ -2045,7 +2045,7 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/parser@^5.0.0", "@typescript-eslint/parser@^5.56.0": +"@typescript-eslint/parser@^5.56.0": version "5.59.0" resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.0.tgz" integrity sha512-qK9TZ70eJtjojSUMrrEwA9ZDQ4N0e/AuoOIgXuNBorXYcBDk397D2r5MIe1B3cok/oCtdNC5j+lUUpVB+Dpb+w== @@ -2123,7 +2123,7 @@ jszip "^3.10.1" semver "^7.3.8" -"@webassemblyjs/ast@^1.11.5", "@webassemblyjs/ast@1.11.5": +"@webassemblyjs/ast@1.11.5", "@webassemblyjs/ast@^1.11.5": version "1.11.5" resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.5.tgz" integrity sha512-LHY/GSAZZRpsNQH+/oHqhRQ5FT7eoULcBqgfyTB5nQHogFnK3/7QoN7dLnwSE/JkUAF0SrRuclT7ODqMFtWxxQ== @@ -2224,7 +2224,7 @@ "@webassemblyjs/wasm-gen" "1.11.5" "@webassemblyjs/wasm-parser" "1.11.5" -"@webassemblyjs/wasm-parser@^1.11.5", "@webassemblyjs/wasm-parser@1.11.5": +"@webassemblyjs/wasm-parser@1.11.5", "@webassemblyjs/wasm-parser@^1.11.5": version "1.11.5" resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.5.tgz" integrity sha512-SVXUIwsLQlc8srSD7jejsfTU83g7pIGr2YYNb9oHdtldSxaOhvA5xwvIiWIfcX8PlSakgqMXsLpLfbbJ4cBYew== @@ -2292,7 +2292,7 @@ acorn-walk@^8.1.1: resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== 
-"acorn@^6.0.0 || ^7.0.0 || ^8.0.0", acorn@^8, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: +acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: version "8.8.2" resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz" integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== @@ -2323,7 +2323,7 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" -ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.9.1: +ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2333,17 +2333,7 @@ ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.9.1: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0: - version "8.12.0" - resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" - integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - -ajv@^8.8.2, ajv@^8.9.0: +ajv@^8.0.0, ajv@^8.9.0: version "8.12.0" resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== @@ -2419,16 +2409,16 @@ argparse@^2.0.1: resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -array-flatten@^2.1.2: - version "2.1.2" - resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== - array-flatten@1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" integrity 
sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== +array-flatten@^2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + array-union@^1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz" @@ -2465,7 +2455,7 @@ axios@^1.3.6: form-data "^4.0.0" proxy-from-env "^1.1.0" -babel-jest@^29.0.0, babel-jest@^29.5.0: +babel-jest@^29.5.0: version "29.5.0" resolved "https://registry.npmjs.org/babel-jest/-/babel-jest-29.5.0.tgz" integrity sha512-mA4eCDh5mSo2EcA9xQjVTpmbbNk32Zb3Q3QFQsNhaK56Q+yoXowzFodLux30HRgyOho5rsQ6B0P9QpMkvvnJ0Q== @@ -2668,7 +2658,7 @@ browser-stdout@1.3.1: resolved "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== -browserslist@^4.14.5, browserslist@^4.22.2, "browserslist@>= 4.21.0": +browserslist@^4.14.5, browserslist@^4.22.2: version "4.22.2" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.22.2.tgz" integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== @@ -2805,7 +2795,7 @@ check-error@^1.0.2: resolved "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" integrity sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA== -chokidar@^3.5.3, chokidar@3.5.3: +chokidar@3.5.3, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -2900,16 +2890,16 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" - 
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - color-name@1.1.3: version "1.1.3" resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + colorette@^2.0.10, colorette@^2.0.14: version "2.0.20" resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz" @@ -2989,12 +2979,7 @@ content-type@~1.0.4: resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz" integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -convert-source-map@^1.6.0: - version "1.9.0" - resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" - integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== - -convert-source-map@^1.7.0: +convert-source-map@^1.6.0, convert-source-map@^1.7.0: version "1.9.0" resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== @@ -3088,13 +3073,6 @@ dayjs@^1.11.10: resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz" integrity sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ== -debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4, debug@4, debug@4.3.4: - version "4.3.4" - resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - debug@2.6.9: version "2.6.9" resolved 
"https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" @@ -3102,6 +3080,13 @@ debug@2.6.9: dependencies: ms "2.0.0" +debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + decamelize@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz" @@ -3164,16 +3149,16 @@ delayed-stream@~1.0.0: resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" - integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== - depd@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + dequal@^2.0.0: version "2.0.3" resolved "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz" @@ -3206,6 +3191,11 @@ diff-sequences@^29.4.3: resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.4.3.tgz" integrity sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA== +diff@5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz" + integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== + diff@^4.0.1: version "4.0.2" resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" @@ -3216,11 +3206,6 @@ 
diff@^5.1.0: resolved "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz" integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== -diff@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" @@ -3379,6 +3364,11 @@ escape-html@~1.0.3: resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== +escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" @@ -3389,12 +3379,7 @@ escape-string-regexp@^2.0.0: resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== -escape-string-regexp@^4.0.0, escape-string-regexp@4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -eslint-scope@^5.1.1, eslint-scope@5.1.1: +eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== @@ -3415,7 +3400,7 @@ eslint-visitor-keys@^3.3.0, 
eslint-visitor-keys@^3.4.0: resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.0.tgz" integrity sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ== -eslint@*, "eslint@^6.0.0 || ^7.0.0 || ^8.0.0", "eslint@^6.0.0 || ^7.0.0 || >=8.0.0", eslint@^8.36.0: +eslint@^8.36.0: version "8.38.0" resolved "https://registry.npmjs.org/eslint/-/eslint-8.38.0.tgz" integrity sha512-pIdsD2jwlUGf/U38Jv97t8lq6HpaU/G9NKbYmpWpZGw3LdTNhZLbJePqxOXGB5+JEKfOPU/XLxYxFh03nr1KTg== @@ -3494,12 +3479,7 @@ estraverse@^4.1.1: resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== -estraverse@^5.1.0: - version "5.3.0" - resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -estraverse@^5.2.0: +estraverse@^5.1.0, estraverse@^5.2.0: version "5.3.0" resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== @@ -3613,7 +3593,7 @@ fast-glob@^3.2.11, fast-glob@^3.2.9: merge2 "^1.3.0" micromatch "^4.0.4" -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0, fast-json-stable-stringify@2.x: +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== @@ -3709,23 +3689,7 @@ find-cache-dir@^3.3.2: make-dir "^3.0.2" pkg-dir "^4.1.0" -find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" - integrity 
sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^5.0.0, find-up@5.0.0: +find-up@5.0.0, find-up@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== @@ -3733,6 +3697,14 @@ find-up@^5.0.0, find-up@5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" @@ -3846,7 +3818,7 @@ get-stream@^6.0.0: resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -glob-parent@^5.1.2: +glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -3860,31 +3832,24 @@ glob-parent@^6.0.1, glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" -glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - 
is-glob "^4.0.1" - glob-to-regexp@^0.4.1: version "0.4.1" resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== -glob@^7.0.3, glob@^7.1.3: - version "7.2.3" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== +glob@7.2.0: + version "7.2.0" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.1.1" + minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.1.4: +glob@^7.0.3, glob@^7.1.3, glob@^7.1.4: version "7.2.3" resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -3907,18 +3872,6 @@ glob@^8.1.0: minimatch "^5.0.1" once "^1.3.0" -glob@7.2.0: - version "7.2.0" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz" - integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - globals@^11.1.0: version "11.12.0" resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" @@ -4062,7 +4015,7 @@ hastscript@^7.0.0: property-information "^6.0.0" space-separated-tokens "^2.0.0" -he@^1.2.0, he@1.2.0: +he@1.2.0, he@^1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== @@ -4131,16 +4084,6 @@ http-deceiver@^1.2.7: resolved 
"https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - http-errors@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" @@ -4152,6 +4095,16 @@ http-errors@2.0.0: statuses "2.0.1" toidentifier "1.0.1" +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + http-parser-js@>=0.5.1: version "0.5.8" resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" @@ -4245,7 +4198,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.3, inherits@2, inherits@2.0.4: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.3: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -4260,16 +4213,16 @@ interpret@^3.1.1: resolved "https://registry.npmjs.org/interpret/-/interpret-3.1.1.tgz" integrity sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ== -ipaddr.js@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz" - integrity 
sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== - ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" @@ -4401,16 +4354,16 @@ is-wsl@^2.2.0: dependencies: is-docker "^2.0.0" -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - isarray@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + isexe@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" @@ -4667,7 +4620,7 @@ jest-resolve-dependencies@^29.5.0: jest-regex-util "^29.4.3" jest-snapshot "^29.5.0" -jest-resolve@*, jest-resolve@^29.5.0: +jest-resolve@^29.5.0: version "29.5.0" resolved "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.5.0.tgz" integrity sha512-1TzxJ37FQq7J10jPtQjcc+MkCkE3GBpBecsSUWJ0qZNJpmg6m0D9/7II03yJulm3H/fvVjgqLh/k2eYg+ui52w== @@ -4823,7 +4776,7 @@ jest-worker@^29.5.0: merge-stream "^2.0.0" supports-color "^8.0.0" -jest@^29.0.0, jest@^29.5.0: +jest@^29.5.0: version "29.5.0" resolved 
"https://registry.npmjs.org/jest/-/jest-29.5.0.tgz" integrity sha512-juMg3he2uru1QoXX078zTa7pO85QyB9xajZc6bU+d9yEGwrKX6+vGmJQ3UdVZsvTEUARIdObzH68QItim6OSSQ== @@ -4843,6 +4796,13 @@ js-tokens@^4.0.0: resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== +js-yaml@4.1.0, js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + js-yaml@^3.13.1: version "3.14.1" resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" @@ -4851,13 +4811,6 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" -js-yaml@^4.1.0, js-yaml@4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" @@ -5069,7 +5022,7 @@ make-dir@^3.0.0, make-dir@^3.0.2: dependencies: semver "^6.0.0" -make-error@^1.1.1, make-error@^1.3.6, make-error@1.x: +make-error@1.x, make-error@^1.1.1, make-error@^1.3.6: version "1.3.6" resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -5114,7 +5067,7 @@ memfs@^3.4.3: dependencies: fs-monkey "^1.0.3" -merge-descriptors@~1.0.0, merge-descriptors@1.0.1: +merge-descriptors@1.0.1, merge-descriptors@~1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== @@ -5142,7 +5095,7 @@ micromatch@^4.0.0, 
micromatch@^4.0.2, micromatch@^4.0.4: braces "^3.0.2" picomatch "^2.3.1" -"mime-db@>= 1.43.0 < 2", mime-db@1.52.0: +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": version "1.52.0" resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== @@ -5169,6 +5122,13 @@ minimalistic-assert@^1.0.0: resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== +minimatch@5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz" + integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== + dependencies: + brace-expansion "^2.0.1" + minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" @@ -5183,13 +5143,6 @@ minimatch@^5.0.1: dependencies: brace-expansion "^2.0.1" -minimatch@5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz" - integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== - dependencies: - brace-expansion "^2.0.1" - minimist@^1.2.6: version "1.2.8" resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" @@ -5569,6 +5522,11 @@ path-parse@^1.0.7: resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + path-to-regexp@^1.7.0: version "1.8.0" resolved 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" @@ -5576,11 +5534,6 @@ path-to-regexp@^1.7.0: dependencies: isarray "0.0.1" -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - path-type@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" @@ -5784,7 +5737,7 @@ prosemirror-schema-list@^1.2.2: prosemirror-state "^1.0.0" prosemirror-transform "^1.0.0" -prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.1, prosemirror-state@^1.4.2: +prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.1: version "1.4.2" resolved "https://registry.npmjs.org/prosemirror-state/-/prosemirror-state-1.4.2.tgz" integrity sha512-puuzLD2mz/oTdfgd8msFbe0A42j5eNudKAAPDB0+QJRw8cO1ygjLmhLrg9RvDpf87Dkd6D4t93qdef00KKNacQ== @@ -5821,7 +5774,7 @@ prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transfor dependencies: prosemirror-model "^1.0.0" -prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.28.2, prosemirror-view@^1.30.2, prosemirror-view@^1.31.0: +prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.28.2, prosemirror-view@^1.31.0: version "1.31.1" resolved "https://registry.npmjs.org/prosemirror-view/-/prosemirror-view-1.31.1.tgz" integrity sha512-9NKJdXnGV4+1qFRi16XFZxpnx6zNok9MEj/HElkqUJ1HtOyKOICffKxqoXUUCAdHrrP+yMDvdXc6wT7GGWBL3A== @@ -6066,7 +6019,7 @@ reusify@^1.0.4: resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rimraf@^2.6.3: +rimraf@2, rimraf@^2.6.3: version "2.7.1" 
resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== @@ -6080,13 +6033,6 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" -rimraf@2: - version "2.7.1" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - rope-sequence@^1.3.0: version "1.3.3" resolved "https://registry.npmjs.org/rope-sequence/-/rope-sequence-1.3.3.tgz" @@ -6099,7 +6045,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -safe-buffer@^5.1.0, safe-buffer@>=5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1, safe-buffer@5.1.2: +safe-buffer@5.1.2, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== @@ -6145,28 +6091,18 @@ selfsigned@^2.1.1: dependencies: node-forge "^1" -semver@^6.0.0: - version "6.3.1" - resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^6.3.0: - version "6.3.1" - resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^6.3.1: - version "6.3.1" - resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@7.x: +semver@7.x, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: version "7.5.4" resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" 
integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" +semver@^6.0.0, semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + send@0.18.0: version "0.18.0" resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" @@ -6186,7 +6122,7 @@ send@0.18.0: range-parser "~1.2.1" statuses "2.0.1" -serialize-javascript@^6.0.0, serialize-javascript@6.0.0: +serialize-javascript@6.0.0, serialize-javascript@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz" integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== @@ -6317,14 +6253,6 @@ sockjs@^0.3.24: uuid "^8.3.2" websocket-driver "^0.7.4" -source-map-support@~0.5.20: - version "0.5.21" - resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - source-map-support@0.5.13: version "0.5.13" resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz" @@ -6333,6 +6261,14 @@ source-map-support@0.5.13: buffer-from "^1.0.0" source-map "^0.6.0" +source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0: version "0.6.1" resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" @@ -6378,22 +6314,15 @@ stack-utils@^2.0.3: dependencies: 
escape-string-regexp "^2.0.0" -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" - integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - statuses@2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== -string_decoder@^1.1.1, string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== string-argv@^0.3.2: version "0.3.2" @@ -6417,6 +6346,13 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string_decoder@^1.1.1, string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" @@ -6434,11 +6370,18 @@ strip-final-newline@^2.0.0: resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1, strip-json-comments@3.1.1: +strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version 
"3.1.1" resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== +supports-color@8.1.1, supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-color@^5.3.0: version "5.5.0" resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" @@ -6453,20 +6396,6 @@ supports-color@^7.1.0, supports-color@^7.2.0: dependencies: has-flag "^4.0.0" -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-color@8.1.1: - version "8.1.1" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" @@ -6583,7 +6512,7 @@ ts-loader@^9.4.2: micromatch "^4.0.0" semver "^7.3.4" -ts-node@^10.9.1, ts-node@>=9.0.0: +ts-node@^10.9.1: version "10.9.1" resolved "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz" integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== @@ -6626,7 +6555,7 @@ type-check@^0.4.0, type-check@~0.4.0: dependencies: prelude-ls "^1.2.1" -type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8, type-detect@4.0.8: +type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8: 
version "4.0.8" resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== @@ -6654,7 +6583,7 @@ type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -typescript@*, typescript@^4.9.5, typescript@>=2.7, "typescript@>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta", "typescript@>=4.3 <6": +typescript@^4.9.5: version "4.9.5" resolved "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== @@ -6782,7 +6711,7 @@ unist-util-visit@^5.0.0: unist-util-is "^6.0.0" unist-util-visit-parents "^6.0.0" -unpipe@~1.0.0, unpipe@1.0.0: +unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== @@ -6952,7 +6881,7 @@ web-streams-polyfill@^3.0.3: resolved "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz" integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== -webpack-cli@^5.0.1, webpack-cli@5.x.x: +webpack-cli@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.0.1.tgz" integrity sha512-S3KVAyfwUqr0Mo/ur3NzIp6jnerNpo7GUO6so51mxLi1spqsA17YcMXy0WOIJtBSnj748lthxC6XLbNKh/ZC+A== @@ -7031,7 +6960,7 @@ webpack-sources@^3.2.3: resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== -"webpack@^4.0.0 || ^5.0.0", "webpack@^4.37.0 || ^5.0.0", webpack@^5.0.0, webpack@^5.1.0, webpack@^5.20.0, webpack@^5.76.3, "webpack@>=4.0.0 <6.0.0", webpack@>=5, webpack@5.x.x: 
+webpack@^5.76.3: version "5.80.0" resolved "https://registry.npmjs.org/webpack/-/webpack-5.80.0.tgz" integrity sha512-OIMiq37XK1rWO8mH9ssfFKZsXg4n6klTEDL7S8/HqbAOBBaiy8ABvXvz0dDCXeEF9gqwxSvVk611zFPjS8hJxA== @@ -7061,7 +6990,7 @@ webpack-sources@^3.2.3: watchpack "^2.4.0" webpack-sources "^3.2.3" -websocket-driver@^0.7.4, websocket-driver@>=0.5.1: +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: version "0.7.4" resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== @@ -7144,17 +7073,12 @@ yaml@^2.3.2: resolved "https://registry.npmjs.org/yaml/-/yaml-2.3.2.tgz" integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== -yargs-parser@^20.2.2, yargs-parser@20.2.4: +yargs-parser@20.2.4, yargs-parser@^20.2.2: version "20.2.4" resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz" integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== -yargs-parser@^21.0.1: - version "21.1.1" - resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz" - integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== - -yargs-parser@^21.1.1: +yargs-parser@^21.0.1, yargs-parser@^21.1.1: version "21.1.1" resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== @@ -7169,19 +7093,6 @@ yargs-unparser@2.0.0: flat "^5.0.2" is-plain-obj "^2.1.0" -yargs@^17.3.1: - version "17.7.1" - resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz" - integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - 
string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - yargs@16.2.0: version "16.2.0" resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" @@ -7195,6 +7106,19 @@ yargs@16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.3.1: + version "17.7.1" + resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz" + integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yn@3.1.1: version "3.1.1" resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz"