Sync up site-packages to devchat[main](3542049e)
parent e60bfbc312
commit 314fce5cdd

RECORD
@@ -18,7 +18,7 @@ devchat/__pycache__/message.cpython-38.pyc,,
 devchat/__pycache__/prompt.cpython-38.pyc,,
 devchat/__pycache__/store.cpython-38.pyc,,
 devchat/__pycache__/utils.cpython-38.pyc,,
-devchat/_cli/__init__.py,sha256=QGJsPz9XTBrKHInAxe9ucokiHZQ-XLeXUriS6fNIWis,345
+devchat/_cli/__init__.py,sha256=mSJu3LdXlzc1Iyk9jU7VC9Hk59NPRHJjkJ11bNyR97I,349
 devchat/_cli/__pycache__/__init__.cpython-38.pyc,,
 devchat/_cli/__pycache__/errors.cpython-38.pyc,,
 devchat/_cli/__pycache__/log.cpython-38.pyc,,
@@ -29,21 +29,21 @@ devchat/_cli/__pycache__/router.cpython-38.pyc,,
 devchat/_cli/__pycache__/run.cpython-38.pyc,,
 devchat/_cli/__pycache__/topic.cpython-38.pyc,,
 devchat/_cli/__pycache__/utils.cpython-38.pyc,,
-devchat/_cli/errors.py,sha256=akl1b5EzZhAlhQyfcFNOSTaLmJ3zG5PSTVrRGPaJ1bg,58
-devchat/_cli/log.py,sha256=QU7jXZC3FFEFH-_9KaHtMESiY8qGX4ocG8MKDpg0cSY,3278
-devchat/_cli/main.py,sha256=1IFbGSs6PbmoRmL3r0-SwlsGaNyl0mQ1tpUgYRW8GuQ,582
-devchat/_cli/prompt.py,sha256=PniymJQqNecYz02mrduYHXO5LeSjDxdraWyI_7dEtrE,2916
-devchat/_cli/route.py,sha256=9YVnry0OpW4B_RUiRtXqpblOZfQoFmSH6Xs7hflzmRg,2490
-devchat/_cli/router.py,sha256=XKzaSxQ5dVicVnoA-jqrecB-jXYDvIX82mVy5FlAWaU,6382
-devchat/_cli/run.py,sha256=QAidaTAvVLPyqylROiGgd5y5370K4ylJvUORjZoziVo,6657
-devchat/_cli/topic.py,sha256=Nxiir_0ArOQt377ywM8vxmEfGIGUfIRCiHSVD2z-nP8,1167
-devchat/_cli/utils.py,sha256=ooepOTF-0mLN6sg5wcnnwE52WkWWayZBo7NepEFxveI,5460
-devchat/anthropic/__init__.py,sha256=xaFR1uXxn0sVHBhCJdJKuWKVVgPnSLw3mlaCFFivD_8,97
+devchat/_cli/errors.py,sha256=7RiLmM6Hp8UINRaMQEQDhvfEWQjvqMYQFfWk5tiU1MQ,56
+devchat/_cli/log.py,sha256=DkBDNRsKNdLesTVyNyPe6a4nni34I7-qqhMd_tiar7c,3260
+devchat/_cli/main.py,sha256=kNJgS628VIrd-o-svjxvpy27kbm0EJ3ESgk_0W-o0Gw,486
+devchat/_cli/prompt.py,sha256=gnSL0_gnrCMGiauwGwVvYyVuWiih1RpwPrSbAcuoNXQ,2889
+devchat/_cli/route.py,sha256=nX5xae6SOoae2R8losu2wqAFmpPcuwdMX5C7H7B_VYE,2386
+devchat/_cli/router.py,sha256=N01EwQ30LOPqYngVqUMhsUv4AUTjIthyn7pi-M6I1eU,6491
+devchat/_cli/run.py,sha256=blxGEws2TxnFvcbJgigNGwJoqgb-6T9-Vrvt0zkxu-w,6381
+devchat/_cli/topic.py,sha256=WgRgCVwn3aX_gZm9JlLWFT0LKuiCSTnvMqzkiQH8B0s,1119
+devchat/_cli/utils.py,sha256=NqquK-NHWuxiwUdSP_MHUjc7EqbYtTKMSQqQcSRobbY,5362
+devchat/anthropic/__init__.py,sha256=FDPLNVL3UmINQbdCbVdQhJ2d9Ti6AkpZ3L2qm9rSwaU,91
 devchat/anthropic/__pycache__/__init__.cpython-38.pyc,,
 devchat/anthropic/__pycache__/anthropic_chat.cpython-38.pyc,,
-devchat/anthropic/anthropic_chat.py,sha256=OujoXOQywPQf4gjLhdZBYTwKoRDs8hujktss3hN-BNk,423
-devchat/assistant.py,sha256=LCBH5XaTiUpd3xGCdvkKD9Uyn8y7tf5aoArQTAo1CN4,6642
-devchat/chat.py,sha256=TEO8OndmL4hpJ1D-QAFKO-JB_7w1kTeUC3VVwL9FSUQ,1676
+devchat/anthropic/anthropic_chat.py,sha256=qQx-5kDn5uSe8LAM1IrY5ybQPS54pUSA1g3kv2Je5Sg,424
+devchat/assistant.py,sha256=ucgq_VwrG2impluYhTaJdjxxuWKzv_YBTGGERtHHnk4,6690
+devchat/chat.py,sha256=RM8AXyGN-MTQulaj0TBIT1NAEvL2y8eVnQGNX7xA_4Q,1677
 devchat/chatmark/.gitignore,sha256=8wf0Azg8LJGtO3zamZ8sHM-ARFcedTCPK1disjofnhY,4
 devchat/chatmark/README.md,sha256=akXLntx1ebzWaIqwt0hQ_8eVd79t-CQZ5hOynM20JLk,157
 devchat/chatmark/__init__.py,sha256=l1xRneWsKKAWs0R4VoynYytFahCRgyvR-tbrhKK3iiE,203
@@ -54,13 +54,13 @@ devchat/chatmark/__pycache__/step.cpython-38.pyc,,
 devchat/chatmark/__pycache__/widgets.cpython-38.pyc,,
 devchat/chatmark/chatmark_example/README.md,sha256=vtSGvEL1IOQPu56qP5s6ZazW-41iNkS_Ph0GBjjWATA,413
 devchat/chatmark/chatmark_example/__pycache__/main.cpython-38.pyc,,
-devchat/chatmark/chatmark_example/main.py,sha256=JfAC7opkVIUdzrOfyVwb04FlX7whVFNyeWrf-_ZWC0A,3600
-devchat/chatmark/form.py,sha256=bbPQhhyMDbrrs2bX8UmVKAZ6n4kcYJEppDD3700ksbM,2586
+devchat/chatmark/chatmark_example/main.py,sha256=VvWhcpCd7t3Itcc2e07sRsj2RcMF9hUifxeEfGPPUt0,3575
+devchat/chatmark/form.py,sha256=8azb4U3M2OddKEU0R6bdfrnmNlIhfAFIloj_CSjPD5k,2538
 devchat/chatmark/iobase.py,sha256=CjTHjDAxHkLHiNsrp4aaTjdT6mQB5Dy4B1UsJWVcKS8,913
 devchat/chatmark/step.py,sha256=jATqxc1ZoeKlkEoO-0DMoyVzLYGNA58S8hL5NMn6W7A,574
 devchat/chatmark/widgets.py,sha256=5FEghZ-BZPHyjPUIKZ3L6BFhNHawa2JdPX6OzBs7Yfs,10159
-devchat/config.py,sha256=JJxUfCH6fr40Ek9FawfX1jh6Z6GEzU_OD3GFXOXsjig,4474
-devchat/engine/__init__.py,sha256=KSlnUY42kg9EyTaW2lrhdANWl1ORkg-5vYoLZfv9e8Q,373
+devchat/config.py,sha256=0mrD0KCyq6kHEjgVT0xAbJxQbsety1ThTQyda4utkH0,4133
+devchat/engine/__init__.py,sha256=3Iz-16ziWQ8BsclsWWjIE0zHr1ldfev5I_uiMBNOvwU,374
 devchat/engine/__pycache__/__init__.cpython-38.pyc,,
 devchat/engine/__pycache__/command_parser.cpython-38.pyc,,
 devchat/engine/__pycache__/command_runner.cpython-38.pyc,,
@@ -68,24 +68,24 @@ devchat/engine/__pycache__/namespace.cpython-38.pyc,,
 devchat/engine/__pycache__/recursive_prompter.cpython-38.pyc,,
 devchat/engine/__pycache__/router.cpython-38.pyc,,
 devchat/engine/__pycache__/util.cpython-38.pyc,,
-devchat/engine/command_parser.py,sha256=jeJQe-_nEGhYF3mwVitkOefaFfiW9YwYCLJFooMXQLE,1694
-devchat/engine/command_runner.py,sha256=o5d8L_f_0qLSkkT-E5Wosnzn2XJQ_qqJbPm_znBzW0A,9855
-devchat/engine/namespace.py,sha256=MghROybwfVYhfKz8efeG38awQR4eXUThwuVL9J07MGc,5175
-devchat/engine/recursive_prompter.py,sha256=Fknj_uuLcPB60tsp7V8GM_1YUXN8XHbKU1MCi66JwyM,1558
-devchat/engine/router.py,sha256=1Rerwmp8POq9_G-nG_nMLiuGeWkTlh9KKv_nLKcneVk,1740
-devchat/engine/util.py,sha256=7PFTBBLkUG5O2xVdH043p0am4rCBSEJoOBSGk__4vps,5943
-devchat/ide/__init__.py,sha256=55h2mDn2z6y49uJUjNymD0Jg8YaxlMt-YaJKI4S6QlU,138
+devchat/engine/command_parser.py,sha256=irCjyyVxLcNfHUWXnkSWzp0LHNoBNuFGrfqBm7u92rU,1694
+devchat/engine/command_runner.py,sha256=sfZY3K-TDWMrB4u5PYU8t2hg8YWsciz_cI01susnAfE,9463
+devchat/engine/namespace.py,sha256=AfKDg5LKVTSGr7_6MxclEjCG7eWrqjQPcm3W_PulC2Q,5158
+devchat/engine/recursive_prompter.py,sha256=1SXHqqWSRjm9Dr21590KhoqHPjSYkDG1-wQt784_Ybw,1560
+devchat/engine/router.py,sha256=cGEWkFlnjdaxwUKu442C9lD6seEpPbSYiYD5SoqH9Sg,1708
+devchat/engine/util.py,sha256=3q7at6Vm7lTWyC_tzQ5sNiPC2m6sUcqaW9-0kG3B5Po,5776
+devchat/ide/__init__.py,sha256=VaPxmh12S3oUdrZHcHHYOpQEAqOlWcPJRaOncKG5jnA,152
 devchat/ide/__pycache__/__init__.cpython-38.pyc,,
 devchat/ide/__pycache__/idea_services.cpython-38.pyc,,
 devchat/ide/__pycache__/rpc.cpython-38.pyc,,
 devchat/ide/__pycache__/service.cpython-38.pyc,,
 devchat/ide/__pycache__/types.cpython-38.pyc,,
 devchat/ide/__pycache__/vscode_services.cpython-38.pyc,,
-devchat/ide/idea_services.py,sha256=OB3xQVf4kCS_9Gn9-GsqLqFfS4l-QNPmmp6kgd3iuVY,394
-devchat/ide/rpc.py,sha256=4Ahe-s46RE35CWUto9H_EdiYm6gjY2x3yzAmHZ7-g8g,2464
-devchat/ide/service.py,sha256=-kRitl7rDuD98UlIFG9JVKVK0t4lys6r7-nlDY-BBQ4,4955
-devchat/ide/types.py,sha256=7GKd0zhK_oFhP-wQDXEbNzTHxO1J2rgQ2H8GtYZlOdE,1113
-devchat/ide/vscode_services.py,sha256=L_F5PyGsPuR4Femt1xPEh-znLDB5sTZpAyNQc6tijv4,5264
+devchat/ide/idea_services.py,sha256=KUwxmn2K9dYO7d5cmD7lTzEMGH-E4gTEKy9xwoQDAhM,393
+devchat/ide/rpc.py,sha256=ag3hBLBuM4ehFE6bvfHbrMR1nMrjkxt3sJ9alHyTL4k,2344
+devchat/ide/service.py,sha256=19uwObr7PdFK1VTjYU8AIeeKKCy5JnspQjI0WEFJEnY,4791
+devchat/ide/types.py,sha256=HdSl_6Eu_8miq1GWzjir0MIvLFFIwtn6Yt5QF15fLWQ,1091
+devchat/ide/vscode_services.py,sha256=nd6SceGju21dRCgb4XVWeK6tsBkGqDmtMggjbI9NtQM,5200
 devchat/llm/__init__.py,sha256=IXhLbfNO-TV2ZIJwZOhjsL2Batb8WGq-gayrxp-z8a0,409
 devchat/llm/__pycache__/__init__.cpython-38.pyc,,
 devchat/llm/__pycache__/chat.cpython-38.pyc,,
@@ -93,32 +93,32 @@ devchat/llm/__pycache__/openai.cpython-38.pyc,,
 devchat/llm/__pycache__/pipeline.cpython-38.pyc,,
 devchat/llm/__pycache__/text_confirm.cpython-38.pyc,,
 devchat/llm/__pycache__/tools_call.cpython-38.pyc,,
-devchat/llm/chat.py,sha256=Hs_3qZRDIjekLQwPTj2RzCmidJbJ5b1pazsDmQJe27k,3445
-devchat/llm/openai.py,sha256=mG3V97sltmLUIVqvV33eO5aYjre3YHeQLt6GgjIIhQk,6237
-devchat/llm/pipeline.py,sha256=hVtwEfKVZ1S90Qb9SLe3UBRJZbtLPptgCEk8JHaEpGI,2002
+devchat/llm/chat.py,sha256=XWXUIpbWCMiuMCtBchrQpBpuyLwjga5KcCFzDoapbcc,3377
+devchat/llm/openai.py,sha256=8hK2OByDRq8sPgROf-UvVPA8Oz0lSDfMaAFSKh0D644,6208
+devchat/llm/pipeline.py,sha256=D214HASOUA7DsUm63_QDVFTYsHShPrrBwTbd0hM3tRI,1920
 devchat/llm/text_confirm.py,sha256=sdt7AUFDcsOZ0fLfS0vtjdS2_8xhkTF6aF8Sn05OlI0,1462
-devchat/llm/tools_call.py,sha256=Ks156bm_kkp6Sb3PP7Ci1cR4Gqf1pkye4oG5chd_rSg,8072
-devchat/memory/__init__.py,sha256=BD2vKfSmWsQrOJSKKXKBwcVcGQcZamglWZDstosn4cw,134
+devchat/llm/tools_call.py,sha256=OBObtFAzuqEJPq7Ro9hR4oirrcMtxGchlMQl8vL1CBc,8038
+devchat/memory/__init__.py,sha256=aPR0Dt8dcf4oWXu2HME2fFSpDJDeoBayPWMFOpO8v5k,133
 devchat/memory/__pycache__/__init__.cpython-38.pyc,,
 devchat/memory/__pycache__/base.cpython-38.pyc,,
 devchat/memory/__pycache__/fixsize_memory.cpython-38.pyc,,
 devchat/memory/base.py,sha256=mabEkWtez31gMtRamQkEMUGX6yEzb3P0uHFEgK1IBhI,598
 devchat/memory/fixsize_memory.py,sha256=iPBeylq8UayOepds6qrvVQf46pd8vMcrPO6brx_g-Po,1618
-devchat/message.py,sha256=OdFQ8rv4ZrX-wOrLb4KRdqfvyAvCbaAMhDBXDHWuwRU,758
-devchat/openai/__init__.py,sha256=9DcELA9I86vSQIySgABiZSb_QgE4qaT5s3n-ODASqiA,283
+devchat/message.py,sha256=oJr_KXlAfzGEFHB-SAN4iy4KMr7CdSB9VKUIOhdULCY,759
+devchat/openai/__init__.py,sha256=3da58vM6OExDCzC6VIvIWz80FwTDBT5I-UbALQC7R2Q,284
 devchat/openai/__pycache__/__init__.cpython-38.pyc,,
 devchat/openai/__pycache__/http_openai.cpython-38.pyc,,
 devchat/openai/__pycache__/openai_chat.cpython-38.pyc,,
 devchat/openai/__pycache__/openai_message.cpython-38.pyc,,
 devchat/openai/__pycache__/openai_prompt.cpython-38.pyc,,
-devchat/openai/http_openai.py,sha256=YH4tyNLq6ODmz9DCKcSOuTu6L4deV0dWrEEMGlxL1Rw,2653
-devchat/openai/openai_chat.py,sha256=7SDYyKtjlwGIMDlv2ovfDEDNWkqsPu_GnAQ9_84XbqU,5185
-devchat/openai/openai_message.py,sha256=xTmglsj5Iyvcytn3pUYhwkuiyJSx932N88fS4OCJ7Qk,3293
-devchat/openai/openai_prompt.py,sha256=ENh3XHOZlWnONwfw_7r6So7KJg_ihXx-qqpO7DOXdhw,10766
-devchat/prompt.py,sha256=CRFvl6x5Fs2CmaAghY4Bo05LKr6DeuYJe5Ut6w-lh_Y,9411
-devchat/store.py,sha256=wswzzPLlD7ievAsqsPiZo4NZek2lEZkPRJnu3QiEveE,12056
+devchat/openai/http_openai.py,sha256=5hEc1zRxhguia6vE3gpt1VrCJ6eyJuDe1xNIr5DRr8o,2695
+devchat/openai/openai_chat.py,sha256=GbkLzNIgWAHFCVbVsvz5BOUlNu58MayR1ax6lNYyqpc,5095
+devchat/openai/openai_message.py,sha256=HHBSLVxgEFpoiX47Ao4jtQj31X0LQ4CKgo9sLHPpxxY,3310
+devchat/openai/openai_prompt.py,sha256=vg4hj6MChn4Dhcxq3wLLZ6jAL2b-BpeZaa9R2KQN28k,10663
+devchat/prompt.py,sha256=ZMdyRE4WIidg46_S3-VwpZ6D52bVYpP5ANhzvSJyY8c,9492
+devchat/store.py,sha256=-KHp0N9F0t2VjAVXcANJa6tusNZQnd8d5vY4tuPzIFA,11954
 devchat/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
-devchat/utils.py,sha256=5EY6IyasRKc1LQ2gFE0or7GxhJqtetGUVb5SAeqes_E,7802
+devchat/utils.py,sha256=GdI7DovvEzeG6oKnn8R-siGeW0myfQ76X1tDAhEKELQ,7650
 devchat/workflow/README.md,sha256=a48aJE5X8vjgasI9lhJljlUjwLvn7vO7tlG0PFM67m0,201
 devchat/workflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 devchat/workflow/__pycache__/__init__.cpython-38.pyc,,
@@ -131,23 +131,23 @@ devchat/workflow/__pycache__/schema.cpython-38.pyc,,
 devchat/workflow/__pycache__/step.cpython-38.pyc,,
 devchat/workflow/__pycache__/user_setting.cpython-38.pyc,,
 devchat/workflow/__pycache__/workflow.cpython-38.pyc,,
-devchat/workflow/cli.py,sha256=RQyzg1mQm-sVNyGQM-4tgAUbruwTpMLC6nLy0GB53fw,461
+devchat/workflow/cli.py,sha256=V2kef1wO-BpWuAUPoqCXS2t9twdArXinNCy0QwE9vyM,462
 devchat/workflow/command/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 devchat/workflow/command/__pycache__/__init__.cpython-38.pyc,,
 devchat/workflow/command/__pycache__/config.cpython-38.pyc,,
 devchat/workflow/command/__pycache__/env.cpython-38.pyc,,
 devchat/workflow/command/__pycache__/list.cpython-38.pyc,,
 devchat/workflow/command/__pycache__/update.cpython-38.pyc,,
-devchat/workflow/command/config.py,sha256=4O22ebz-cx_CiKpEM7NnlbW02rb_OZk3vV_JKHMUPmk,713
-devchat/workflow/command/env.py,sha256=lTcWns9UnEKIRtr1wh9f1gVqBMPN5Tjr6f-r2d3LOKI,2367
-devchat/workflow/command/list.py,sha256=sgX9DQ68VvoYEMndBoqa7mEep4aaSG--mWOS6iapWJ0,3384
-devchat/workflow/command/update.py,sha256=CnJE3P4RIAHrDv3_QPi47u0kGaZvP08el7C-lzLoxOU,9820
+devchat/workflow/command/config.py,sha256=BH2ufIwMBbwWv-P3v8n_kKQQb55tXKGRq4eoFHxyc2s,712
+devchat/workflow/command/env.py,sha256=wXZc497GwSjWk8T37krTkxqyjUhWSAh0c0RCwmLzRw8,2301
+devchat/workflow/command/list.py,sha256=wCsfRZYLgM-CC3JWh7my42TiFgk2xdW_n4qNq2EPA3w,3378
+devchat/workflow/command/update.py,sha256=bsgtL9mt_jCBW-LrbjGpxSQqsCJpogPIa2E5fPMK3Es,9730
 devchat/workflow/command/update_flowchart.md,sha256=TrtZtK7K04yihUlIOvI10UCBTmZIumaRjRJJxkn85ow,1737
-devchat/workflow/env_manager.py,sha256=6A_bZZ1JkpPERQ_QFII8SBMcQDmQylBH3r2vv8wNtZw,5568
-devchat/workflow/envs.py,sha256=7vf71OG5JOlx8IGomjwDfOXPoRlJc11MmEOd4r4O5OY,297
-devchat/workflow/namespace.py,sha256=WBDL_S-Lt2xW6tvkhY9cIb6G_pwcJ3v5-tYhsPvPusI,1611
+devchat/workflow/env_manager.py,sha256=jxTrHP8Ki6N16tSZClKTWlVcM2KoO93r7ZWr4Is6LjQ,5434
+devchat/workflow/envs.py,sha256=-lVTLjWRMrb8RGVVlHgWKCiGZaojNdmycjHFT0ZKjEo,298
+devchat/workflow/namespace.py,sha256=zSYrz2tTwxGNfewqyWFNKN7vOeL2xxtmLRq0Pi1dx0Q,1613
 devchat/workflow/path.py,sha256=ldTOXJmff7vP3YjFVo_0Awu2fTxZmAQOXlhD-_v7EkM,1110
-devchat/workflow/schema.py,sha256=XIDZ6uqejeXVSGNJBc0uqDMs2YGvQF8RgCxfP_R6NYQ,1746
-devchat/workflow/step.py,sha256=0q7-sH_n3Gn1cqmq-8TJqr_lUXWd0JIwWOOyKixwYiw,5805
-devchat/workflow/user_setting.py,sha256=oPKLobDH36_kcQT5RAp5Oa0ZKw4cjXPzLn2zLOvdzvI,621
-devchat/workflow/workflow.py,sha256=7Pk6RORqmrE4gvF70ESgw5veUHNW9axAT0TKVp4MRg0,7677
+devchat/workflow/schema.py,sha256=FFqF0qJzr5gP9X3KefcA630kBKonC-Mn0TeyJGuXeVE,1674
+devchat/workflow/step.py,sha256=zs1k0LtjtBa4pD5hZCzw_ubDXv7osq9WAicEWtLALF4,5725
+devchat/workflow/user_setting.py,sha256=4OiV5s3RuO84fiJPdR_QY4bOJYP6g_68hkrdVQUVahQ,624
+devchat/workflow/workflow.py,sha256=yzhwdXa81-xqt_oSa29hS3lhVWXMFlYdmL1Iiz-LQFA,7624

devchat/_cli/__init__.py
@@ -1,17 +1,18 @@
 import os
 
 from .log import log
 from .prompt import prompt
+from .route import route
 from .run import run
 from .topic import topic
-from .route import route
 
 script_dir = os.path.dirname(os.path.realpath(__file__))
-os.environ['TIKTOKEN_CACHE_DIR'] = os.path.join(script_dir, '..', 'tiktoken_cache')
+os.environ["TIKTOKEN_CACHE_DIR"] = os.path.join(script_dir, "..", "tiktoken_cache")
 
 __all__ = [
-    'log',
-    'prompt',
-    'run',
-    'topic',
-    'route',
+    "log",
+    "prompt",
+    "run",
+    "topic",
+    "route",
 ]

devchat/_cli/errors.py
@@ -1,4 +1,2 @@
-
-
 class MissContentInPromptException(Exception):
     pass

devchat/_cli/log.py
@@ -1,13 +1,12 @@
-# pylint: disable=import-outside-toplevel
 import json
 import sys
 import time
-from typing import Optional, List, Dict
 from dataclasses import dataclass, field
+from typing import Dict, List, Optional
 
 import click
 
 
 @dataclass
 class PromptData:
     model: str = "none"
@@ -19,28 +18,34 @@ class PromptData:
     response_tokens: int = 0
 
 
-@click.command(help='Process logs')
-@click.option('--skip', default=0, help='Skip number prompts before showing the prompt history.')
-@click.option('-n', '--max-count', default=1, help='Limit the number of commits to output.')
-@click.option('-t', '--topic', 'topic_root', default=None,
-              help='Hash of the root prompt of the topic to select prompts from.')
-@click.option('--insert', default=None, help='JSON string of the prompt to insert into the log.')
-@click.option('--delete', default=None, help='Hash of the leaf prompt to delete from the log.')
+@click.command(help="Process logs")
+@click.option("--skip", default=0, help="Skip number prompts before showing the prompt history.")
+@click.option("-n", "--max-count", default=1, help="Limit the number of commits to output.")
+@click.option(
+    "-t",
+    "--topic",
+    "topic_root",
+    default=None,
+    help="Hash of the root prompt of the topic to select prompts from.",
+)
+@click.option("--insert", default=None, help="JSON string of the prompt to insert into the log.")
+@click.option("--delete", default=None, help="Hash of the leaf prompt to delete from the log.")
 def log(skip, max_count, topic_root, insert, delete):
     """
     Manage the prompt history.
     """
+    from devchat._cli.utils import get_model_config, handle_errors, init_dir
    from devchat.openai.openai_chat import OpenAIChat, OpenAIChatConfig, OpenAIPrompt
     from devchat.store import Store
-    from devchat._cli.utils import handle_errors, init_dir, get_model_config
     from devchat.utils import get_logger, get_user_info
 
     logger = get_logger(__name__)
 
     if (insert or delete) and (skip != 0 or max_count != 1 or topic_root is not None):
-        print("Error: The --insert or --delete option cannot be used with other options.",
-              file=sys.stderr)
+        print(
+            "Error: The --insert or --delete option cannot be used with other options.",
+            file=sys.stderr,
+        )
         sys.exit(1)
 
     repo_chat_dir, user_chat_dir = init_dir()

devchat/_cli/main.py
@@ -1,15 +1,11 @@
 """
 This module contains the main function for the DevChat CLI.
 """
 
 import click
 
+from devchat._cli import log, prompt, route, run, topic
 from devchat.utils import get_logger
-from devchat._cli import log
-from devchat._cli import prompt
-from devchat._cli import run
-from devchat._cli import topic
-from devchat._cli import route
 
 from devchat.workflow.cli import workflow
 
 logger = get_logger(__name__)

devchat/_cli/prompt.py
@@ -1,33 +1,61 @@
-# pylint: disable=import-outside-toplevel
 import sys
 from typing import List, Optional
 
 import click
 
 
-@click.command(help='Interact with the large language model (LLM).')
-@click.argument('content', required=False)
-@click.option('-p', '--parent', help='Input the parent prompt hash to continue the conversation.')
-@click.option('-r', '--reference', multiple=True,
-              help='Input one or more specific previous prompts to include in the current prompt.')
-@click.option('-i', '--instruct', multiple=True,
-              help='Add one or more files to the prompt as instructions.')
-@click.option('-c', '--context', multiple=True,
-              help='Add one or more files to the prompt as a context.')
-@click.option('-m', '--model', help='Specify the model to use for the prompt.')
-@click.option('--config', 'config_str',
-              help='Specify a JSON string to overwrite the default configuration for this prompt.')
-@click.option('-f', '--functions', type=click.Path(exists=True),
-              help='Path to a JSON file with functions for the prompt.')
-@click.option('-n', '--function-name',
-              help='Specify the function name when the content is the output of a function.')
-@click.option('-ns', '--not-store', is_flag=True, default=False, required=False,
-              help='Do not save the conversation to the store.')
-def prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-           instruct: Optional[List[str]], context: Optional[List[str]],
-           model: Optional[str], config_str: Optional[str] = None,
-           functions: Optional[str] = None, function_name: Optional[str] = None,
-           not_store: Optional[bool] = False):
+@click.command(help="Interact with the large language model (LLM).")
+@click.argument("content", required=False)
+@click.option("-p", "--parent", help="Input the parent prompt hash to continue the conversation.")
+@click.option(
+    "-r",
+    "--reference",
+    multiple=True,
+    help="Input one or more specific previous prompts to include in the current prompt.",
+)
+@click.option(
+    "-i", "--instruct", multiple=True, help="Add one or more files to the prompt as instructions."
+)
+@click.option(
+    "-c", "--context", multiple=True, help="Add one or more files to the prompt as a context."
+)
+@click.option("-m", "--model", help="Specify the model to use for the prompt.")
+@click.option(
+    "--config",
+    "config_str",
+    help="Specify a JSON string to overwrite the default configuration for this prompt.",
+)
+@click.option(
+    "-f",
+    "--functions",
+    type=click.Path(exists=True),
+    help="Path to a JSON file with functions for the prompt.",
+)
+@click.option(
+    "-n",
+    "--function-name",
+    help="Specify the function name when the content is the output of a function.",
+)
+@click.option(
+    "-ns",
+    "--not-store",
+    is_flag=True,
+    default=False,
+    required=False,
+    help="Do not save the conversation to the store.",
+)
+def prompt(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+    functions: Optional[str] = None,
+    function_name: Optional[str] = None,
+    not_store: Optional[bool] = False,
+):
     """
     This command performs interactions with the specified large language model (LLM)
     by sending prompts and receiving responses.
@@ -62,6 +90,7 @@ def prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
 
     """
     from devchat._cli.router import llm_prompt
+
     llm_prompt(
         content,
         parent,
@@ -72,5 +101,6 @@ def prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
         config_str,
         functions,
         function_name,
-        not_store)
+        not_store,
+    )
     sys.exit(0)

devchat/_cli/route.py
@@ -1,28 +1,48 @@
-# pylint: disable=import-outside-toplevel
 import sys
 from typing import List, Optional
 
 import click
 
 
-@click.command(help='Route a prompt to the specified LLM')
-@click.argument('content', required=False)
-@click.option('-p', '--parent', help='Input the parent prompt hash to continue the conversation.')
-@click.option('-r', '--reference', multiple=True,
-              help='Input one or more specific previous prompts to include in the current prompt.')
-@click.option('-i', '--instruct', multiple=True,
-              help='Add one or more files to the prompt as instructions.')
-@click.option('-c', '--context', multiple=True,
-              help='Add one or more files to the prompt as a context.')
-@click.option('-m', '--model', help='Specify the model to use for the prompt.')
-@click.option('--config', 'config_str',
-              help='Specify a JSON string to overwrite the default configuration for this prompt.')
-@click.option('-a', '--auto', is_flag=True, default=False, required=False,
-              help='Answer question by function-calling.')
-def route(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-          instruct: Optional[List[str]], context: Optional[List[str]],
-          model: Optional[str], config_str: Optional[str] = None,
-          auto: Optional[bool] = False):
+@click.command(help="Route a prompt to the specified LLM")
+@click.argument("content", required=False)
+@click.option("-p", "--parent", help="Input the parent prompt hash to continue the conversation.")
+@click.option(
+    "-r",
+    "--reference",
+    multiple=True,
+    help="Input one or more specific previous prompts to include in the current prompt.",
+)
+@click.option(
+    "-i", "--instruct", multiple=True, help="Add one or more files to the prompt as instructions."
+)
+@click.option(
+    "-c", "--context", multiple=True, help="Add one or more files to the prompt as a context."
+)
+@click.option("-m", "--model", help="Specify the model to use for the prompt.")
+@click.option(
+    "--config",
+    "config_str",
+    help="Specify a JSON string to overwrite the default configuration for this prompt.",
+)
+@click.option(
+    "-a",
+    "--auto",
+    is_flag=True,
+    default=False,
+    required=False,
+    help="Answer question by function-calling.",
+)
+def route(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+    auto: Optional[bool] = False,
+):
     """
     This command performs interactions with the specified large language model (LLM)
     by sending prompts and receiving responses.
@@ -58,14 +78,5 @@ def route(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
     """
     from devchat._cli.router import llm_route
 
-    llm_route(
-        content,
-        parent,
-        reference,
-        instruct,
-        context,
-        model,
-        config_str,
-        auto
-    )
+    llm_route(content, parent, reference, instruct, context, model, config_str, auto)
     sys.exit(0)

devchat/_cli/router.py
@@ -1,4 +1,3 @@
-# pylint: disable=import-outside-toplevel
 import json
 import sys
 from typing import List, Optional
@@ -6,10 +5,8 @@ from typing import List, Optional
 from devchat.workflow.workflow import Workflow
 
 
-def _get_model_and_config(
-        model: Optional[str],
-        config_str: Optional[str]):
-    from devchat._cli.utils import init_dir, get_model_config
+def _get_model_and_config(model: Optional[str], config_str: Optional[str]):
+    from devchat._cli.utils import get_model_config, init_dir
 
     _1, user_chat_dir = init_dir()
     model, config = get_model_config(user_chat_dir, model)
@@ -20,15 +17,17 @@ def _get_model_and_config(
     parameters_data.update(config_data)
     return model, parameters_data
 
 
 def _load_tool_functions(functions: Optional[str]):
     try:
         if functions:
-            with open(functions, 'r', encoding="utf-8") as f_file:
+            with open(functions, "r", encoding="utf-8") as f_file:
                 return json.load(f_file)
         return None
     except Exception:
         return None
 
 
 def _load_instruction_contents(content: str, instruct: Optional[List[str]]):
     from devchat.engine import load_workflow_instruction
     from devchat.utils import parse_files
@@ -41,24 +40,31 @@ def _load_instruction_contents(content: str, instruct: Optional[List[str]]):
     return instruct_contents
 
 
-def before_prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-                  instruct: Optional[List[str]], context: Optional[List[str]],
-                  model: Optional[str], config_str: Optional[str] = None,
-                  functions: Optional[str] = None, function_name: Optional[str] = None,
-                  not_store: Optional[bool] = False):
+def before_prompt(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+    functions: Optional[str] = None,
+    function_name: Optional[str] = None,
+    not_store: Optional[bool] = False,
+):
+    from devchat._cli.errors import MissContentInPromptException
+    from devchat._cli.utils import init_dir
     from devchat.assistant import Assistant
     from devchat.openai.openai_chat import OpenAIChat, OpenAIChatConfig
     from devchat.store import Store
     from devchat.utils import parse_files
-    from devchat._cli.utils import init_dir
-    from devchat._cli.errors import MissContentInPromptException
 
     repo_chat_dir, _1 = init_dir()
 
     if content is None:
         content = sys.stdin.read()
 
-    if content == '':
+    if content == "":
         raise MissContentInPromptException()
 
     instruct_contents = _load_instruction_contents(content, instruct)
@@ -74,53 +80,80 @@ def before_prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
 
     assistant = Assistant(chat, chat_store, max_input_tokens, not not_store)
     assistant.make_prompt(
-        request = content,
-        instruct_contents = instruct_contents,
-        context_contents = context_contents,
-        functions = tool_functions,
+        request=content,
+        instruct_contents=instruct_contents,
+        context_contents=context_contents,
+        functions=tool_functions,
         parent=parent,
         references=reference,
-        function_name=function_name
+        function_name=function_name,
     )
 
     return model, assistant, content
 
-def llm_prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-               instruct: Optional[List[str]], context: Optional[List[str]],
-               model: Optional[str], config_str: Optional[str] = None,
-               functions: Optional[str] = None, function_name: Optional[str] = None,
-               not_store: Optional[bool] = False):
+
+def llm_prompt(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+    functions: Optional[str] = None,
+    function_name: Optional[str] = None,
+    not_store: Optional[bool] = False,
+):
     from devchat._cli.utils import handle_errors
 
     with handle_errors():
-        _1, assistant, _3, = before_prompt(
-            content, parent, reference, instruct, context,
-            model, config_str, functions, function_name, not_store
-        )
+        (
+            _1,
+            assistant,
+            _3,
+        ) = before_prompt(
+            content,
+            parent,
+            reference,
+            instruct,
+            context,
+            model,
+            config_str,
+            functions,
+            function_name,
+            not_store,
+        )
 
         print(assistant.prompt.formatted_header())
         for response in assistant.iterate_response():
-            print(response, end='', flush=True)
+            print(response, end="", flush=True)
 
 
-def llm_commmand(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-                 instruct: Optional[List[str]], context: Optional[List[str]],
-                 model: Optional[str], config_str: Optional[str] = None):
-    from devchat.engine import run_command
+def llm_commmand(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+):
     from devchat._cli.utils import handle_errors
+    from devchat.engine import run_command
 
     with handle_errors():
         model, assistant, content = before_prompt(
             content, parent, reference, instruct, context, model, config_str, None, None, True
         )
 
         print(assistant.prompt.formatted_header())
         command_result = run_command(
-            model_name = model,
-            history_messages = assistant.prompt.messages,
-            input_text = content,
-            parent_hash = parent,
-            auto_fun = False)
+            model_name=model,
+            history_messages=assistant.prompt.messages,
+            input_text=content,
+            parent_hash=parent,
+            auto_fun=False,
+        )
         if command_result is not None:
             sys.exit(0)
 
@@ -129,17 +162,23 @@ def llm_commmand(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
         sys.exit(-1)
 
 
-def llm_route(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
-              instruct: Optional[List[str]], context: Optional[List[str]],
-              model: Optional[str], config_str: Optional[str] = None,
-              auto: Optional[bool] = False):
-    from devchat.engine import run_command
+def llm_route(
+    content: Optional[str],
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+    auto: Optional[bool] = False,
+):
     from devchat._cli.utils import handle_errors
+    from devchat.engine import run_command
 
     with handle_errors():
         model, assistant, content = before_prompt(
             content, parent, reference, instruct, context, model, config_str, None, None, True
         )
 
         name, user_input = Workflow.parse_trigger(content)
         workflow = Workflow.load(name) if name else None
@@ -165,13 +204,14 @@ def llm_route(content: Optional[str], parent: Optional[str], reference: Optional[List[str]],
 
         print(assistant.prompt.formatted_header())
         command_result = run_command(
-            model_name = model,
-            history_messages = assistant.prompt.messages,
-            input_text = content,
-            parent_hash = parent,
-            auto_fun = auto)
+            model_name=model,
+            history_messages=assistant.prompt.messages,
+            input_text=content,
+            parent_hash=parent,
+            auto_fun=auto,
+        )
         if command_result is not None:
             sys.exit(command_result[0])
 
         for response in assistant.iterate_response():
-            print(response, end='', flush=True)
+            print(response, end="", flush=True)

devchat/_cli/run.py
@@ -1,49 +1,82 @@
-# pylint: disable=import-outside-toplevel
 from typing import List, Optional, Tuple
 
 import click
 
 
 @click.command(
-    help="The 'command' argument is the name of the command to run or get information about.")
-@click.argument('command', required=False, default='')
-@click.option('--list', 'list_flag', is_flag=True, default=False,
-              help='List all specified commands in JSON format.')
-@click.option('--recursive', '-r', 'recursive_flag', is_flag=True, default=True,
-              help='List commands recursively.')
-@click.option('--update-sys', 'update_sys_flag', is_flag=True, default=False,
-              help='Pull the `sys` command directory from the DevChat repository.')
-@click.option('-p', '--parent', help='Input the parent prompt hash to continue the conversation.')
-@click.option('-r', '--reference', multiple=True,
-              help='Input one or more specific previous prompts to include in the current prompt.')
-@click.option('-i', '--instruct', multiple=True,
-              help='Add one or more files to the prompt as instructions.')
-@click.option('-c', '--context', multiple=True,
-              help='Add one or more files to the prompt as a context.')
-@click.option('-m', '--model', help='Specify the model to use for the prompt.')
-@click.option('--config', 'config_str',
-              help='Specify a JSON string to overwrite the default configuration for this prompt.')
-# pylint: disable=redefined-outer-name
-def run(command: str, list_flag: bool, recursive_flag: bool, update_sys_flag: bool,
-        parent: Optional[str], reference: Optional[List[str]],
-        instruct: Optional[List[str]], context: Optional[List[str]],
-        model: Optional[str], config_str: Optional[str] = None):
+    help="The 'command' argument is the name of the command to run or get information about."
+)
+@click.argument("command", required=False, default="")
+@click.option(
+    "--list",
+    "list_flag",
+    is_flag=True,
+    default=False,
+    help="List all specified commands in JSON format.",
+)
+@click.option(
+    "--recursive",
+    "-r",
+    "recursive_flag",
+    is_flag=True,
+    default=True,
+    help="List commands recursively.",
+)
+@click.option(
+    "--update-sys",
+    "update_sys_flag",
+    is_flag=True,
+    default=False,
+    help="Pull the `sys` command directory from the DevChat repository.",
+)
+@click.option("-p", "--parent", help="Input the parent prompt hash to continue the conversation.")
+@click.option(
+    "-r",
+    "--reference",
+    multiple=True,
+    help="Input one or more specific previous prompts to include in the current prompt.",
+)
+@click.option(
+    "-i", "--instruct", multiple=True, help="Add one or more files to the prompt as instructions."
+)
+@click.option(
+    "-c", "--context", multiple=True, help="Add one or more files to the prompt as a context."
+)
+@click.option("-m", "--model", help="Specify the model to use for the prompt.")
+@click.option(
+    "--config",
+    "config_str",
+    help="Specify a JSON string to overwrite the default configuration for this prompt.",
+)
+def run(
+    command: str,
+    list_flag: bool,
+    recursive_flag: bool,
+    update_sys_flag: bool,
+    parent: Optional[str],
+    reference: Optional[List[str]],
+    instruct: Optional[List[str]],
+    context: Optional[List[str]],
+    model: Optional[str],
+    config_str: Optional[str] = None,
+):
     """
     Operate the workflow engine of DevChat.
     """
     import json
     import os
     import sys
-    from devchat._cli.utils import init_dir, handle_errors
-    from devchat.engine import Namespace, CommandParser
-    from devchat.utils import get_logger
+
     from devchat._cli.router import llm_commmand
+    from devchat._cli.utils import handle_errors, init_dir
+    from devchat.engine import CommandParser, Namespace
+    from devchat.utils import get_logger
 
     logger = get_logger(__name__)
 
     _, user_chat_dir = init_dir()
     with handle_errors():
-        workflows_dir = os.path.join(user_chat_dir, 'workflows')
+        workflows_dir = os.path.join(user_chat_dir, "workflows")
         if not os.path.exists(workflows_dir):
             os.makedirs(workflows_dir)
         if not os.path.isdir(workflows_dir):
@@ -54,14 +87,14 @@ def run(command: str, list_flag: bool, recursive_flag: bool, update_sys_flag: bool,
     commander = CommandParser(namespace)
 
     if update_sys_flag:
-        sys_dir = os.path.join(workflows_dir, 'sys')
+        sys_dir = os.path.join(workflows_dir, "sys")
         git_urls = [
-            ('https://gitlab.com/devchat-ai/workflows.git', 'main'),
-            ('https://github.com/devchat-ai/workflows.git', 'main')
+            ("https://gitlab.com/devchat-ai/workflows.git", "main"),
+            ("https://github.com/devchat-ai/workflows.git", "main"),
         ]
         zip_urls = [
-            'https://gitlab.com/devchat-ai/workflows/-/archive/main/workflows-main.zip',
-            'https://codeload.github.com/devchat-ai/workflows/zip/refs/heads/main'
+            "https://gitlab.com/devchat-ai/workflows/-/archive/main/workflows-main.zip",
+            "https://codeload.github.com/devchat-ai/workflows/zip/refs/heads/main",
         ]
         _clone_or_pull_git_repo(sys_dir, git_urls, zip_urls)
         return
@@ -73,26 +106,15 @@ def run(command: str, list_flag: bool, recursive_flag: bool, update_sys_flag: bool,
             if not cmd:
                 logger.warning("Existing command directory failed to parse: %s", name)
                 continue
-            commands.append({
-                'name': name,
-                'description': cmd.description,
-                'path': cmd.path
-            })
+            commands.append({"name": name, "description": cmd.description, "path": cmd.path})
         print(json.dumps(commands, indent=2))
         return
 
     if command:
-        llm_commmand(
-            command,
-            parent,
-            reference,
-            instruct,
-            context,
-            model,
-            config_str
-        )
+        llm_commmand(command, parent, reference, instruct, context, model, config_str)
         return
 
 
 def __onerror(func, path, _1):
     """
     Error handler for shutil.rmtree.
@@ -114,18 +136,21 @@ def __onerror(func, path, _1):
     # Retry the function that failed
     func(path)
 
+
 def __make_files_writable(directory):
     """
     Recursively make all files in the directory writable.
     """
     import os
     import stat
+
     for root, _1, files in os.walk(directory):
         for name in files:
             filepath = os.path.join(root, name)
             if not os.access(filepath, os.W_OK):
                 os.chmod(filepath, stat.S_IWUSR)
 
+
 def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]], zip_urls: List[str]):
     """
     Clone a Git repository to a specified location, or pull it if it already exists.
@@ -135,13 +160,13 @@ def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]], zip_urls: List[str]):
     """
     import os
     import shutil
 
+    from devchat._cli.utils import clone_git_repo, download_and_extract_workflow
     from devchat.utils import get_logger
-    from devchat._cli.utils import download_and_extract_workflow
-    from devchat._cli.utils import clone_git_repo
 
     logger = get_logger(__name__)
 
-    if shutil.which('git') is None:
+    if shutil.which("git") is None:
         # If Git is not installed, download and extract the workflow
         for url in zip_urls:
             try:
@@ -152,13 +177,13 @@ def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]], zip_urls: List[str]):
             return
 
     if os.path.exists(target_dir):
-        bak_dir = target_dir + '_bak'
-        new_dir = target_dir + '_old'
+        bak_dir = target_dir + "_bak"
+        new_dir = target_dir + "_old"
         if os.path.exists(new_dir):
             shutil.rmtree(new_dir, onerror=__onerror)
         if os.path.exists(bak_dir):
             shutil.rmtree(bak_dir, onerror=__onerror)
-        print(f'{target_dir} is already exists. Moved to {new_dir}')
+        print(f"{target_dir} is already exists. Moved to {new_dir}")
         clone_git_repo(bak_dir, repo_urls)
         try:
             shutil.move(target_dir, new_dir)
@@ -173,4 +198,4 @@ def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]], zip_urls: List[str]):
     else:
         clone_git_repo(target_dir, repo_urls)
 
-    print(f'Updated {target_dir}')
+    print(f"Updated {target_dir}")

devchat/_cli/topic.py
@@ -1,19 +1,21 @@
-# pylint: disable=import-outside-toplevel
 import click
 
-@click.command(help='Manage topics')
-@click.option('--list', '-l', 'list_topics', is_flag=True,
-              help='List topics in reverse chronological order.')
-@click.option('--skip', default=0, help='Skip number of topics before showing the list.')
-@click.option('-n', '--max-count', default=100, help='Limit the number of topics to output.')
+
+@click.command(help="Manage topics")
+@click.option(
+    "--list", "-l", "list_topics", is_flag=True, help="List topics in reverse chronological order."
+)
+@click.option("--skip", default=0, help="Skip number of topics before showing the list.")
+@click.option("-n", "--max-count", default=100, help="Limit the number of topics to output.")
 def topic(list_topics: bool, skip: int, max_count: int):
     """
     Manage topics.
     """
     import json
 
+    from devchat._cli.utils import get_model_config, handle_errors, init_dir
+    from devchat.openai import OpenAIChat, OpenAIChatConfig
     from devchat.store import Store
-    from devchat.openai import OpenAIChatConfig, OpenAIChat
-    from devchat._cli.utils import init_dir, handle_errors, get_model_config
 
     repo_chat_dir, user_chat_dir = init_dir()
 

devchat/_cli/utils.py
@@ -1,32 +1,32 @@
-# pylint: disable=import-outside-toplevel
-from contextlib import contextmanager
 import os
-import sys
 import shutil
-from typing import Tuple, List, Optional, Any
+import sys
 import zipfile
+from contextlib import contextmanager
+from typing import Any, List, Optional, Tuple
 
 from devchat._cli.errors import MissContentInPromptException
-from devchat.utils import find_root_dir, add_gitignore, setup_logger, get_logger
+from devchat.utils import add_gitignore, find_root_dir, get_logger, setup_logger
 
 logger = get_logger(__name__)
 
+
 def download_and_extract_workflow(workflow_url, target_dir):
     import requests
 
     # Download the workflow zip file
     response = requests.get(workflow_url, stream=True, timeout=10)
     # Downaload file to temp dir
     os.makedirs(target_dir, exist_ok=True)
-    zip_path = os.path.join(target_dir, 'workflow.zip')
-    with open(zip_path, 'wb') as file_handle:
+    zip_path = os.path.join(target_dir, "workflow.zip")
+    with open(zip_path, "wb") as file_handle:
         for chunk in response.iter_content(chunk_size=8192):
             if chunk:
                 file_handle.write(chunk)
 
     # Extract the zip file
     parent_dir = os.path.dirname(target_dir)
-    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+    with zipfile.ZipFile(zip_path, "r") as zip_ref:
         zip_ref.extractall(parent_dir)
 
     # Delete target directory if exists
@@ -34,7 +34,7 @@ def download_and_extract_workflow(workflow_url, target_dir):
         shutil.rmtree(target_dir)
 
     # Rename extracted directory to target directory
-    extracted_dir = os.path.join(parent_dir, 'workflows-main')
+    extracted_dir = os.path.join(parent_dir, "workflows-main")
     os.rename(extracted_dir, target_dir)
 
 
@@ -58,9 +58,11 @@ def handle_errors():
         print(f"{type(error).__name__}: {error}", file=sys.stderr)
         sys.exit(1)
 
+
 REPO_CHAT_DIR = None
 USER_CHAT_DIR = None
 
+
 def init_dir() -> Tuple[str, str]:
     """
     Initialize the chat directories.
@@ -69,7 +71,6 @@ def init_dir() -> Tuple[str, str]:
     REPO_CHAT_DIR: The chat directory in the repository.
     USER_CHAT_DIR: The chat directory in the user's home.
     """
-    # pylint: disable=global-statement
     global REPO_CHAT_DIR
    global USER_CHAT_DIR
     if REPO_CHAT_DIR and USER_CHAT_DIR:
@ -108,8 +109,8 @@ def init_dir() -> Tuple[str, str]:
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
setup_logger(os.path.join(REPO_CHAT_DIR, 'error.log'))
|
setup_logger(os.path.join(REPO_CHAT_DIR, "error.log"))
|
||||||
add_gitignore(REPO_CHAT_DIR, '*')
|
add_gitignore(REPO_CHAT_DIR, "*")
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.error("Failed to setup logger or add .gitignore: %s", exc)
|
logger.error("Failed to setup logger or add .gitignore: %s", exc)
|
||||||
|
|
||||||
@ -125,7 +126,7 @@ def valid_git_repo(target_dir: str, valid_urls: List[str]) -> bool:
|
|||||||
:return: True if the directory is a valid Git repository with a valid URL, False otherwise.
|
:return: True if the directory is a valid Git repository with a valid URL, False otherwise.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
from git import Repo, InvalidGitRepositoryError
|
from git import InvalidGitRepositoryError, Repo
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -148,7 +149,7 @@ def clone_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]]):
|
|||||||
:param repo_urls: A list of possible Git repository URLs.
|
:param repo_urls: A list of possible Git repository URLs.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
from git import Repo, GitCommandError
|
from git import GitCommandError, Repo
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -164,8 +165,8 @@ def clone_git_repo(target_dir: str, repo_urls: List[Tuple[str, str]]):
|
|||||||
raise GitCommandError(f"Failed to clone repository to {target_dir}")
|
raise GitCommandError(f"Failed to clone repository to {target_dir}")
|
||||||
|
|
||||||
|
|
||||||
def get_model_config(user_chat_dir: str,
|
def get_model_config(user_chat_dir: str, model: Optional[str] = None) -> Tuple[str, Any]:
|
||||||
model: Optional[str] = None) -> Tuple[str, Any]:
|
|
||||||
from devchat.config import ConfigManager
|
from devchat.config import ConfigManager
|
||||||
|
|
||||||
manager = ConfigManager(user_chat_dir)
|
manager = ConfigManager(user_chat_dir)
|
||||||
return manager.model_config(model)
|
return manager.model_config(model)
|
||||||
|
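Beyond the quote and import-order churn, this file's `get_model_config` collapses to a single-line signature returning a `(model_id, config)` tuple. A hedged usage sketch (assumes the devchat package is importable; the values printed depend on the local `config.yml`):

    from devchat._cli.utils import get_model_config, init_dir

    # init_dir() returns (repo_chat_dir, user_chat_dir); passing no model
    # name asks the ConfigManager for the configured default model.
    _, user_chat_dir = init_dir()
    model_id, model_config = get_model_config(user_chat_dir)
    print(model_id, model_config)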
@@ -1,5 +1,3 @@
 from .anthropic_chat import AnthropicChatParameters

-__all__ = [
-    'AnthropicChatParameters'
-]
+__all__ = ["AnthropicChatParameters"]
@@ -1,8 +1,9 @@
-from typing import List, Optional, Dict, Any
+from typing import Any, Dict, List, Optional

 from pydantic import BaseModel, Field

+
-class AnthropicChatParameters(BaseModel, extra='ignore'):
+class AnthropicChatParameters(BaseModel, extra="ignore"):
     max_tokens_to_sample: int = Field(1024, ge=1)
     stop_sequences: Optional[List[str]]
     temperature: Optional[float] = Field(0.2, ge=0, le=1)
@@ -1,15 +1,14 @@
 import json
 import sys
 import time
-from typing import Optional, List, Iterator
+from typing import Iterator, List, Optional

-from devchat.message import Message
 from devchat.chat import Chat
+from devchat.message import Message
 from devchat.openai.openai_prompt import OpenAIPrompt
 from devchat.store import Store
 from devchat.utils import get_logger

-
 logger = get_logger(__name__)


@@ -37,14 +36,20 @@ class Assistant:

     def _check_limit(self):
         if self._prompt.request_tokens > self.token_limit:
-            raise ValueError(f"Prompt tokens {self._prompt.request_tokens} "
-                             f"beyond limit {self.token_limit}.")
+            raise ValueError(
+                f"Prompt tokens {self._prompt.request_tokens} " f"beyond limit {self.token_limit}."
+            )

-    def make_prompt(self, request: str,
-                    instruct_contents: Optional[List[str]], context_contents: Optional[List[str]],
-                    functions: Optional[List[dict]],
-                    parent: Optional[str] = None, references: Optional[List[str]] = None,
-                    function_name: Optional[str] = None):
+    def make_prompt(
+        self,
+        request: str,
+        instruct_contents: Optional[List[str]],
+        context_contents: Optional[List[str]],
+        functions: Optional[List[dict]],
+        parent: Optional[str] = None,
+        references: Optional[List[str]] = None,
+        function_name: Optional[str] = None,
+    ):
         """
         Make a prompt for the chat API.

@@ -59,7 +64,7 @@ class Assistant:
         self._check_limit()
         # Add instructions to the prompt
         if instruct_contents:
-            combined_instruct = ''.join(instruct_contents)
+            combined_instruct = "".join(instruct_contents)
             self._prompt.append_new(Message.INSTRUCT, combined_instruct)
             self._check_limit()
         # Add context to the prompt
@@ -77,8 +82,9 @@ class Assistant:
             for reference_hash in references:
                 prompt = self._store.get_prompt(reference_hash)
                 if not prompt:
-                    logger.error("Reference %s not retrievable while making prompt.",
-                                 reference_hash)
+                    logger.error(
+                        "Reference %s not retrievable while making prompt.", reference_hash
+                    )
                     continue
                 self._prompt.references.append(reference_hash)
                 self._prompt.prepend_history(prompt, self.token_limit)
@@ -111,8 +117,10 @@ class Assistant:
                 try:
                     if hasattr(chunk, "dict"):
                         chunk = chunk.dict()
-                    if "function_call" in chunk["choices"][0]["delta"] and \
-                            not chunk["choices"][0]["delta"]["function_call"]:
+                    if (
+                        "function_call" in chunk["choices"][0]["delta"]
+                        and not chunk["choices"][0]["delta"]["function_call"]
+                    ):
                         del chunk["choices"][0]["delta"]["function_call"]
                     if not chunk["choices"][0]["delta"]["content"]:
                         chunk["choices"][0]["delta"]["content"] = ""
@@ -123,8 +131,8 @@ class Assistant:
                     chunk["model"] = config_params["model"]
                     chunk["choices"][0]["index"] = 0
                     chunk["choices"][0]["finish_reason"] = "stop"
-                    if "role" not in chunk['choices'][0]['delta']:
-                        chunk['choices'][0]['delta']['role']='assistant'
+                    if "role" not in chunk["choices"][0]["delta"]:
+                        chunk["choices"][0]["delta"]["role"] = "assistant"

                     delta = self._prompt.append_response(json.dumps(chunk))
                     yield delta
@@ -136,9 +144,9 @@ class Assistant:
                 raise RuntimeError("No responses returned from the chat API")
             if self._need_store:
                 self._store.store_prompt(self._prompt)
-            yield self._prompt.formatted_footer(0) + '\n'
+            yield self._prompt.formatted_footer(0) + "\n"
             for index in range(1, len(self._prompt.responses)):
-                yield self._prompt.formatted_full_response(index) + '\n'
+                yield self._prompt.formatted_full_response(index) + "\n"
         else:
             response_str = self._chat.complete_response(self._prompt)
             self._prompt.set_response(response_str)
@@ -147,4 +155,4 @@ class Assistant:
         if self._need_store:
             self._store.store_prompt(self._prompt)
         for index in range(len(self._prompt.responses)):
-            yield self._prompt.formatted_full_response(index) + '\n'
+            yield self._prompt.formatted_full_response(index) + "\n"
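The streaming hunks above normalize each response chunk before it is appended to the prompt: empty `function_call` deltas are removed, a missing `content` becomes `""`, and a missing `role` defaults to `assistant`. A standalone paraphrase of that normalization with plain dicts (hypothetical helper, not the package's API):

    def normalize_delta(chunk: dict, model: str) -> dict:
        # Paraphrase of the normalization in Assistant's streaming loop above.
        delta = chunk["choices"][0]["delta"]
        if "function_call" in delta and not delta["function_call"]:
            del delta["function_call"]
        if not delta.get("content"):
            delta["content"] = ""
        chunk["model"] = model
        chunk["choices"][0]["index"] = 0
        chunk["choices"][0]["finish_reason"] = "stop"
        if "role" not in delta:
            delta["role"] = "assistant"
        return chunk

    print(normalize_delta({"choices": [{"delta": {"function_call": None}}]}, "gpt-3.5-turbo"))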
@@ -1,5 +1,6 @@
 from abc import ABC, abstractmethod
 from typing import Iterator

 from devchat.prompt import Prompt

+
@@ -1,7 +1,6 @@
 import time

-from devchat.chatmark import Button, Checkbox, Form, Radio, Step, TextEditor  # pylint: disable=E402
+from devchat.chatmark import Button, Checkbox, Form, Radio, Step, TextEditor


 def main():
@@ -1,5 +1,3 @@
-# pylint: disable=C0103
-# pylint: disable=W0212
 from typing import Dict, List, Optional, Union

 from .iobase import pipe_interaction
@@ -1,18 +1,21 @@
 import os
 import sys
-from typing import List, Dict, Tuple, Optional
-from pydantic import BaseModel
+from typing import Dict, List, Optional, Tuple
+
 import oyaml as yaml
+from pydantic import BaseModel


 class GeneralProviderConfig(BaseModel):
     api_key: Optional[str]
     api_base: Optional[str]


 class ModelConfig(BaseModel):
     max_input_tokens: Optional[int] = sys.maxsize
     provider: Optional[str]


 class GeneralModelConfig(ModelConfig):
     max_tokens: Optional[int]
     stop_sequences: Optional[List[str]]
@@ -30,7 +33,7 @@ class ChatConfig(BaseModel):

 class ConfigManager:
     def __init__(self, dir_path: str):
-        self.config_path = os.path.join(dir_path, 'config.yml')
+        self.config_path = os.path.join(dir_path, "config.yml")
         if not os.path.exists(self.config_path):
             self._create_sample_file()
             self._file_is_new = True
@@ -47,14 +50,14 @@ class ConfigManager:
         return os.path.getmtime(self.config_path)

     def _load_and_validate_config(self) -> ChatConfig:
-        with open(self.config_path, 'r', encoding='utf-8') as file:
+        with open(self.config_path, "r", encoding="utf-8") as file:
             data = yaml.safe_load(file)

-        if 'providers' in data:
-            for provider, config in data['providers'].items():
-                data['providers'][provider] = GeneralProviderConfig(**config)
-        for model, config in data['models'].items():
-            data['models'][model] = GeneralModelConfig(**config)
+        if "providers" in data:
+            for provider, config in data["providers"].items():
+                data["providers"][provider] = GeneralProviderConfig(**config)
+        for model, config in data["models"].items():
+            data["models"][model] = GeneralModelConfig(**config)

         return ChatConfig(**data)

@@ -70,9 +73,7 @@ class ConfigManager:
         return model_id, self.config.models[model_id]

     def update_model_config(
-        self,
-        model_id: str,
-        new_config: GeneralModelConfig
+        self, model_id: str, new_config: GeneralModelConfig
     ) -> GeneralModelConfig:
         _, old_config = self.model_config(model_id)
         if new_config.max_input_tokens is not None:
@@ -83,46 +84,29 @@ class ConfigManager:
         return self.config.models[model_id]

     def sync(self):
-        with open(self.config_path, 'w', encoding='utf-8') as file:
+        with open(self.config_path, "w", encoding="utf-8") as file:
             yaml.dump(self.config.dict(exclude_unset=True), file)

     def _create_sample_file(self):
         sample_config = ChatConfig(
             providers={
-                "devchat.ai": GeneralProviderConfig(
-                    api_key=""
-                ),
-                "openai.com": GeneralProviderConfig(
-                    api_key=""
-                ),
-                "general": GeneralProviderConfig(
-                )
+                "devchat.ai": GeneralProviderConfig(api_key=""),
+                "openai.com": GeneralProviderConfig(api_key=""),
+                "general": GeneralProviderConfig(),
             },
             models={
                 "gpt-4": GeneralModelConfig(
-                    max_input_tokens=6000,
-                    provider='devchat.ai',
-                    temperature=0,
-                    stream=True
+                    max_input_tokens=6000, provider="devchat.ai", temperature=0, stream=True
                 ),
                 "gpt-3.5-turbo-16k": GeneralModelConfig(
-                    max_input_tokens=12000,
-                    provider='devchat.ai',
-                    temperature=0,
-                    stream=True
+                    max_input_tokens=12000, provider="devchat.ai", temperature=0, stream=True
                 ),
                 "gpt-3.5-turbo": GeneralModelConfig(
-                    max_input_tokens=3000,
-                    provider='devchat.ai',
-                    temperature=0,
-                    stream=True
+                    max_input_tokens=3000, provider="devchat.ai", temperature=0, stream=True
                 ),
-                "claude-2": GeneralModelConfig(
-                    provider='general',
-                    max_tokens=20000
-                )
+                "claude-2": GeneralModelConfig(provider="general", max_tokens=20000),
             },
-            default_model="gpt-3.5-turbo"
+            default_model="gpt-3.5-turbo",
         )
-        with open(self.config_path, 'w', encoding='utf-8') as file:
+        with open(self.config_path, "w", encoding="utf-8") as file:
             yaml.dump(sample_config.dict(exclude_unset=True), file)
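For orientation, `_create_sample_file` above serializes the constructed `ChatConfig` with `yaml.dump(..., exclude_unset=True)`. The resulting `config.yml` should roughly match the following structure, reconstructed here as a plain dict from the constructor arguments (a sketch, not a captured file):

    # Assumption: exclude_unset drops every field that was never passed.
    sample = {
        "providers": {
            "devchat.ai": {"api_key": ""},
            "openai.com": {"api_key": ""},
            "general": {},
        },
        "models": {
            "gpt-4": {"max_input_tokens": 6000, "provider": "devchat.ai",
                      "temperature": 0, "stream": True},
            "gpt-3.5-turbo-16k": {"max_input_tokens": 12000, "provider": "devchat.ai",
                                  "temperature": 0, "stream": True},
            "gpt-3.5-turbo": {"max_input_tokens": 3000, "provider": "devchat.ai",
                              "temperature": 0, "stream": True},
            "claude-2": {"provider": "general", "max_tokens": 20000},
        },
        "default_model": "gpt-3.5-turbo",
    }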
@@ -1,14 +1,14 @@
-from .command_parser import parse_command, Command, CommandParser
+from .command_parser import Command, CommandParser, parse_command
 from .namespace import Namespace
 from .recursive_prompter import RecursivePrompter
-from .router import run_command, load_workflow_instruction
+from .router import load_workflow_instruction, run_command

 __all__ = [
-    'parse_command',
-    'Command',
-    'CommandParser',
-    'Namespace',
-    'RecursivePrompter',
-    'run_command',
-    'load_workflow_instruction'
+    "parse_command",
+    "Command",
+    "CommandParser",
+    "Namespace",
+    "RecursivePrompter",
+    "run_command",
+    "load_workflow_instruction",
 ]
@@ -1,7 +1,9 @@
 import os
-from typing import List, Dict, Optional
-from pydantic import BaseModel
+from typing import Dict, List, Optional
+
 import oyaml as yaml
+from pydantic import BaseModel

 from .namespace import Namespace

+
@@ -21,7 +23,7 @@ class Command(BaseModel):
     path: Optional[str] = None


-class CommandParser():
+class CommandParser:
     def __init__(self, namespace: Namespace):
         self.namespace = namespace

@@ -32,7 +34,7 @@ class CommandParser():
         :param name: The command name in the namespace.
         :return: The JSON representation of the command.
         """
-        file_path = self.namespace.get_file(name, 'command.yml')
+        file_path = self.namespace.get_file(name, "command.yml")
         if not file_path:
             return None
         return parse_command(file_path)
@@ -48,9 +50,9 @@ def parse_command(file_path: str) -> Command:
     # get path from file_path, /xx1/xx2/xx3.py => /xx1/xx2
     config_dir = os.path.dirname(file_path)

-    with open(file_path, 'r', encoding='utf-8') as file:
+    with open(file_path, "r", encoding="utf-8") as file:
         # replace {curpath} with config_dir
-        content = file.read().replace('$command_path', config_dir.replace('\\', '/'))
+        content = file.read().replace("$command_path", config_dir.replace("\\", "/"))
     config_dict = yaml.safe_load(content)
     config = Command(**config_dict)
     config.path = file_path
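`parse_command` substitutes `$command_path` with the directory holding the `command.yml` before the YAML is parsed, so workflow steps can reference files shipped alongside the command. The substitution in isolation (hypothetical paths; YAML parsing elided):

    content = "steps:\n  - run: $command_python $command_path/main.py"
    config_dir = "/home/user/.chat/workflows/sys/code"  # hypothetical

    # Same replacement parse_command performs, including the backslash
    # normalization that keeps Windows paths usable inside the YAML.
    print(content.replace("$command_path", config_dir.replace("\\", "/")))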
@@ -1,37 +1,39 @@
 """
 Run Command with a input text.
 """
-import os
-import sys
 import json
-import threading
-import subprocess
-from typing import List, Dict
+import os
 import shlex
+import subprocess
+import sys
+import threading
+from typing import Dict, List

 from devchat.utils import get_logger

 from .command_parser import Command
 from .util import ToolUtil

 logger = get_logger(__name__)

 DEVCHAT_COMMAND_MISS_ERROR_MESSAGE = (
-    'devchat-commands environment is not installed yet. '
-    'Please install it before using the current command.'
-    'The devchat-command environment is automatically '
-    'installed after the plugin starts,'
-    ' and details can be viewed in the output window.'
+    "devchat-commands environment is not installed yet. "
+    "Please install it before using the current command."
+    "The devchat-command environment is automatically "
+    "installed after the plugin starts,"
+    " and details can be viewed in the output window."
 )


 def pipe_reader(pipe, out_data, out_flag):
     while pipe:
         data = pipe.read(1)
-        if data == '':
+        if data == "":
             break
-        out_data['out'] += data
-        print(data, end='', file=out_flag, flush=True)
+        out_data["out"] += data
+        print(data, end="", file=out_flag, flush=True)


 # Equivalent of CommandRun in Python\which executes subprocesses
@@ -40,21 +42,25 @@ class CommandRunner:
         self.process = None
         self._model_name = model_name

-    def run_command(self,
-                    command_name: str,
-                    command: Command,
-                    history_messages: List[Dict],
-                    input_text: str,
-                    parent_hash: str):
+    def run_command(
+        self,
+        command_name: str,
+        command: Command,
+        history_messages: List[Dict],
+        input_text: str,
+        parent_hash: str,
+    ):
         """
         if command has parameters, then generate command parameters from input by LLM
         if command.input is "required", and input is null, then return error
         """
-        input_text = input_text.strip()\
-            .replace(f'/{command_name}', '')\
-            .replace('\"', '\\"')\
-            .replace('\'', '\\\'')\
-            .replace('\n', '\\n')
+        input_text = (
+            input_text.strip()
+            .replace(f"/{command_name}", "")
+            .replace('"', '\\"')
+            .replace("'", "\\'")
+            .replace("\n", "\\n")
+        )

         arguments = {}
         if command.parameters and len(command.parameters) > 0:
@@ -69,20 +75,19 @@ class CommandRunner:
         return self.run_command_with_parameters(
             command_name=command_name,
             command=command,
-            parameters={
-                "input": input_text,
-                **arguments
-            },
+            parameters={"input": input_text, **arguments},
             parent_hash=parent_hash,
-            history_messages=history_messages
+            history_messages=history_messages,
         )

-    def run_command_with_parameters(self,
-                                    command_name: str,
-                                    command: Command,
-                                    parameters: Dict[str, str],
-                                    parent_hash: str,
-                                    history_messages: List[Dict]):
+    def run_command_with_parameters(
+        self,
+        command_name: str,
+        command: Command,
+        parameters: Dict[str, str],
+        parent_hash: str,
+        history_messages: List[Dict],
+    ):
         """
         replace $xxx in command.steps[0].run with parameters[xxx]
         then run command.steps[0].run
@@ -91,17 +96,13 @@ class CommandRunner:
         try:
             env = os.environ.copy()
             env.update(parameters)
-            env.update(
-                self.__load_command_runtime(command)
-            )
-            env.update(
-                self.__load_chat_data(self._model_name, parent_hash, history_messages)
-            )
+            env.update(self.__load_command_runtime(command))
+            env.update(self.__load_chat_data(self._model_name, parent_hash, history_messages))
             self.__update_devchat_python_path(env, command.steps[0]["run"])

             command_run = command.steps[0]["run"]
             for parameter in env:
-                command_run = command_run.replace('$' + parameter, str(env[parameter]))
+                command_run = command_run.replace("$" + parameter, str(env[parameter]))

             if self.__check_command_python_error(command_run, env):
                 return result
@@ -124,14 +125,15 @@ class CommandRunner:
         """
         run command string
         """
+
         def handle_output(process):
-            stdout_data, stderr_data = {'out': ''}, {'out': ''}
+            stdout_data, stderr_data = {"out": ""}, {"out": ""}
             stdout_thread = threading.Thread(
-                target=pipe_reader,
-                args=(process.stdout, stdout_data, sys.stdout))
+                target=pipe_reader, args=(process.stdout, stdout_data, sys.stdout)
+            )
             stderr_thread = threading.Thread(
-                target=pipe_reader,
-                args=(process.stderr, stderr_data, sys.stderr))
+                target=pipe_reader, args=(process.stderr, stderr_data, sys.stderr)
+            )
             stdout_thread.start()
             stderr_thread.start()
             stdout_thread.join()
@@ -142,17 +144,17 @@ class CommandRunner:
             if isinstance(env[key], (List, Dict)):
                 env[key] = json.dumps(env[key])
         with subprocess.Popen(
             shlex.split(command_str),
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
             env=env,
-            text=True
+            text=True,
         ) as process:
             return handle_output(process)

     def __check_command_python_error(self, command_run: str, parameters: Dict[str, str]):
-        need_command_python = command_run.find('$command_python ') != -1
-        has_command_python = parameters.get('command_python', None)
+        need_command_python = command_run.find("$command_python ") != -1
+        has_command_python = parameters.get("command_python", None)

         if need_command_python and not has_command_python:
             print(DEVCHAT_COMMAND_MISS_ERROR_MESSAGE, file=sys.stderr, flush=True)
@@ -162,9 +164,9 @@ class CommandRunner:
     def __get_readme(self, command: Command):
         try:
             command_dir = os.path.dirname(command.path)
-            readme_file = os.path.join(command_dir, 'README.md')
+            readme_file = os.path.join(command_dir, "README.md")
             if os.path.exists(readme_file):
-                with open(readme_file, 'r', encoding='utf8') as file:
+                with open(readme_file, "r", encoding="utf8") as file:
                     readme = file.read()
                 return readme
             return None
@@ -172,8 +174,8 @@ class CommandRunner:
             return None

     def __check_input_miss_error(
         self, command: Command, command_name: str, parameters: Dict[str, str]
     ):
         is_input_required = command.input == "required"
         if not (is_input_required and parameters["input"] == ""):
             return False
@@ -197,7 +199,7 @@ class CommandRunner:

         missed_parameters = []
         for parameter_name in parameter_names:
-            if command_run.find('$' + parameter_name) != -1:
+            if command_run.find("$" + parameter_name) != -1:
                 missed_parameters.append(parameter_name)

         if len(missed_parameters) == 0:
@@ -216,22 +218,21 @@ class CommandRunner:

         # visit each path in command_path, for example: /usr/x1/x2/x3
         # then load visit: /usr, /usr/x1, /usr/x1/x2, /usr/x1/x2/x3
-        paths = command_path.split('/')
-        for index in range(1, len(paths)+1):
+        paths = command_path.split("/")
+        for index in range(1, len(paths) + 1):
             try:
-                path = '/'.join(paths[:index])
-                runtime_file = os.path.join(path, 'runtime.json')
+                path = "/".join(paths[:index])
+                runtime_file = os.path.join(path, "runtime.json")
                 if os.path.exists(runtime_file):
-                    with open(runtime_file, 'r', encoding='utf8') as file:
+                    with open(runtime_file, "r", encoding="utf8") as file:
                         command_runtime_config = json.loads(file.read())
                         runtime_config.update(command_runtime_config)
             except Exception:
                 pass

         # for windows
-        if runtime_config.get('command_python', None):
-            runtime_config['command_python'] = \
-                runtime_config['command_python'].replace('\\', '/')
+        if runtime_config.get("command_python", None):
+            runtime_config["command_python"] = runtime_config["command_python"].replace("\\", "/")
         return runtime_config

     def __load_chat_data(self, model_name: str, parent_hash: str, history_messages: List[Dict]):
@@ -242,16 +243,15 @@ class CommandRunner:
         }

     def __update_devchat_python_path(self, env: Dict[str, str], command_run: str):
-        python_path = os.environ.get('PYTHONPATH', '')
-        env['DEVCHAT_PYTHONPATH'] = os.environ.get('DEVCHAT_PYTHONPATH', python_path)
-        if command_run.find('$devchat_python ') == -1:
-            del env['PYTHONPATH']
-        env["devchat_python"] = sys.executable.replace('\\', '/')
+        python_path = os.environ.get("PYTHONPATH", "")
+        env["DEVCHAT_PYTHONPATH"] = os.environ.get("DEVCHAT_PYTHONPATH", python_path)
+        if command_run.find("$devchat_python ") == -1:
+            del env["PYTHONPATH"]
+        env["devchat_python"] = sys.executable.replace("\\", "/")

-    def _call_function_by_llm(self,
-                              command_name: str,
-                              command: Command,
-                              history_messages: List[Dict]):
+    def _call_function_by_llm(
+        self, command_name: str, command: Command, history_messages: List[Dict]
+    ):
         """
         command needs multi parameters, so we need parse each
         parameter by LLM from input_text
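`handle_output` above drains stdout and stderr on separate threads so that neither pipe can fill its buffer and deadlock the child process. A self-contained sketch of the same pattern, runnable as-is because it spawns `sys.executable` rather than a workflow command:

    import subprocess
    import sys
    import threading

    def pipe_reader(pipe, out_data, out_flag):
        # Same loop as in the diff: read one character at a time so partial
        # output is forwarded immediately, and stop on end-of-stream.
        while pipe:
            data = pipe.read(1)
            if data == "":
                break
            out_data["out"] += data
            print(data, end="", file=out_flag, flush=True)

    stdout_data, stderr_data = {"out": ""}, {"out": ""}
    with subprocess.Popen(
        [sys.executable, "-c", "print('hello from the child')"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    ) as process:
        out = threading.Thread(target=pipe_reader, args=(process.stdout, stdout_data, sys.stdout))
        err = threading.Thread(target=pipe_reader, args=(process.stderr, stderr_data, sys.stderr))
        out.start()
        err.start()
        out.join()
        err.join()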
@@ -1,17 +1,16 @@
 import os
-from typing import List, Optional
 import re
+from typing import List, Optional


 class Namespace:
-    def __init__(self, root_path: str,
-                 branches: List[str] = None):
+    def __init__(self, root_path: str, branches: List[str] = None):
         """
         :param root_path: The root path of the namespace.
         :param branches: The hidden branches with ascending order of priority.
         """
         self.root_path = root_path
-        self.branches = branches if branches else ['sys', 'org', 'usr']
+        self.branches = branches if branches else ["sys", "org", "usr"]

     @staticmethod
     def is_valid_name(name: str) -> bool:
@@ -28,7 +27,7 @@ class Namespace:
         # The regular expression pattern for a valid name
         if name is None:
             return False
-        pattern = r'^$|^(?!.*\.\.)[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)*$'
+        pattern = r"^$|^(?!.*\.\.)[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)*$"
         return bool(re.match(pattern, name))

     def get_file(self, name: str, file: str) -> Optional[str]:
@@ -40,7 +39,7 @@ class Namespace:
         if not self.is_valid_name(name):
             return None
         # Convert the dot-separated name to a path
-        path = os.path.join(*name.split('.'))
+        path = os.path.join(*name.split("."))
         for branch in reversed(self.branches):
             full_path = os.path.join(self.root_path, branch, path)
             if os.path.isdir(full_path):
@@ -60,7 +59,7 @@ class Namespace:
         if not self.is_valid_name(name):
             raise ValueError(f"Invalid name to list files: {name}")
         # Convert the dot-separated name to a path
-        path = os.path.join(*name.split('.'))
+        path = os.path.join(*name.split("."))
         files = {}
         path_found = False
         for branch in self.branches:
@@ -77,7 +76,7 @@ class Namespace:
         # Sort the files in alphabetical order before returning
         return sorted(files.values()) if files else []

-    def list_names(self, name: str = '', recursive: bool = False) -> List[str]:
+    def list_names(self, name: str = "", recursive: bool = False) -> List[str]:
         """
         :param name: The command name in the namespace. Defaults to the root.
         :param recursive: Whether to list all descendant names or only child names.
@@ -86,7 +85,7 @@ class Namespace:
         if not self.is_valid_name(name):
             raise ValueError(f"Invalid name to list names: {name}")
         commands = set()
-        path = os.path.join(*name.split('.'))
+        path = os.path.join(*name.split("."))
         found = False
         for branch in self.branches:
             full_path = os.path.join(self.root_path, branch, path)
@@ -101,10 +100,10 @@ class Namespace:

     def _add_dirnames_to_commands(self, full_path: str, name: str, commands: set):
         for dirname in os.listdir(full_path):
-            if dirname.startswith('.'):
+            if dirname.startswith("."):
                 continue
             if os.path.isdir(os.path.join(full_path, dirname)):
-                command_name = '.'.join([name, dirname]) if name else dirname
+                command_name = ".".join([name, dirname]) if name else dirname
                 commands.add(command_name)

     def _add_recursive_dirnames_to_commands(self, full_path: str, name: str, commands: set):
@@ -112,10 +111,10 @@ class Namespace:

     def _recursive_dir_walk(self, full_path: str, name: str, commands: set):
         for dirname in os.listdir(full_path):
-            if dirname.startswith('.'):
+            if dirname.startswith("."):
                 continue
             dir_path = os.path.join(full_path, dirname)
             if os.path.isdir(dir_path):
-                command_name = '.'.join([name, dirname]) if name else dirname
+                command_name = ".".join([name, dirname]) if name else dirname
                 commands.add(command_name)
                 self._recursive_dir_walk(dir_path, command_name, commands)
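The requoted pattern in `is_valid_name` accepts the empty string or dot-separated segments of `[a-zA-Z0-9_-]`, with a lookahead rejecting any `..`. A quick check of its behavior using only the standard library:

    import re

    PATTERN = r"^$|^(?!.*\.\.)[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)*$"

    for name in ["", "sys", "code.python", "bad..name", "bad/name"]:
        # Expected: True, True, True, False, False
        print(f"{name!r}: {bool(re.match(PATTERN, name))}")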
@@ -1,5 +1,6 @@
-import re
 import os
+import re

 from .namespace import Namespace

@@ -8,30 +9,30 @@ class RecursivePrompter:
         self.namespace = namespace

     def run(self, name: str) -> str:
-        ancestors = name.split('.')
-        merged_content = ''
+        ancestors = name.split(".")
+        merged_content = ""
         for index in range(len(ancestors)):
-            ancestor_name = '.'.join(ancestors[:index + 1])
-            file_path = self.namespace.get_file(ancestor_name, 'prompt.txt')
+            ancestor_name = ".".join(ancestors[: index + 1])
+            file_path = self.namespace.get_file(ancestor_name, "prompt.txt")
             if file_path:
-                with open(file_path, 'r', encoding='utf-8') as file:
+                with open(file_path, "r", encoding="utf-8") as file:
                     prompt_content = file.read()
                 # replace @file@ with the content of the file
                 prompt_content = self._replace_file_references(file_path, prompt_content)
                 merged_content += prompt_content
-                merged_content += '\n'
+                merged_content += "\n"

         return merged_content

     def _replace_file_references(self, prompt_file_path: str, content: str) -> str:
         # prompt_file_path is the path to the file that contains the content
         # @relative file path@: file is relative to the prompt_file_path
-        pattern = re.compile(r'@(.+?)@')
+        pattern = re.compile(r"@(.+?)@")
         matches = pattern.findall(content)
         for match in matches:
             file_path = os.path.join(os.path.dirname(prompt_file_path), match)
             if os.path.exists(file_path):
-                with open(file_path, 'r', encoding='utf-8') as file:
+                with open(file_path, "r", encoding="utf-8") as file:
                     file_content = file.read()
-                content = content.replace(f'@{match}@', file_content)
+                content = content.replace(f"@{match}@", file_content)
         return content
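`_replace_file_references` expands `@path@` markers with the contents of files next to the prompt file. The same idea with the filesystem swapped for a dict, so the sketch runs standalone:

    import re

    def replace_file_references(content: str, files: dict) -> str:
        # Mirrors the loop above: find every @...@ marker and, when the
        # referenced file is known, splice its content into place.
        for match in re.compile(r"@(.+?)@").findall(content):
            if match in files:
                content = content.replace(f"@{match}@", files[match])
        return content

    print(replace_file_references("Context: @intro.txt@", {"intro.txt": "Hello."}))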
@@ -1,18 +1,20 @@
 import os
 from typing import List

 from .command_runner import CommandRunner
-from .util import CommandUtil
 from .namespace import Namespace
-from.recursive_prompter import RecursivePrompter
+from .recursive_prompter import RecursivePrompter
+from .util import CommandUtil


 def load_workflow_instruction(user_input: str):
     user_input = user_input.strip()
     if len(user_input) == 0:
         return None
-    if user_input[:1] != '/':
+    if user_input[:1] != "/":
         return None

-    workflows_dir = os.path.join(os.path.expanduser('~/.chat'), 'workflows')
+    workflows_dir = os.path.join(os.path.expanduser("~/.chat"), "workflows")
     if not os.path.exists(workflows_dir):
         return None
     if not os.path.isdir(workflows_dir):
@@ -28,19 +30,16 @@ def load_workflow_instruction(user_input: str):


 def run_command(
-        model_name: str,
-        history_messages: List[dict],
-        input_text: str,
-        parent_hash: str,
-        auto_fun: bool):
+    model_name: str, history_messages: List[dict], input_text: str, parent_hash: str, auto_fun: bool
+):
     """
     load command config, and then run Command
     """
     # split input_text by ' ','\n','\t'
     if len(input_text.strip()) == 0:
         return None
-    if input_text.strip()[:1] != '/':
-        if not (auto_fun and model_name.startswith('gpt-')):
+    if input_text.strip()[:1] != "/":
+        if not (auto_fun and model_name.startswith("gpt-")):
             return None

     # TODO
@@ -60,5 +59,5 @@ def run_command(
         command=command_obj,
         history_messages=history_messages,
         input_text=input_text,
-        parent_hash=parent_hash
+        parent_hash=parent_hash,
     )
@@ -1,26 +1,25 @@
-# pylint: disable=import-outside-toplevel
+import json
 import os
 import sys
-import json
-from typing import List, Dict
+from typing import Dict, List

 from devchat._cli.utils import init_dir
 from devchat.utils import get_logger

+from .command_parser import Command, CommandParser
 from .namespace import Namespace
-from .command_parser import CommandParser, Command

 logger = get_logger(__name__)

 DEFAULT_MODEL = "gpt-3.5-turbo"


 class CommandUtil:
     @staticmethod
     def __command_parser():
         _, user_chat_dir = init_dir()
-        workflows_dir = os.path.join(user_chat_dir, 'workflows')
+        workflows_dir = os.path.join(user_chat_dir, "workflows")
         if not os.path.exists(workflows_dir) or not os.path.isdir(workflows_dir):
             return None

@@ -42,8 +41,8 @@ class CommandUtil:
             return []

         command_names = commander.namespace.list_names("", True)
-        commands = [ (name, commander.parse(name)) for name in command_names ]
-        return [ cmd for cmd in commands if cmd[1] ]
+        commands = [(name, commander.parse(name)) for name in command_names]
+        return [cmd for cmd in commands if cmd[1]]


 class ToolUtil:
@@ -56,23 +55,20 @@ class ToolUtil:
             for key, value in command.parameters.items():
                 properties[key] = {}
                 for key1, value1 in value.dict().items():
-                    if key1 not in ['type', 'description', 'enum'] or value1 is None:
+                    if key1 not in ["type", "description", "enum"] or value1 is None:
                         continue
                     properties[key][key1] = value1
                 required.append(key)
-        elif command.steps[0]['run'].find('$input') > 0:
-            properties['input'] = {
-                "type": "string",
-                "description": "input text"
-            }
-            required.append('input')
+        elif command.steps[0]["run"].find("$input") > 0:
+            properties["input"] = {"type": "string", "description": "input text"}
+            required.append("input")

         return properties, required

     @staticmethod
     def make_function(command: Command, command_name: str):
         properties, required = ToolUtil.__make_function_parameters(command)
-        command_name = command_name.replace('.', '---')
+        command_name = command_name.replace(".", "---")

         return {
             "type": "function",
@@ -84,37 +80,37 @@ class ToolUtil:
                     "properties": properties,
                     "required": required,
                 },
-            }
+            },
         }

     @staticmethod
     def select_function_by_llm(
         history_messages: List[Dict], tools: List[Dict], model: str = DEFAULT_MODEL
     ):
-        import openai
         import httpx
+        import openai

         proxy_url = os.environ.get("DEVCHAT_PROXY", "")
-        proxy_setting ={"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+        proxy_setting = (
+            {"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+        )

         client = openai.OpenAI(
             api_key=os.environ.get("OPENAI_API_KEY", None),
             base_url=os.environ.get("OPENAI_API_BASE", None),
-            http_client=httpx.Client(**proxy_setting, trust_env=False)
+            http_client=httpx.Client(**proxy_setting, trust_env=False),
         )

         try:
             response = client.chat.completions.create(
-                messages=history_messages,
-                model=model,
-                stream=False,
-                tools=tools
+                messages=history_messages, model=model, stream=False, tools=tools
             )

             respose_message = response.dict()["choices"][0]["message"]
-            if not respose_message['tool_calls']:
+            if not respose_message["tool_calls"]:
                 return None
-            tool_call = respose_message['tool_calls'][0]['function']
-            if tool_call['name'] != tools[0]["function"]["name"]:
+            tool_call = respose_message["tool_calls"][0]["function"]
+            if tool_call["name"] != tools[0]["function"]["name"]:
                 error_msg = (
                     "The LLM returned an invalid function name. "
                     f"Expected: {tools[0]['function']['name']}, "
@@ -123,8 +119,8 @@ class ToolUtil:
                 print(error_msg, file=sys.stderr, flush=True)
                 return None
             return {
-                "name": tool_call['name'].replace('---', '.'),
-                "arguments": json.loads(tool_call['arguments'])
+                "name": tool_call["name"].replace("---", "."),
+                "arguments": json.loads(tool_call["arguments"]),
             }
         except (ConnectionError, openai.APIConnectionError) as err:
             print("ConnectionError:", err, file=sys.stderr, flush=True)
@@ -139,25 +135,22 @@ class ToolUtil:
             return None

     @staticmethod
-    def _create_tool(command_name:str, command: Command) -> dict:
+    def _create_tool(command_name: str, command: Command) -> dict:
         properties = {}
         required = []
         if command.parameters:
             for key, value in command.parameters.items():
                 properties[key] = {}
                 for key1, value1 in value.dict().items():
-                    if key1 not in ['type', 'description', 'enum'] or value1 is None:
+                    if key1 not in ["type", "description", "enum"] or value1 is None:
                         continue
                     properties[key][key1] = value1
                 required.append(key)
-        elif command.steps[0]['run'].find('$input') > 0:
-            properties['input'] = {
-                "type": "string",
-                "description": "input text"
-            }
-            required.append('input')
+        elif command.steps[0]["run"].find("$input") > 0:
+            properties["input"] = {"type": "string", "description": "input text"}
+            required.append("input")

-        command_name = command_name.replace('.', '---')
+        command_name = command_name.replace(".", "---")
         return {
             "type": "function",
             "function": {
@@ -168,6 +161,5 @@ class ToolUtil:
                     "properties": properties,
                     "required": required,
                 },
-            }
+            },
         }
-
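Both `make_function` and `_create_tool` above mangle dotted command names with `---`, presumably because OpenAI tool names disallow dots, and `select_function_by_llm` reverses the mangling on the way back. The round trip in isolation:

    def to_tool_name(command_name: str) -> str:
        return command_name.replace(".", "---")

    def from_tool_name(tool_name: str) -> str:
        return tool_name.replace("---", ".")

    assert to_tool_name("code.python") == "code---python"
    assert from_tool_name("code---python") == "code.python"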
@@ -1,5 +1,5 @@
 from .service import IDEService
-from .types import *
+from .types import *  # noqa: F403
 from .types import __all__ as types_all

 __all__ = types_all + [
@@ -1,5 +1,6 @@
-from .types import LocationWithText
 from .rpc import rpc_method
+from .types import LocationWithText


 class IdeaIDEService:
     def __init__(self):
@@ -7,8 +8,8 @@ class IdeaIDEService:

     @rpc_method
     def get_visible_range(self) -> LocationWithText:
         return LocationWithText.parse_obj(self._result)

     @rpc_method
     def get_selected_range(self) -> LocationWithText:
         return LocationWithText.parse_obj(self._result)
@@ -1,8 +1,3 @@
-# pylint: disable=C0103
-# pylint: disable=W3101
-# pylint: disable=W0719
-# pylint: disable=R1710
-# pylint: disable=W0212
 import os
 from functools import wraps

@@ -1,17 +1,9 @@
-# disable pylint
-# pylint: disable=W0613
-# pylint: disable=E1133
-# pylint: disable=R1710
-# pylint: disable=W0719
-# pylint: disable=W3101
-# pylint: disable=C0103
-
 from typing import List

-from .rpc import rpc_method
-from .types import Location, SymbolNode, LocationWithText
-from .vscode_services import selected_range, visible_range
 from .idea_services import IdeaIDEService
+from .rpc import rpc_method
+from .types import Location, LocationWithText, SymbolNode
+from .vscode_services import selected_range, visible_range


 class IDEService:
@@ -39,7 +31,7 @@ class IDEService:
     @rpc_method
     def install_python_env(self, command_name: str, requirements_file: str) -> str:
         """
        A method to install a Python environment with the provided command name
        and requirements file, returning python path installed.
        Command name is the name of the environment to be installed.
        """
@@ -136,7 +128,7 @@ class IDEService:
        Determines and returns the visible range of code in the current IDE.

        Returns:
            A tuple denoting the visible range if the IDE is VSCode, or defers to
            IdeaIDEService's get_visible_range method for other IDEs.
        """
        if self.ide_name() == "vscode":
@ -2,13 +2,7 @@ from typing import List
|
|||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
__all__ = [
|
__all__ = ["Position", "Range", "Location", "SymbolNode", "LocationWithText"]
|
||||||
"Position",
|
|
||||||
"Range",
|
|
||||||
"Location",
|
|
||||||
"SymbolNode",
|
|
||||||
"LocationWithText"
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class Position(BaseModel):
|
class Position(BaseModel):
|
||||||
|
@ -5,12 +5,12 @@ from .types import LocationWithText
|
|||||||
|
|
||||||
|
|
||||||
@rpc_call
|
@rpc_call
|
||||||
def run_code(code: str): # pylint: disable=unused-argument
|
def run_code(code: str):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@rpc_call
|
@rpc_call
|
||||||
def diff_apply(filepath, content): # pylint: disable=unused-argument
|
def diff_apply(filepath, content):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@ -110,6 +110,7 @@ def visible_lines():
|
|||||||
"visibleRange": [start_line, end_line],
|
"visibleRange": [start_line, end_line],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def visible_range() -> LocationWithText:
|
def visible_range() -> LocationWithText:
|
||||||
visible_range_text = visible_lines()
|
visible_range_text = visible_lines()
|
||||||
return LocationWithText(
|
return LocationWithText(
|
||||||
@ -124,7 +125,7 @@ def visible_range() -> LocationWithText:
|
|||||||
"line": visible_range_text["visibleRange"][1],
|
"line": visible_range_text["visibleRange"][1],
|
||||||
"character": 0,
|
"character": 0,
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -159,6 +160,7 @@ def selected_lines():
|
|||||||
"selectedRange": [start_line, start_col, end_line, end_col],
|
"selectedRange": [start_line, start_col, end_line, end_col],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def selected_range() -> LocationWithText:
|
def selected_range() -> LocationWithText:
|
||||||
selected_range_text = selected_lines()
|
selected_range_text = selected_lines()
|
||||||
return LocationWithText(
|
return LocationWithText(
|
||||||
@ -173,5 +175,5 @@ def selected_range() -> LocationWithText:
|
|||||||
"line": selected_range_text["selectedRange"][2],
|
"line": selected_range_text["selectedRange"][2],
|
||||||
"character": selected_range_text["selectedRange"][3],
|
"character": selected_range_text["selectedRange"][3],
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
)
|
)
|
||||||
|
@ -47,7 +47,7 @@ def chat(
|
|||||||
):
|
):
|
||||||
def decorator(func):
|
def decorator(func):
|
||||||
@wraps(func)
|
@wraps(func)
|
||||||
def wrapper(*args, **kwargs): # pylint: disable=unused-argument
|
def wrapper(*args, **kwargs):
|
||||||
nonlocal prompt, memory, model, llm_config
|
nonlocal prompt, memory, model, llm_config
|
||||||
prompt_new = prompt.format(**kwargs)
|
prompt_new = prompt.format(**kwargs)
|
||||||
messages = memory.contexts() if memory else []
|
messages = memory.contexts() if memory else []
|
||||||
@ -86,7 +86,7 @@ def chat_json(
|
|||||||
):
|
):
|
||||||
def decorator(func):
|
def decorator(func):
|
||||||
@wraps(func)
|
@wraps(func)
|
||||||
def wrapper(*args, **kwargs): # pylint: disable=unused-argument
|
def wrapper(*args, **kwargs):
|
||||||
nonlocal prompt, memory, model, llm_config
|
nonlocal prompt, memory, model, llm_config
|
||||||
prompt_new = prompt.format(**kwargs)
|
prompt_new = prompt.format(**kwargs)
|
||||||
messages = memory.contexts() if memory else []
|
messages = memory.contexts() if memory else []
|
||||||
|
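Note: these two hunks touch the wrapper inside the chat/chat_json decorators, which format the prompt template with the wrapped function's keyword arguments. A hedged usage sketch -- the import path, prompt text, and model name are illustrative, and the parameter names are inferred from the nonlocal list above:

from devchat.llm import chat  # assumed import path

@chat(prompt="Summarize this diff:\n{diff}", model="gpt-3.5-turbo")
def summarize(diff: str):
    pass  # body unused; the decorator calls the model and returns its reply

print(summarize(diff="- old line\n+ new line"))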
@@ -41,12 +41,12 @@ def chat_completion_stream_commit(
 llm_config: Dict,  # {"model": "...", ...}
 ):
 proxy_url = os.environ.get("DEVCHAT_PROXY", "")
-proxy_setting ={"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+proxy_setting = {"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}

 client = openai.OpenAI(
 api_key=os.environ.get("OPENAI_API_KEY", None),
 base_url=os.environ.get("OPENAI_API_BASE", None),
-http_client=httpx.Client(**proxy_setting, trust_env=False)
+http_client=httpx.Client(**proxy_setting, trust_env=False),
 )

 llm_config["stream"] = True

@@ -56,12 +56,12 @@ def chat_completion_stream_commit(

 def chat_completion_stream_raw(**kwargs):
 proxy_url = os.environ.get("DEVCHAT_PROXY", "")
-proxy_setting ={"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+proxy_setting = {"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}

 client = openai.OpenAI(
 api_key=os.environ.get("OPENAI_API_KEY", None),
 base_url=os.environ.get("OPENAI_API_BASE", None),
-http_client=httpx.Client(**proxy_setting, trust_env=False)
+http_client=httpx.Client(**proxy_setting, trust_env=False),
 )

 kwargs["stream"] = True
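Note: the recurring proxy block routes both HTTP and HTTPS traffic through DEVCHAT_PROXY when it is set, and disables environment-based proxy pickup otherwise. The same pattern in isolation, as a hedged sketch:

import os
import httpx

# Honor DEVCHAT_PROXY when set; trust_env=False stops httpx from also
# reading HTTP_PROXY/HTTPS_PROXY from the environment.
# Caveat: depending on the httpx version, the expected keyword for a
# per-scheme mapping may be "proxies" rather than "proxy" -- worth verifying.
proxy_url = os.environ.get("DEVCHAT_PROXY", "")
proxy_setting = {"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
client = httpx.Client(**proxy_setting, trust_env=False)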
@@ -87,7 +87,7 @@ def retry_timeout(chunks):


 def chunk_list(chunks):
-return [chunk for chunk in chunks]  # pylint: disable=R1721
+return [chunk for chunk in chunks]


 def chunks_content(chunks):

@@ -164,13 +164,13 @@ chat_completion_no_stream_return_json_with_retry = exception_handle(
 exception_output_handle(lambda err: None),
 )

-def chat_completion_no_stream_return_json(
-messages: List[Dict], llm_config: Dict):
+def chat_completion_no_stream_return_json(messages: List[Dict], llm_config: Dict):
 """call llm without stream, return json object"""
-llm_config["response_format"]={"type": "json_object"}
+llm_config["response_format"] = {"type": "json_object"}
 return chat_completion_no_stream_return_json_with_retry(
-messages=messages,
-llm_config=llm_config)
+messages=messages, llm_config=llm_config
+)


 chat_completion_stream = exception_handle(
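Note: chat_completion_no_stream_return_json forces OpenAI's JSON mode via response_format and, per the exception_output_handle above, falls back to None when every retry fails. A hedged usage sketch; the message content and model name are illustrative:

result = chat_completion_no_stream_return_json(
    messages=[{"role": "user", "content": 'Reply with {"ok": true} as JSON.'}],
    llm_config={"model": "gpt-3.5-turbo-1106"},  # model must support JSON mode
)
print(result)  # parsed object on success, None if all retries failed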
@@ -19,8 +19,7 @@ def retry(func, times):
 raise err.error
 continue
 except Exception as err:
-raise err
-raise err.error
+raise err.error

 return wrapper

@@ -62,9 +61,8 @@ def pipeline(*funcs):
 def wrapper(*args, **kwargs):
 for index, func in enumerate(funcs):
 if index > 0:
-# pylint: disable=E1101
 if isinstance(args, Dict) and args.get("__type__", None) == "parallel":
-args = func(*args["value"])  # pylint: disable=E1126
+args = func(*args["value"])
 else:
 args = func(args)
 else:
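Note: pipeline() threads each stage's return value into the next, with one convention visible above: a dict shaped {"__type__": "parallel", "value": [...]} is splatted into the next stage as positional arguments. A sketch of that convention with illustrative stage functions:

def split(text):
    # The "parallel" marker asks pipeline() to call the next stage as
    # join(head, tail) rather than join((head, tail)).
    head, _, tail = text.partition(" ")
    return {"__type__": "parallel", "value": [head, tail]}

def join(head, tail):
    return f"{head}-{tail}"

run = pipeline(split, join)  # pipeline as defined in this module
print(run("hello world"))    # -> "hello-world"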
@@ -3,9 +3,9 @@ import os
 import sys
 from functools import wraps

-from devchat.memory import ChatMemory
 from devchat.chatmark import Form, Radio, TextEditor
 from devchat.ide import IDEService
+from devchat.memory import ChatMemory

 from .openai import chat_call_completion_stream

@@ -140,7 +140,7 @@ def chat_tools(
 ):
 def decorator(func):
 @wraps(func)
-def wrapper(*args, **kwargs):  # pylint: disable=unused-argument
+def wrapper(*args, **kwargs):
 nonlocal prompt, memory, model, tools, call_confirm_fun, llm_config
 prompt = prompt.format(**kwargs)
 if not tools:

@@ -1,4 +1,3 @@
-
 from .base import ChatMemory
 from .fixsize_memory import FixSizeChatMemory

@@ -7,6 +7,7 @@ class Message(ABC):
 """
 The basic unit of information in a prompt.
 """
+
 content: str = ""

 INSTRUCT = "instruct"

@@ -22,7 +23,7 @@ class Message(ABC):

 @classmethod
 @abstractmethod
-def from_dict(cls, message_data: dict) -> 'Message':
+def from_dict(cls, message_data: dict) -> "Message":
 """
 Convert the message from a dictionary.
 """

@@ -1,11 +1,11 @@
-from .openai_chat import OpenAIChatParameters, OpenAIChatConfig, OpenAIChat
+from .openai_chat import OpenAIChat, OpenAIChatConfig, OpenAIChatParameters
 from .openai_message import OpenAIMessage
 from .openai_prompt import OpenAIPrompt

 __all__ = [
-'OpenAIChat',
-'OpenAIChatConfig',
-'OpenAIChatParameters',
-'OpenAIMessage',
-'OpenAIPrompt'
+"OpenAIChat",
+"OpenAIChatConfig",
+"OpenAIChatParameters",
+"OpenAIMessage",
+"OpenAIPrompt",
 ]

@@ -4,6 +4,7 @@ import os
 import sys
 from urllib.parse import urlparse

+
 class LineReader:
 def __init__(self, response):
 self.response = response

@@ -18,7 +19,7 @@ class LineReader:
 line = line.strip()
 if not line:
 return self.__next__()
-line = line.decode('utf-8')
+line = line.decode("utf-8")
 if not line.startswith("data:"):
 print("Receive invalid line: {line}", end="\n\n", file=sys.stderr)
 raise ValueError(f"Invalid line: {line}")

@@ -31,13 +32,17 @@ class LineReader:
 print(f"Error decoding JSON: {err}", end="\n\n", file=sys.stderr)
 raise ValueError(f"Invalid line: {line}") from err


 def stream_response(connection: http.client.HTTPSConnection, data, headers):
 connection.request("POST", "/v1/chat/completions", body=json.dumps(data), headers=headers)
 response = connection.getresponse()

 if response.status != 200:
-print(f"Error: {response.status} - {response.reason} {response.read()}",
-end="\n\n", file=sys.stderr)
+print(
+f"Error: {response.status} - {response.reason} {response.read()}",
+end="\n\n",
+file=sys.stderr,
+)
 return None
 return LineReader(response=response)

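Note: LineReader consumes a server-sent-events style body where each payload line starts with "data:"; blank lines are skipped and anything else raises ValueError. A hedged sketch of the framing it expects (the bytes below are fabricated):

# Illustrative SSE frames; LineReader strips the "data:" prefix and then
# JSON-decodes the remainder (the terminal "[DONE]" frame is shown raw here).
raw = b'data: {"choices": [{"delta": {"content": "Hi"}}]}\n\ndata: [DONE]\n'
for line in raw.splitlines():
    line = line.strip()
    if not line:
        continue  # skipped, like LineReader's __next__ recursion
    text = line.decode("utf-8")
    assert text.startswith("data:")
    print(text[len("data:"):].strip())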
@@ -1,16 +1,18 @@
-# pylint: disable=import-outside-toplevel
 import json
 import os
-from typing import Optional, Union, List, Dict, Iterator
+from typing import Dict, Iterator, List, Optional, Union

 from pydantic import BaseModel, Field

 from devchat.chat import Chat
 from devchat.utils import get_user_info, user_id

+from .http_openai import stream_request
 from .openai_message import OpenAIMessage
 from .openai_prompt import OpenAIPrompt
-from .http_openai import stream_request

-class OpenAIChatParameters(BaseModel, extra='ignore'):
+class OpenAIChatParameters(BaseModel, extra="ignore"):
 temperature: Optional[float] = Field(0, ge=0, le=2)
 top_p: Optional[float] = Field(None, ge=0, le=1)
 n: Optional[int] = Field(None, ge=1)

@@ -28,6 +30,7 @@ class OpenAIChatConfig(OpenAIChatParameters):
 """
 Configuration object for the OpenAIChat APIs.
 """
+
 model: str

@@ -35,6 +38,7 @@ class OpenAIChat(Chat):
 """
 OpenAIChat class that handles communication with the OpenAI Chat API.
 """
+
 def __init__(self, config: OpenAIChatConfig):
 """
 Initialize the OpenAIChat class with a configuration object.

@@ -52,83 +56,81 @@ class OpenAIChat(Chat):
 return prompt

 def load_prompt(self, data: dict) -> OpenAIPrompt:
-data['_new_messages'] = {
+data["_new_messages"] = {
 k: [OpenAIMessage.from_dict(m) for m in v]
-if isinstance(v, list) else OpenAIMessage.from_dict(v)
-for k, v in data['_new_messages'].items() if k != 'function'
+if isinstance(v, list)
+else OpenAIMessage.from_dict(v)
+for k, v in data["_new_messages"].items()
+if k != "function"
+}
+data["_history_messages"] = {
+k: [OpenAIMessage.from_dict(m) for m in v] for k, v in data["_history_messages"].items()
 }
-data['_history_messages'] = {k: [OpenAIMessage.from_dict(m) for m in v]
-for k, v in data['_history_messages'].items()}
 return OpenAIPrompt(**data)

 def complete_response(self, prompt: OpenAIPrompt) -> str:
-import openai
 import httpx
+import openai

 # Filter the config parameters with set values
 config_params = self.config.dict(exclude_unset=True)
 if prompt.get_functions():
-config_params['functions'] = prompt.get_functions()
-config_params['function_call'] = 'auto'
-config_params['stream'] = False
+config_params["functions"] = prompt.get_functions()
+config_params["function_call"] = "auto"
+config_params["stream"] = False

 proxy_url = os.environ.get("DEVCHAT_PROXY", "")
-proxy_setting ={"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+proxy_setting = (
+{"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+)

 client = openai.OpenAI(
 api_key=os.environ.get("OPENAI_API_KEY", None),
 base_url=os.environ.get("OPENAI_API_BASE", None),
-http_client=httpx.Client(**proxy_setting, trust_env=False)
+http_client=httpx.Client(**proxy_setting, trust_env=False),
 )

-response = client.chat.completions.create(
-messages=prompt.messages,
-**config_params
-)
+response = client.chat.completions.create(messages=prompt.messages, **config_params)
 if isinstance(response, openai.types.chat.chat_completion.ChatCompletion):
 return json.dumps(response.dict())
 return str(response)

 def stream_response(self, prompt: OpenAIPrompt) -> Iterator:
-api_key=os.environ.get("OPENAI_API_KEY", None)
-base_url=os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1/")
+api_key = os.environ.get("OPENAI_API_KEY", None)
+base_url = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1/")

 if not os.environ.get("USE_TIKTOKEN", False) and base_url != "https://api.openai.com/v1/":
 config_params = self.config.dict(exclude_unset=True)
 if prompt.get_functions():
-config_params['functions'] = prompt.get_functions()
-config_params['function_call'] = 'auto'
-config_params['stream'] = True
+config_params["functions"] = prompt.get_functions()
+config_params["function_call"] = "auto"
+config_params["stream"] = True

-data = {
-"messages":prompt.messages,
-**config_params,
-"timeout":180
-}
+data = {"messages": prompt.messages, **config_params, "timeout": 180}
 response = stream_request(api_key, base_url, data)
 return response
-import openai
 import httpx
+import openai

 # Filter the config parameters with set values
 config_params = self.config.dict(exclude_unset=True)
 if prompt.get_functions():
-config_params['functions'] = prompt.get_functions()
-config_params['function_call'] = 'auto'
-config_params['stream'] = True
+config_params["functions"] = prompt.get_functions()
+config_params["function_call"] = "auto"
+config_params["stream"] = True

 proxy_url = os.environ.get("DEVCHAT_PROXY", "")
-proxy_setting ={"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+proxy_setting = (
+{"proxy": {"https://": proxy_url, "http://": proxy_url}} if proxy_url else {}
+)

 client = openai.OpenAI(
 api_key=os.environ.get("OPENAI_API_KEY", None),
 base_url=os.environ.get("OPENAI_API_BASE", None),
-http_client=httpx.Client(**proxy_setting, trust_env=False)
+http_client=httpx.Client(**proxy_setting, trust_env=False),
 )

 response = client.chat.completions.create(
-messages=prompt.messages,
-**config_params,
-timeout=180
+messages=prompt.messages, **config_params, timeout=180
 )
 return response

@@ -1,6 +1,6 @@
 import ast
 import json
-from dataclasses import dataclass, asdict, field, fields
+from dataclasses import asdict, dataclass, field, fields
 from typing import Dict, Optional

 from devchat.message import Message

@@ -17,19 +17,21 @@ class OpenAIMessage(Message):
 raise ValueError("Invalid role. Must be one of 'system', 'user', or 'assistant'.")

 if not self._validate_name():
-raise ValueError("Invalid name. Must contain a-z, A-Z, 0-9, and underscores, "
-"with a maximum length of 64 characters.")
+raise ValueError(
+"Invalid name. Must contain a-z, A-Z, 0-9, and underscores, "
+"with a maximum length of 64 characters."
+)

 def to_dict(self) -> dict:
 state = asdict(self)
-if state['name'] is None:
-del state['name']
-if not state['function_call'] or len(state['function_call'].keys()) == 0:
-del state['function_call']
+if state["name"] is None:
+del state["name"]
+if not state["function_call"] or len(state["function_call"].keys()) == 0:
+del state["function_call"]
 return state

 @classmethod
-def from_dict(cls, message_data: dict) -> 'OpenAIMessage':
+def from_dict(cls, message_data: dict) -> "OpenAIMessage":
 keys = {f.name for f in fields(cls)}
 kwargs = {k: v for k, v in message_data.items() if k in keys}
 return cls(**kwargs)

@@ -44,24 +46,24 @@ class OpenAIMessage(Message):
 }
 '''
 if not self.function_call:
-return ''
+return ""
 function_call_copy = self.function_call.copy()
-if 'arguments' in function_call_copy:
+if "arguments" in function_call_copy:
 # arguments field may be not a json string
 # we can try parse it by eval
 try:
-function_call_copy['arguments'] = ast.literal_eval(function_call_copy['arguments'])
+function_call_copy["arguments"] = ast.literal_eval(function_call_copy["arguments"])
 except Exception:
 # if it is not a json string, we can do nothing
 try:
-function_call_copy['arguments'] = json.loads(function_call_copy['arguments'])
+function_call_copy["arguments"] = json.loads(function_call_copy["arguments"])
 except Exception:
 pass
-return '```command\n' + json.dumps(function_call_copy) + '\n```'
+return "```command\n" + json.dumps(function_call_copy) + "\n```"

 def stream_from_dict(self, message_data: dict) -> str:
 """Append to the message from a dictionary returned from a streaming chat API."""
-delta = message_data.get('content', '')
+delta = message_data.get("content", "")
 if self.content:
 self.content += delta
 else:
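Note: function_call_to_json tries ast.literal_eval before json.loads because streamed "arguments" strings are sometimes Python-literal style rather than strict JSON. A small sketch of why both parsers are needed (sample strings fabricated):

import ast
import json

# The first string is a Python literal (single quotes) that json.loads rejects;
# the second is strict JSON ("true") that ast.literal_eval rejects.
for raw in ("{'path': 'a.py'}", '{"ok": true}'):
    try:
        print(ast.literal_eval(raw))
    except (ValueError, SyntaxError):
        print(json.loads(raw))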
@@ -1,11 +1,12 @@
-from dataclasses import dataclass
 import json
 import sys
+from dataclasses import dataclass
 from typing import List, Optional
-from devchat.prompt import Prompt
 from devchat.message import Message
-from devchat.utils import update_dict, get_logger
-from devchat.utils import openai_message_tokens, openai_response_tokens
+from devchat.prompt import Prompt
+from devchat.utils import get_logger, openai_message_tokens, openai_response_tokens, update_dict

 from .openai_message import OpenAIMessage

 logger = get_logger(__name__)

@@ -31,9 +32,10 @@ class OpenAIPrompt(Prompt):
 combined += [msg.to_dict() for msg in self._new_messages[Message.INSTRUCT]]
 # History context
 if self._history_messages[Message.CONTEXT]:
-combined += [update_dict(msg.to_dict(), 'content',
-f"<context>\n{msg.content}\n</context>")
-for msg in self._history_messages[Message.CONTEXT]]
+combined += [
+update_dict(msg.to_dict(), "content", f"<context>\n{msg.content}\n</context>")
+for msg in self._history_messages[Message.CONTEXT]
+]
 # History chat
 if self._history_messages[Message.CHAT]:
 combined += [msg.to_dict() for msg in self._history_messages[Message.CHAT]]

@@ -42,9 +44,10 @@ class OpenAIPrompt(Prompt):
 combined += [self.request.to_dict()]
 # New context
 if self.new_context:
-combined += [update_dict(msg.to_dict(), 'content',
-f"<context>\n{msg.content}\n</context>")
-for msg in self.new_context]
+combined += [
+update_dict(msg.to_dict(), "content", f"<context>\n{msg.content}\n</context>")
+for msg in self.new_context
+]
 return combined

 def input_messages(self, messages: List[dict]):

@@ -94,12 +97,13 @@ class OpenAIPrompt(Prompt):
 continue
 self._history_messages[Message.CHAT].append(last_message)

-def append_new(self, message_type: str, content: str,
-available_tokens: int = sys.maxsize) -> bool:
+def append_new(
+self, message_type: str, content: str, available_tokens: int = sys.maxsize
+) -> bool:
 if message_type not in (Message.INSTRUCT, Message.CONTEXT):
 raise ValueError(f"Current messages cannot be of type {message_type}.")
 # New instructions and context are of the system role
-message = OpenAIMessage(content=content, role='system')
+message = OpenAIMessage(content=content, role="system")

 num_tokens = openai_message_tokens(message.to_dict(), self.model)
 if num_tokens > available_tokens:

@@ -121,8 +125,9 @@ class OpenAIPrompt(Prompt):
 def get_functions(self):
 return self._new_messages.get(Message.FUNCTION, None)

-def _prepend_history(self, message_type: str, message: Message,
-token_limit: int = sys.maxsize) -> bool:
+def _prepend_history(
+self, message_type: str, message: Message, token_limit: int = sys.maxsize
+) -> bool:
 if message_type == Message.INSTRUCT:
 raise ValueError("History messages cannot be of type INSTRUCT.")
 num_tokens = openai_message_tokens(message.to_dict(), self.model)

@@ -132,7 +137,7 @@ class OpenAIPrompt(Prompt):
 self._request_tokens += num_tokens
 return True

-def prepend_history(self, prompt: 'OpenAIPrompt', token_limit: int = sys.maxsize) -> bool:
+def prepend_history(self, prompt: "OpenAIPrompt", token_limit: int = sys.maxsize) -> bool:
 # Prepend the first response and the request of the prompt
 if not self._prepend_history(Message.CHAT, prompt.responses[0], token_limit):
 return False

@@ -148,9 +153,9 @@ class OpenAIPrompt(Prompt):
 def set_request(self, content: str, function_name: Optional[str] = None) -> int:
 if not content.strip():
 raise ValueError("The request cannot be empty.")
-message = OpenAIMessage(content=content,
-role=('user' if not function_name else 'function'),
-name=function_name)
+message = OpenAIMessage(
+content=content, role=("user" if not function_name else "function"), name=function_name
+)
 self.request = message
 self._request_tokens += openai_message_tokens(message.to_dict(), self.model)

@@ -166,17 +171,17 @@ class OpenAIPrompt(Prompt):
 self._timestamp_from_dict(response_data)
 self._id_from_dict(response_data)

-self._request_tokens = response_data['usage']['prompt_tokens']
-self._response_tokens = response_data['usage']['completion_tokens']
+self._request_tokens = response_data["usage"]["prompt_tokens"]
+self._response_tokens = response_data["usage"]["completion_tokens"]

-for choice in response_data['choices']:
-index = choice['index']
+for choice in response_data["choices"]:
+index = choice["index"]
 if index >= len(self.responses):
 self.responses.extend([None] * (index - len(self.responses) + 1))
 self._response_reasons.extend([None] * (index - len(self._response_reasons) + 1))
-self.responses[index] = OpenAIMessage.from_dict(choice['message'])
-if choice['finish_reason']:
-self._response_reasons[index] = choice['finish_reason']
+self.responses[index] = OpenAIMessage.from_dict(choice["message"])
+if choice["finish_reason"]:
+self._response_reasons[index] = choice["finish_reason"]

 def append_response(self, delta_str: str) -> str:
 """

@@ -193,11 +198,11 @@ class OpenAIPrompt(Prompt):
 self._timestamp_from_dict(response_data)
 self._id_from_dict(response_data)

-delta_content = ''
-for choice in response_data['choices']:
-delta = choice['delta']
-index = choice['index']
-finish_reason = choice['finish_reason']
+delta_content = ""
+for choice in response_data["choices"]:
+delta = choice["delta"]
+index = choice["index"]
+finish_reason = choice["finish_reason"]

 if index >= len(self.responses):
 self.responses.extend([None] * (index - len(self.responses) + 1))

@@ -206,22 +211,24 @@ class OpenAIPrompt(Prompt):
 if not self.responses[index]:
 self.responses[index] = OpenAIMessage.from_dict(delta)
 if index == 0:
-delta_content = self.responses[0].content if self.responses[0].content else ''
+delta_content = self.responses[0].content if self.responses[0].content else ""
 else:
 if index == 0:
 delta_content = self.responses[0].stream_from_dict(delta)
 else:
 self.responses[index].stream_from_dict(delta)

-if 'function_call' in delta:
-if 'name' in delta['function_call'] and \
-self.responses[index].function_call.get('name', '') == '':
-self.responses[index].function_call['name'] = \
-delta['function_call']['name']
-if 'arguments' in delta['function_call']:
-self.responses[index].function_call['arguments'] = \
-self.responses[index].function_call.get('arguments', '') + \
-delta['function_call']['arguments']
+if "function_call" in delta:
+if (
+"name" in delta["function_call"]
+and self.responses[index].function_call.get("name", "") == ""
+):
+self.responses[index].function_call["name"] = delta["function_call"]["name"]
+if "arguments" in delta["function_call"]:
+self.responses[index].function_call["arguments"] = (
+self.responses[index].function_call.get("arguments", "")
++ delta["function_call"]["arguments"]
+)

 if finish_reason:
 self._response_reasons[index] = finish_reason
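Note: streamed function calls arrive in pieces -- the name usually once, the arguments string accumulated across chunks -- which is what the rewritten block above does. A hedged sketch with fabricated deltas:

deltas = [
    {"function_call": {"name": "run_code", "arguments": '{"co'}},
    {"function_call": {"arguments": 'de": "print(1)"}'}},
]
call = {}
for delta in deltas:
    fc = delta["function_call"]
    if "name" in fc and call.get("name", "") == "":
        call["name"] = fc["name"]
    if "arguments" in fc:
        call["arguments"] = call.get("arguments", "") + fc["arguments"]
print(call)  # {'name': 'run_code', 'arguments': '{"code": "print(1)"}'}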
@@ -231,19 +238,19 @@ class OpenAIPrompt(Prompt):
 return sum(openai_response_tokens(resp.to_dict(), self.model) for resp in self.responses)

 def _validate_model(self, response_data: dict):
-if not response_data['model'].startswith(self.model):
-logger.warning("Model mismatch: expected '%s', got '%s'",
-self.model, response_data['model'])
+if not response_data["model"].startswith(self.model):
+logger.warning(
+"Model mismatch: expected '%s', got '%s'", self.model, response_data["model"]
+)

 def _timestamp_from_dict(self, response_data: dict):
 if not self._timestamp:
-self._timestamp = response_data['created']
-elif self._timestamp != response_data['created']:
-self._timestamp = response_data['created']
+self._timestamp = response_data["created"]
+elif self._timestamp != response_data["created"]:
+self._timestamp = response_data["created"]

 def _id_from_dict(self, response_data: dict):
 if self._id is None:
-self._id = response_data['id']
-elif self._id != response_data['id']:
-raise ValueError(f"ID mismatch: expected {self._id}, "
-f"got {response_data['id']}")
+self._id = response_data["id"]
+elif self._id != response_data["id"]:
+raise ValueError(f"ID mismatch: expected {self._id}, " f"got {response_data['id']}")

@@ -1,12 +1,12 @@
-from abc import ABC, abstractmethod
-from dataclasses import dataclass, field, asdict
 import hashlib
-from datetime import datetime
 import sys
+from abc import ABC, abstractmethod
+from dataclasses import asdict, dataclass, field
+from datetime import datetime
 from typing import Dict, List
-from devchat.message import Message
-from devchat.utils import unix_to_local_datetime, get_logger, user_id

+from devchat.message import Message
+from devchat.utils import get_logger, unix_to_local_datetime, user_id

 logger = get_logger(__name__)

@@ -33,16 +33,17 @@ class Prompt(ABC):
 model: str
 user_name: str
 user_email: str
-_new_messages: Dict = field(default_factory=lambda: {
-Message.INSTRUCT: [],
-'request': None,
-Message.CONTEXT: [],
-'responses': []
-})
-_history_messages: Dict[str, Message] = field(default_factory=lambda: {
-Message.CONTEXT: [],
-Message.CHAT: []
-})
+_new_messages: Dict = field(
+default_factory=lambda: {
+Message.INSTRUCT: [],
+"request": None,
+Message.CONTEXT: [],
+"responses": [],
+}
+)
+_history_messages: Dict[str, Message] = field(
+default_factory=lambda: {Message.CONTEXT: [], Message.CHAT: []}
+)
 parent: str = None
 references: List[str] = field(default_factory=list)
 _timestamp: int = 0
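Note: the reflow above keeps the field(default_factory=...) pattern, which dataclasses require for mutable defaults so each instance gets a fresh container. A minimal sketch (class name illustrative):

from dataclasses import dataclass, field
from typing import Dict

@dataclass
class Example:
    # A bare `messages: Dict = {}` raises ValueError (mutable default);
    # default_factory builds a new dict per instance instead.
    messages: Dict = field(default_factory=lambda: {"request": None, "responses": []})

a, b = Example(), Example()
a.messages["responses"].append("hi")
print(b.messages["responses"])  # [] -- no shared state between instances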
@@ -59,8 +60,9 @@ class Prompt(ABC):
 bool: Whether the prompt is complete.
 """
 if not self.request or not self.responses:
-logger.warning("Incomplete prompt: request = %s, response = %s",
-self.request, self.responses)
+logger.warning(
+"Incomplete prompt: request = %s, response = %s", self.request, self.responses
+)
 return False

 if not self.timestamp:

@@ -78,15 +80,15 @@ class Prompt(ABC):

 @property
 def request(self) -> Message:
-return self._new_messages['request']
+return self._new_messages["request"]

 @request.setter
 def request(self, value: Message):
-self._new_messages['request'] = value
+self._new_messages["request"] = value

 @property
 def responses(self) -> List[Message]:
-return self._new_messages['responses']
+return self._new_messages["responses"]

 @property
 def timestamp(self) -> int:

@@ -142,8 +144,9 @@ class Prompt(ABC):
 """

 @abstractmethod
-def append_new(self, message_type: str, content: str,
-available_tokens: int = sys.maxsize) -> bool:
+def append_new(
+self, message_type: str, content: str, available_tokens: int = sys.maxsize
+) -> bool:
 """
 Append a new message provided by the user to this prompt.

@@ -215,9 +218,9 @@ class Prompt(ABC):
 self._response_tokens = self._count_response_tokens()

 data = asdict(self)
-data.pop('_hash')
+data.pop("_hash")
 string = str(tuple(sorted(data.items())))
-self._hash = hashlib.sha256(string.encode('utf-8')).hexdigest()
+self._hash = hashlib.sha256(string.encode("utf-8")).hexdigest()
 return self._hash

 def formatted_header(self) -> str:

@@ -238,12 +241,12 @@ class Prompt(ABC):
 note = None
 formatted_str = "\n\n"
 reason = self._response_reasons[index]
-if reason == 'length':
+if reason == "length":
 note = "Incomplete model output due to max_tokens parameter or token limit"
-elif reason == 'function_call':
+elif reason == "function_call":
 formatted_str += self.responses[index].function_call_to_json() + "\n\n"
 note = "The model decided to call a function"
-elif reason == 'content_filter':
+elif reason == "content_filter":
 note = "Omitted content due to a flag from our content filters"

 if note:

@@ -262,8 +265,12 @@ class Prompt(ABC):
 str: The formatted response string. None if the response is invalid.
 """
 if index >= len(self.responses) or not self.responses[index]:
-logger.error("Response index %d is invalid to format: request = %s, response = %s",
-index, self.request, self.responses)
+logger.error(
+"Response index %d is invalid to format: request = %s, response = %s",
+index,
+self.request,
+self.responses,
+)
 return None

 formatted_str = ""

@@ -280,8 +287,9 @@ class Prompt(ABC):

 responses = []
 for message in self.responses:
-responses.append((message.content if message.content else "")
-+ message.function_call_to_json())
+responses.append(
+(message.content if message.content else "") + message.function_call_to_json()
+)

 return {
 "user": user_id(self.user_name, self.user_email)[0],

@@ -292,5 +300,5 @@ class Prompt(ABC):
 "request_tokens": self._request_tokens,
 "response_tokens": self._response_tokens,
 "hash": self.hash,
-"parent": self.parent
+"parent": self.parent,
 }

@@ -1,10 +1,11 @@
-# pylint: disable=import-outside-toplevel
-from dataclasses import asdict
 import json
 import os
-from typing import List, Dict, Any, Optional
-from tinydb import TinyDB, where, Query
+from dataclasses import asdict
+from typing import Any, Dict, List, Optional

+from tinydb import Query, TinyDB, where
 from tinydb.table import Table

 from devchat.chat import Chat
 from devchat.prompt import Prompt
 from devchat.utils import get_logger

@@ -25,22 +26,24 @@ class Store:
 if not os.path.isdir(store_dir):
 os.makedirs(store_dir)

-self._graph_path = os.path.join(store_dir, 'prompts.graphml')
-self._chat_list_path = os.path.join(store_dir, 'prompts_list.json')
-self._db_path = os.path.join(store_dir, 'prompts.json')
+self._graph_path = os.path.join(store_dir, "prompts.graphml")
+self._chat_list_path = os.path.join(store_dir, "prompts_list.json")
+self._db_path = os.path.join(store_dir, "prompts.json")
 self._chat = chat

 self._db = TinyDB(self._db_path)
 self._db_meta = self._migrate_db()
-self._topics_table = self._db.table('topics')
+self._topics_table = self._db.table("topics")

 if os.path.isfile(self._chat_list_path):
-with open(self._chat_list_path, 'r', encoding="utf-8") as file:
+with open(self._chat_list_path, "r", encoding="utf-8") as file:
 self._chat_lists = json.loads(file.read())
 elif os.path.isfile(self._graph_path):
 # convert old graphml to new json
 from xml.etree.ElementTree import ParseError

 import networkx as nx

 try:
 graph = nx.read_graphml(self._graph_path)

@@ -48,25 +51,25 @@ class Store:

 self._chat_lists = []
 for root in roots:
-chat_list = [(root, graph.nodes[root]['timestamp'])]
+chat_list = [(root, graph.nodes[root]["timestamp"])]

 ancestors = nx.ancestors(graph, root)
 for ancestor in ancestors:
-chat_list.append((ancestor, graph.nodes[ancestor]['timestamp']))
+chat_list.append((ancestor, graph.nodes[ancestor]["timestamp"]))

 self._chat_lists.append(chat_list)

-with open(self._chat_list_path, 'w', encoding="utf-8") as file:
+with open(self._chat_list_path, "w", encoding="utf-8") as file:
 file.write(json.dumps(self._chat_lists))

 # rename graphml to json
-os.rename(self._graph_path, self._graph_path + '.bak')
+os.rename(self._graph_path, self._graph_path + ".bak")

 # update topic table, add request and response fields
 # new fields: user, date, request, responses, hash
 visible_topics = self._topics_table.all()
 for topic in visible_topics:
-prompt = self.get_prompt(topic['root'])
+prompt = self.get_prompt(topic["root"])
 if not prompt:
 continue
 self._update_topic_fields(topic, prompt)

@@ -81,37 +84,38 @@ class Store:
 self._initialize_topics_table()

 def _update_topic_fields(self, topic, prompt):
-topic['user'] = prompt.user_name
-topic['date'] = prompt.timestamp
-topic['request'] = prompt.request.content
-topic['responses'] = prompt.responses[0].content if prompt.responses else ""
-topic['hash'] = prompt.hash
-if len(topic['request']) > 100:
-topic['request'] = topic['request'][:100] + "..."
-if len(topic['responses']) > 100:
-topic['responses'] = topic['responses'][:100] + "..."
+topic["user"] = prompt.user_name
+topic["date"] = prompt.timestamp
+topic["request"] = prompt.request.content
+topic["responses"] = prompt.responses[0].content if prompt.responses else ""
+topic["hash"] = prompt.hash
+if len(topic["request"]) > 100:
+topic["request"] = topic["request"][:100] + "..."
+if len(topic["responses"]) > 100:
+topic["responses"] = topic["responses"][:100] + "..."


 def _migrate_db(self) -> Table:
 """
 Migrate the database to the latest version.
 """
-metadata = self._db.table('metadata')
+metadata = self._db.table("metadata")

+result = metadata.get(where("version").exists())
+if not result or result["version"].startswith("0.1."):

-result = metadata.get(where('version').exists())
-if not result or result['version'].startswith('0.1.'):
 def replace_response():
 def transform(doc):
-if '_new_messages' not in doc or 'response' not in doc['_new_messages']:
-logger.error("Prompt %s does not match '_new_messages.response'",
-doc['_hash'])
-doc['_new_messages']['responses'] = doc['_new_messages'].pop('response')
+if "_new_messages" not in doc or "response" not in doc["_new_messages"]:
+logger.error(
+"Prompt %s does not match '_new_messages.response'", doc["_hash"]
+)
+doc["_new_messages"]["responses"] = doc["_new_messages"].pop("response")

 return transform

 logger.info("Migrating database from %s to 0.2.0", result)
-self._db.update(replace_response(),
-Query()._new_messages.response.exists())  # pylint: disable=W0212
-metadata.insert({'version': '0.2.0'})
+self._db.update(replace_response(), Query()._new_messages.response.exists())
+metadata.insert({"version": "0.2.0"})
 return metadata

 def _initialize_topics_table(self):
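Note: _migrate_db relies on TinyDB's update-with-callable form, where the callable mutates each matching document in place. A hedged sketch of the same mechanism on a throwaway in-memory table (data fabricated):

from tinydb import Query, TinyDB
from tinydb.storages import MemoryStorage

# Toy version of the 0.1.x -> 0.2.0 migration: rename "response" to "responses".
db = TinyDB(storage=MemoryStorage)
db.insert({"_new_messages": {"response": ["hi"]}})

def transform(doc):
    doc["_new_messages"]["responses"] = doc["_new_messages"].pop("response")

db.update(transform, Query()._new_messages.response.exists())
print(db.all())  # [{'_new_messages': {'responses': ['hi']}}]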
@ -120,18 +124,13 @@ class Store:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
first = chat_list[0]
|
first = chat_list[0]
|
||||||
last = chat_list[-1]
|
last = chat_list[-1]
|
||||||
|
|
||||||
topic = {
|
topic = {"root": first[0], "latest_time": last[1], "title": None, "hidden": False}
|
||||||
'root': first[0],
|
|
||||||
'latest_time': last[1],
|
|
||||||
'title': None,
|
|
||||||
'hidden': False
|
|
||||||
}
|
|
||||||
|
|
||||||
prompt = self.get_prompt(topic['root'])
|
prompt = self.get_prompt(topic["root"])
|
||||||
if not prompt:
|
if not prompt:
|
||||||
logger.error("Prompt %s not found while selecting from the store", topic['root'])
|
logger.error("Prompt %s not found while selecting from the store", topic["root"])
|
||||||
continue
|
continue
|
||||||
self._update_topic_fields(topic, prompt)
|
self._update_topic_fields(topic, prompt)
|
||||||
|
|
||||||
@ -145,17 +144,17 @@ class Store:
|
|||||||
|
|
||||||
if chat_list[-1][0] == prompt.hash:
|
if chat_list[-1][0] == prompt.hash:
|
||||||
topic_hash = chat_list[0][0]
|
topic_hash = chat_list[0][0]
|
||||||
topic = next((t for t in self._topics_table if t['root'] == topic_hash), None)
|
topic = next((t for t in self._topics_table if t["root"] == topic_hash), None)
|
||||||
if topic:
|
if topic:
|
||||||
topic['latest_time'] = max(topic.get('latest_time', 0), prompt.timestamp)
|
topic["latest_time"] = max(topic.get("latest_time", 0), prompt.timestamp)
|
||||||
self._topics_table.update(topic, doc_ids=[topic.doc_id])
|
self._topics_table.update(topic, doc_ids=[topic.doc_id])
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
topic = {
|
topic = {
|
||||||
'root': prompt.hash,
|
"root": prompt.hash,
|
||||||
'latest_time': prompt.timestamp,
|
"latest_time": prompt.timestamp,
|
||||||
'title': None,
|
"title": None,
|
||||||
'hidden': False
|
"hidden": False,
|
||||||
}
|
}
|
||||||
self._update_topic_fields(topic, prompt)
|
self._update_topic_fields(topic, prompt)
|
||||||
self._topics_table.insert(topic)
|
self._topics_table.insert(topic)
|
||||||
@ -187,12 +186,11 @@ class Store:
|
|||||||
self._chat_lists.append([(prompt.hash, prompt.timestamp)])
|
self._chat_lists.append([(prompt.hash, prompt.timestamp)])
|
||||||
self._update_topics_table(prompt)
|
self._update_topics_table(prompt)
|
||||||
|
|
||||||
with open(self._chat_list_path, 'w', encoding="utf-8") as file:
|
with open(self._chat_list_path, "w", encoding="utf-8") as file:
|
||||||
file.write(json.dumps(self._chat_lists))
|
file.write(json.dumps(self._chat_lists))
|
||||||
|
|
||||||
return topic_hash
|
return topic_hash
|
||||||
|
|
||||||
|
|
||||||
def get_prompt(self, prompt_hash: str) -> Prompt:
|
def get_prompt(self, prompt_hash: str) -> Prompt:
|
||||||
"""
|
"""
|
||||||
Retrieve a prompt from the store.
|
Retrieve a prompt from the store.
|
||||||
@ -203,7 +201,7 @@ class Store:
|
|||||||
Prompt: The retrieved prompt. None if the prompt is not found.
|
Prompt: The retrieved prompt. None if the prompt is not found.
|
||||||
"""
|
"""
|
||||||
# Retrieve the prompt object from TinyDB
|
# Retrieve the prompt object from TinyDB
|
||||||
prompt_data = self._db.search(where('_hash') == prompt_hash)
|
prompt_data = self._db.search(where("_hash") == prompt_hash)
|
||||||
if not prompt_data:
|
if not prompt_data:
|
||||||
logger.warning("Prompt %s not found while retrieving from object store.", prompt_hash)
|
logger.warning("Prompt %s not found while retrieving from object store.", prompt_hash)
|
||||||
return None
|
return None
|
||||||
@@ -266,24 +264,25 @@ class Store:
             List[Dict[str, Any]]: A list of dictionaries containing root prompts
                 with latest_time, and title fields.
         """
-        visible_topics = self._topics_table.search(
-            where('hidden') == False)  # pylint: disable=C0121
-        sorted_topics = sorted(visible_topics, key=lambda x: x['latest_time'], reverse=True)
+        visible_topics = self._topics_table.search(where("hidden") == False)  # noqa: E712
+        sorted_topics = sorted(visible_topics, key=lambda x: x["latest_time"], reverse=True)

         topics = []
         for topic in sorted_topics[start:end]:
-            topics.append({
-                'root_prompt': {
-                    'hash': topic['root'],
-                    'user': topic['user'],
-                    'date': topic['date'],
-                    'request': topic['request'],
-                    'responses': [topic['responses']],
-                },
-                'latest_time': topic['latest_time'],
-                'title': topic['title'],
-                'hidden': topic['hidden'],
-            })
+            topics.append(
+                {
+                    "root_prompt": {
+                        "hash": topic["root"],
+                        "user": topic["user"],
+                        "date": topic["date"],
+                        "request": topic["request"],
+                        "responses": [topic["responses"]],
+                    },
+                    "latest_time": topic["latest_time"],
+                    "title": topic["title"],
+                    "hidden": topic["hidden"],
+                }
+            )
         return topics


     def delete_prompt(self, prompt_hash: str) -> bool:
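The switch from `# pylint: disable=C0121` to `# noqa: E712` here is not cosmetic: TinyDB builds queries by overloading `==`, so the "idiomatic" fixes linters suggest (`is False`, `not ...`) would silently break the query. A small sketch of why the literal comparison is required (standard TinyDB usage):

```python
from tinydb import TinyDB, where

db = TinyDB("store.json")
topics = db.table("topics")

# `where("hidden") == False` returns a TinyDB query object, not a bool, so
# flake8's E712 ("comparison to False") must be silenced rather than "fixed".
visible = topics.search(where("hidden") == False)  # noqa: E712

# By contrast, `where("hidden") is False` would evaluate to the plain bool
# False and could not be used as a search condition at all.
```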
@@ -316,13 +315,13 @@ class Store:
             return False

         # Update the topics table
-        self._topics_table.remove(where('root') == prompt_hash)
+        self._topics_table.remove(where("root") == prompt_hash)

         # Remove the prompt from the database
-        self._db.remove(where('_hash') == prompt_hash)
+        self._db.remove(where("_hash") == prompt_hash)

         # Save the graph
-        with open(self._chat_list_path, 'w', encoding="utf-8") as file:
+        with open(self._chat_list_path, "w", encoding="utf-8") as file:
             file.write(json.dumps(self._chat_lists))

         return True
@@ -1,19 +1,17 @@
-# pylint: disable=import-outside-toplevel
+import datetime
+import getpass
+import hashlib
 import logging
 import os
 import re
-import getpass
 import socket
 import subprocess
-from typing import List, Tuple, Optional
-import datetime
-import hashlib
+from typing import List, Optional, Tuple

-log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-# pylint: disable=invalid-name
+log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
 encoding = None


 def setup_logger(file_path: Optional[str] = None):
     """Utility function to set up a global file log handler."""
     if file_path is None:
@@ -28,7 +26,7 @@ def get_logger(name: str = None, handler: logging.Handler = None) -> logging.Logger:
     local_logger = logging.getLogger(name)

     # Default to 'INFO' if 'LOG_LEVEL' env is not set
-    log_level_str = os.getenv('LOG_LEVEL', 'INFO')
+    log_level_str = os.getenv("LOG_LEVEL", "INFO")
     log_level = getattr(logging, log_level_str.upper(), logging.INFO)
     local_logger.setLevel(log_level)
@@ -55,9 +53,13 @@ def find_root_dir() -> Tuple[Optional[str], Optional[str]]:

     repo_dir = None
     try:
-        repo_dir = subprocess.run(["git", "rev-parse", "--show-toplevel"],
-                                  capture_output=True, text=True, check=True,
-                                  encoding='utf-8').stdout.strip()
+        repo_dir = subprocess.run(
+            ["git", "rev-parse", "--show-toplevel"],
+            capture_output=True,
+            text=True,
+            check=True,
+            encoding="utf-8",
+        ).stdout.strip()
         if not os.path.isdir(repo_dir):
             repo_dir = None
         else:
@@ -66,8 +68,9 @@ def find_root_dir() -> Tuple[Optional[str], Optional[str]]:
         repo_dir = None

     try:
-        result = subprocess.run(["svn", "info"],
-                                capture_output=True, text=True, check=True, encoding='utf-8')
+        result = subprocess.run(
+            ["svn", "info"], capture_output=True, text=True, check=True, encoding="utf-8"
+        )
         if result.returncode == 0:
             for line in result.stdout.splitlines():
                 if line.startswith("Working Copy Root Path: "):
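The reflowed `subprocess.run` calls keep the same behavior: `check=True` raises on a non-zero exit, and `capture_output`/`text`/`encoding` make stdout a decoded string. A self-contained sketch of the git branch of this detection, with the error handling condensed from the hunk's surrounding context:

```python
import subprocess
from typing import Optional


def git_toplevel() -> Optional[str]:
    """Return the repo root from `git rev-parse`, or None outside a work tree."""
    try:
        return subprocess.run(
            ["git", "rev-parse", "--show-toplevel"],
            capture_output=True,
            text=True,
            check=True,  # raises CalledProcessError on non-zero exit
            encoding="utf-8",
        ).stdout.strip()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Not a git repo, or git is not installed.
        return None
```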
@@ -81,10 +84,10 @@ def find_root_dir() -> Tuple[Optional[str], Optional[str]]:


 def add_gitignore(target_dir: str, *ignore_entries: str) -> None:
-    gitignore_path = os.path.join(target_dir, '.gitignore')
+    gitignore_path = os.path.join(target_dir, ".gitignore")

     if os.path.exists(gitignore_path):
-        with open(gitignore_path, 'r', encoding='utf-8') as gitignore_file:
+        with open(gitignore_path, "r", encoding="utf-8") as gitignore_file:
             gitignore_content = gitignore_file.read()

         new_entries = []
@@ -93,15 +96,15 @@ def add_gitignore(target_dir: str, *ignore_entries: str) -> None:
             new_entries.append(entry)

     if new_entries:
-        with open(gitignore_path, 'a', encoding='utf-8') as gitignore_file:
-            gitignore_file.write('\n# devchat\n')
+        with open(gitignore_path, "a", encoding="utf-8") as gitignore_file:
+            gitignore_file.write("\n# devchat\n")
             for entry in new_entries:
-                gitignore_file.write(f'{entry}\n')
+                gitignore_file.write(f"{entry}\n")
     else:
-        with open(gitignore_path, 'w', encoding='utf-8') as gitignore_file:
-            gitignore_file.write('# devchat\n')
+        with open(gitignore_path, "w", encoding="utf-8") as gitignore_file:
+            gitignore_file.write("# devchat\n")
             for entry in ignore_entries:
-                gitignore_file.write(f'{entry}\n')
+                gitignore_file.write(f"{entry}\n")


 def unix_to_local_datetime(unix_time) -> datetime.datetime:
@@ -116,8 +119,8 @@ def unix_to_local_datetime(unix_time) -> datetime.datetime:

 def get_user_info() -> Tuple[str, str]:
     try:
-        cmd = ['git', 'config', 'user.name']
-        user_name = subprocess.check_output(cmd, encoding='utf-8').strip()
+        cmd = ["git", "config", "user.name"]
+        user_name = subprocess.check_output(cmd, encoding="utf-8").strip()
     except Exception:
         try:
             user_name = getpass.getuser()
@@ -126,17 +129,17 @@ def get_user_info() -> Tuple[str, str]:
             user_name = user_dir.split(os.sep)[-1]

     try:
-        cmd = ['git', 'config', 'user.email']
-        user_email = subprocess.check_output(cmd, encoding='utf-8').strip()
+        cmd = ["git", "config", "user.email"]
+        user_email = subprocess.check_output(cmd, encoding="utf-8").strip()
     except Exception:
-        user_email = user_name + '@' + socket.gethostname()
+        user_email = user_name + "@" + socket.gethostname()

     return user_name, user_email


 def user_id(user_name, user_email) -> Tuple[str, str]:
     user_str = f"{user_name} <{user_email}>"
-    user_hash = hashlib.sha1(user_str.encode('utf-8')).hexdigest()
+    user_hash = hashlib.sha1(user_str.encode("utf-8")).hexdigest()
     return user_str, user_hash

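The pattern in these two hunks is a graceful fallback chain: prefer git's configured identity, fall back to the OS account name, and synthesize an email from the hostname. A condensed sketch using the same calls as the hunk (the nested home-directory fallback is elided):

```python
import getpass
import socket
import subprocess
from typing import Tuple


def guess_identity() -> Tuple[str, str]:
    try:
        name = subprocess.check_output(
            ["git", "config", "user.name"], encoding="utf-8"
        ).strip()
    except Exception:
        name = getpass.getuser()  # OS-level account name as fallback
    try:
        email = subprocess.check_output(
            ["git", "config", "user.email"], encoding="utf-8"
        ).strip()
    except Exception:
        email = name + "@" + socket.gethostname()  # synthesized address
    return name, email
```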
@@ -151,7 +154,7 @@ def parse_files(file_paths: List[str]) -> List[str]:

     contents = []
     for file_path in file_paths:
-        with open(file_path, 'r', encoding='utf-8') as file:
+        with open(file_path, "r", encoding="utf-8") as file:
             content = file.read()
             if not content:
                 raise ValueError(f"File {file_path} is empty.")
@@ -161,7 +164,7 @@ def parse_files(file_paths: List[str]) -> List[str]:

 def valid_hash(hash_str):
     """Check if a string is a valid hash value."""
-    pattern = re.compile(r'^[a-f0-9]{64}$')  # for SHA-256 hash
+    pattern = re.compile(r"^[a-f0-9]{64}$")  # for SHA-256 hash
     return bool(pattern.match(hash_str))

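For reference, the anchored pattern accepts exactly the lowercase hex form of a SHA-256 digest (64 hex characters), which is what the store uses as prompt hashes:

```python
import hashlib
import re

pattern = re.compile(r"^[a-f0-9]{64}$")

digest = hashlib.sha256(b"devchat").hexdigest()
assert pattern.match(digest)           # 64 lowercase hex chars: accepted
assert not pattern.match(digest[:40])  # a SHA-1-length string is rejected
```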
@@ -198,25 +201,24 @@ def update_dict(dict_to_update, key, value) -> dict:
     return dict_to_update


-def openai_message_tokens(messages: dict, model: str) -> int:  # pylint: disable=unused-argument
+def openai_message_tokens(messages: dict, model: str) -> int:
     """Returns the number of tokens used by a message."""
     if not os.environ.get("USE_TIKTOKEN", False):
-        return len(str(messages))/4
+        return len(str(messages)) / 4

-    # pylint: disable=global-statement
     global encoding
     if not encoding:
         import tiktoken

         script_dir = os.path.dirname(os.path.realpath(__file__))
-        os.environ['TIKTOKEN_CACHE_DIR'] = os.path.join(script_dir, 'tiktoken_cache')
+        os.environ["TIKTOKEN_CACHE_DIR"] = os.path.join(script_dir, "tiktoken_cache")

         try:
             encoding = tiktoken.get_encoding("cl100k_base")
         except Exception:
             from tiktoken import registry
-            from tiktoken.registry import _find_constructors
             from tiktoken.core import Encoding
+            from tiktoken.registry import _find_constructors

             def get_encoding(name: str):
                 _find_constructors()
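The reordered imports here belong to a fallback that rebuilds an encoding from tiktoken's internal registry when `tiktoken.get_encoding` fails (for instance when the cache or network path is unavailable). A hedged sketch of how such a fallback fits together, based only on the names visible in the hunk; `ENCODING_CONSTRUCTORS` and `_find_constructors` are private tiktoken internals and may change between versions:

```python
# Sketch only: relies on private, version-dependent tiktoken internals.
from tiktoken import registry
from tiktoken.core import Encoding
from tiktoken.registry import _find_constructors


def get_encoding(name: str) -> Encoding:
    _find_constructors()  # populate registry.ENCODING_CONSTRUCTORS
    constructor = registry.ENCODING_CONSTRUCTORS[name]
    return Encoding(**constructor())  # constructor yields Encoding kwargs
```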
@@ -1,8 +1,9 @@
 import click
-from devchat.workflow.command.update import update
-from devchat.workflow.command.list import list_cmd
-from devchat.workflow.command.env import env
 from devchat.workflow.command.config import config_cmd
+from devchat.workflow.command.env import env
+from devchat.workflow.command.list import list_cmd
+from devchat.workflow.command.update import update


 @click.group(help="CLI for devchat workflow engine.")
@@ -1,6 +1,6 @@
 import json
+
 from pathlib import Path

 import click
 import oyaml as yaml
-

@@ -10,7 +10,6 @@ from devchat.workflow.path import WORKFLOWS_BASE, WORKFLOWS_CONFIG_FILENAME
 @click.command(help="Workflow configuration.", name="config")
 @click.option("--json", "in_json", is_flag=True, help="Output in json format.")
 def config_cmd(in_json: bool):
-
     config_path = Path(WORKFLOWS_BASE) / WORKFLOWS_CONFIG_FILENAME
     config_content = {}
     if config_path.exists():
@@ -1,14 +1,14 @@
-# pylint: disable=invalid-name
-
 """
 Commands for managing the python environment of workflows.
 """

 import sys
 from pathlib import Path
-from typing import Optional, List
+from typing import List, Optional

 import click
-from devchat.workflow.env_manager import PyEnvManager, MAMBA_PY_ENVS
+
+from devchat.workflow.env_manager import MAMBA_PY_ENVS, PyEnvManager


 def _get_all_env_names() -> List[str]:
@@ -19,11 +19,7 @@ def _get_all_env_names() -> List[str]:
     excludes = ["devchat", "devchat-ask", "devchat-commands"]

     envs_path = Path(MAMBA_PY_ENVS)
-    envs = [
-        env.name
-        for env in envs_path.iterdir()
-        if env.is_dir() and env.name not in excludes
-    ]
+    envs = [env.name for env in envs_path.iterdir() if env.is_dir() and env.name not in excludes]
     return envs

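Collapsing the comprehension onto one line is possible here because the formatter's line limit appears to be 100 characters. A minimal standalone equivalent of the same directory scan (function and parameter names are illustrative, not from the source):

```python
from pathlib import Path
from typing import List, Set


def subdirs(base: str, excludes: Set[str]) -> List[str]:
    # Path.iterdir() yields direct children only; keep directories and
    # drop the reserved environment names.
    return [p.name for p in Path(base).iterdir() if p.is_dir() and p.name not in excludes]
```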
@@ -42,9 +38,7 @@ def list_envs():
     required=False,
     type=str,
 )
-@click.option(
-    "--all", "all_flag", help="Remove all the python envs of workflows.", is_flag=True
-)
+@click.option("--all", "all_flag", help="Remove all the python envs of workflows.", is_flag=True)
 def remove(env_name: Optional[str] = None, all_flag: bool = False):
     if not env_name and not all_flag:
         click.echo("Please provide the name of the python env to remove.")
@@ -1,19 +1,19 @@
 import json
+from dataclasses import asdict, dataclass, field
 from pathlib import Path
-from typing import List, Set, Tuple, Dict
-from dataclasses import dataclass, asdict, field
+from typing import Dict, List, Set, Tuple

 import click
 import oyaml as yaml
 import yaml as pyyaml

+from devchat.utils import get_logger
 from devchat.workflow.namespace import get_prioritized_namespace_path
 from devchat.workflow.path import COMMAND_FILENAMES
-from devchat.utils import get_logger


 logger = get_logger(__name__)


 @dataclass
 class WorkflowMeta:
     name: str

@@ -27,9 +27,7 @@ class WorkflowMeta:
         return f"{'*' if self.active else ' '} {self.name} ({self.namespace})"


-def iter_namespace(
-    ns_path: str, existing_names: Set[str]
-) -> Tuple[List[WorkflowMeta], Set[str]]:
+def iter_namespace(ns_path: str, existing_names: Set[str]) -> Tuple[List[WorkflowMeta], Set[str]]:
     """
     Get all workflows under the namespace path.

@@ -1,27 +1,25 @@
-# pylint: disable=invalid-name
-
 import os
 import shutil
 import tempfile
 import zipfile
-from typing import List, Optional, Tuple
-from pathlib import Path
 from datetime import datetime
+from pathlib import Path
+from typing import List, Optional, Tuple

 import click
 import requests

+from devchat.utils import get_logger
 from devchat.workflow.path import (
     CHAT_DIR,
+    CUSTOM_BASE,
     WORKFLOWS_BASE,
     WORKFLOWS_BASE_NAME,
-    CUSTOM_BASE,
 )
-from devchat.utils import get_logger

 HAS_GIT = False
 try:
-    from git import Repo, InvalidGitRepositoryError, GitCommandError
+    from git import GitCommandError, InvalidGitRepositoryError, Repo
 except ImportError:
     pass
 else:
@@ -254,9 +252,7 @@ def update_by_git(workflow_base: Path):
     remote_main_hash = repo.commit(f"origin/{DEFAULT_BRANCH}").hexsha

     if local_main_hash == remote_main_hash:
-        click.echo(
-            f"Local branch is up-to-date with remote {DEFAULT_BRANCH}. Skip update."
-        )
+        click.echo(f"Local branch is up-to-date with remote {DEFAULT_BRANCH}. Skip update.")
         return

     try:
@@ -290,15 +286,11 @@ def copy_workflows_usr():
         shutil.copytree(old_usr_dir, new_usr_dir)
         click.echo(f"Copied {old_usr_dir} to {new_usr_dir} successfully.")
     else:
-        click.echo(
-            f"Skip copying usr dir. old exists: {old_exists}, new exists: {new_exists}."
-        )
+        click.echo(f"Skip copying usr dir. old exists: {old_exists}, new exists: {new_exists}.")


 @click.command(help="Update the workflow_base dir.")
-@click.option(
-    "-f", "--force", is_flag=True, help="Force update the workflows to the latest main."
-)
+@click.option("-f", "--force", is_flag=True, help="Force update the workflows to the latest main.")
 def update(force: bool):
     click.echo(f"Updating wf repo... force: {force}")
     click.echo(f"WORKFLOWS_BASE: {WORKFLOWS_BASE}")
@@ -1,14 +1,12 @@
-# pylint: disable=invalid-name
 import os
-import sys
 import subprocess
-from typing import Optional, Dict
+import sys
+from typing import Dict, Optional

 from .envs import MAMBA_BIN_PATH
 from .path import MAMBA_PY_ENVS, MAMBA_ROOT
-from .user_setting import USER_SETTINGS
 from .schema import ExternalPyConf
+from .user_setting import USER_SETTINGS

 # CONDA_FORGE = [
 #     "https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/",
@@ -28,8 +26,10 @@ def _get_external_envs() -> Dict[str, ExternalPyConf]:

     return external_pythons

+
 EXTERNAL_ENVS = _get_external_envs()

+
 class PyEnvManager:
     mamba_bin = MAMBA_BIN_PATH
     mamba_root = MAMBA_ROOT
@@ -82,15 +82,11 @@ class PyEnvManager:
         ]
         env = os.environ.copy()
         env.pop("PYTHONPATH")
-        with subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
-        ) as proc:
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) as proc:
             proc.wait()

             if proc.returncode != 0:
-                print(
-                    f"Failed to install requirements: {requirements_file}", flush=True
-                )
+                print(f"Failed to install requirements: {requirements_file}", flush=True)
                 return False

         return True
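One detail worth knowing about the collapsed `with subprocess.Popen(...) as proc:` form: `Popen` has been a context manager since Python 3.2, and leaving the block closes the pipes, so the explicit `proc.wait()` before checking `returncode` is the important part. A minimal sketch of the same pattern (the command is illustrative):

```python
import subprocess

cmd = ["python", "--version"]  # illustrative command
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
    proc.wait()  # returncode is only meaningful after the child exits
    if proc.returncode != 0:
        print("command failed", flush=True)
```

Note that with piped stdout/stderr and a chatty child process, `proc.communicate()` is the safer call, since `wait()` can deadlock once the pipe buffers fill.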
@@ -149,9 +145,7 @@ class PyEnvManager:
             f"python={py_version}",
             "-y",
         ]
-        with subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        ) as proc:
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
             proc.wait()

             if proc.returncode != 0:
@@ -178,9 +172,7 @@ class PyEnvManager:
             self.mamba_root,
             "-y",
         ]
-        with subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        ) as proc:
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
             proc.wait()

             if proc.returncode != 0:
@@ -1,6 +1,7 @@
 """
 Explicitly define the environment variables used in the workflow engine.
 """
+
 import os

 PYTHON_PATH = os.environ.get("PYTHONPATH", "")
@@ -4,15 +4,17 @@ Namespace management for workflows

 import os
 from typing import List
-from pydantic import BaseModel, Extra, ValidationError
+
 import oyaml as yaml
+from pydantic import BaseModel, Extra, ValidationError

 from devchat.utils import get_logger

 from .path import (
-    CUSTOM_BASE,
-    MERICO_WORKFLOWS,
     COMMUNITY_WORKFLOWS,
+    CUSTOM_BASE,
     CUSTOM_CONFIG_FILE,
+    MERICO_WORKFLOWS,
 )

 logger = get_logger(__name__)
@@ -1,7 +1,7 @@
 import re
-from typing import Optional, List, Dict, Union
+from typing import Dict, List, Optional, Union

-from pydantic import BaseModel, validator, Extra, ValidationError
+from pydantic import BaseModel, Extra, ValidationError, validator


 class WorkflowPyConf(BaseModel):
@@ -10,7 +10,7 @@ class WorkflowPyConf(BaseModel):
     env_name: Optional[str]  # python env name, will use the workflow name if not set

     @validator("version")
-    def validate_version(cls, value):  # pylint: disable=no-self-argument
+    def validate_version(cls, value):
         pattern = r"^\d+\.\d+(\.\d+)?$"
         if not re.match(pattern, value):
             raise ValidationError(
@@ -41,7 +41,7 @@ class WorkflowConfig(BaseModel):
     help: Optional[Union[str, Dict[str, str]]] = None

     @validator("input_required", pre=True)
-    def to_boolean(cls, value):  # pylint: disable=no-self-argument
+    def to_boolean(cls, value):
         return value.lower() == "required"

     class Config:
@@ -1,15 +1,14 @@
-# pylint: disable=invalid-name
-
+import json
 import os
+import shlex
+import subprocess
 import sys
 import threading
-import subprocess
-import shlex
-import json
-from typing import Dict, Tuple, List
 from enum import Enum
-from .schema import WorkflowConfig, RuntimeParameter
+from typing import Dict, List, Tuple

 from .path import WORKFLOWS_BASE
+from .schema import RuntimeParameter, WorkflowConfig


 class BuiltInVars(str, Enum):
@@ -47,9 +46,7 @@ class WorkflowStep:
         """
         return self._kwargs.get("run", "")

-    def _setup_env(
-        self, wf_config: WorkflowConfig, rt_param: RuntimeParameter
-    ) -> Dict[str, str]:
+    def _setup_env(self, wf_config: WorkflowConfig, rt_param: RuntimeParameter) -> Dict[str, str]:
         """
         Setup the environment variables for the subprocess.
         """
@@ -96,8 +93,7 @@ class WorkflowStep:
         if BuiltInVars.workflow_python in command_raw:
             if not rt_param.workflow_python:
                 raise ValueError(
-                    "The command uses $workflow_python, "
-                    "but the workflow_python is not set yet."
+                    "The command uses $workflow_python, " "but the workflow_python is not set yet."
                 )

         args = []
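The `"..." "..."` pair on one line is Python's implicit string-literal concatenation: the formatter joins the two fragments onto a single line but does not merge the literals themselves. For example:

```python
message = "The command uses $workflow_python, " "but the workflow_python is not set yet."
# Adjacent literals are concatenated at compile time:
assert message == "The command uses $workflow_python, but the workflow_python is not set yet."
```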
@@ -129,10 +125,7 @@ class WorkflowStep:

         return args

-    def run(
-        self, wf_config: WorkflowConfig, rt_param: RuntimeParameter
-    ) -> Tuple[int, str, str]:
+    def run(self, wf_config: WorkflowConfig, rt_param: RuntimeParameter) -> Tuple[int, str, str]:
         """
         Run the step in a subprocess.

@@ -1,7 +1,9 @@
 from pathlib import Path

 import oyaml as yaml

+from .path import USER_SETTINGS_FILENAME, WORKFLOWS_BASE
 from .schema import UserSettings
-from .path import WORKFLOWS_BASE, USER_SETTINGS_FILENAME


 def _load_user_settings() -> UserSettings:

@@ -20,4 +22,5 @@ def _load_user_settings() -> UserSettings:

     return UserSettings()

+
 USER_SETTINGS = _load_user_settings()
@@ -1,15 +1,14 @@
-# pylint: disable=invalid-name
-
 import os
 import sys
-from typing import Optional, Tuple, List, Dict
-import oyaml as yaml
-from .step import WorkflowStep
-from .schema import WorkflowConfig, RuntimeParameter
-from .path import COMMAND_FILENAMES
-from .namespace import get_prioritized_namespace_path
+from typing import Dict, List, Optional, Tuple

-from .env_manager import PyEnvManager, EXTERNAL_ENVS
+import oyaml as yaml
+
+from .env_manager import EXTERNAL_ENVS, PyEnvManager
+from .namespace import get_prioritized_namespace_path
+from .path import COMMAND_FILENAMES
+from .schema import RuntimeParameter, WorkflowConfig
+from .step import WorkflowStep


 class Workflow:
@@ -47,9 +46,7 @@ class Workflow:
             workflow_name = striped.split()[0][1:]

             # remove the trigger prefix and the workflow name
-            actual_input = user_input.replace(
-                f"{Workflow.TRIGGER_PREFIX}{workflow_name}", "", 1
-            )
+            actual_input = user_input.replace(f"{Workflow.TRIGGER_PREFIX}{workflow_name}", "", 1)
             return workflow_name, actual_input

     @staticmethod
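The collapsed `replace(..., 1)` call is doing the trigger parsing for inputs like `/commit polish the message`: the third argument limits the replacement to the first occurrence, so a workflow name that also appears later in the input is left untouched. A standalone sketch of that logic; the prefix value `"/"` is assumed for illustration and is not confirmed by this hunk:

```python
TRIGGER_PREFIX = "/"  # assumed value for illustration


def parse_trigger(user_input: str):
    stripped = user_input.strip()
    workflow_name = stripped.split()[0][1:]  # drop the leading prefix char
    # Remove only the first occurrence of the trigger token.
    actual_input = user_input.replace(f"{TRIGGER_PREFIX}{workflow_name}", "", 1)
    return workflow_name, actual_input


print(parse_trigger("/commit polish the message"))  # ('commit', ' polish the message')
```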