diff --git a/site-packages/GitPython-3.1.41.dist-info/AUTHORS b/site-packages/GitPython-3.1.41.dist-info/AUTHORS new file mode 100644 index 0000000..3b97c94 --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/AUTHORS @@ -0,0 +1,57 @@ +GitPython was originally written by Michael Trier. +GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich. + +Contributors are: + +-Michael Trier +-Alan Briolat +-Florian Apolloner +-David Aguilar +-Jelmer Vernooij +-Steve Frécinaux +-Kai Lautaportti +-Paul Sowden +-Sebastian Thiel +-Jonathan Chu +-Vincent Driessen +-Phil Elson +-Bernard `Guyzmo` Pratz +-Timothy B. Hartman +-Konstantin Popov +-Peter Jones +-Anson Mansfield +-Ken Odegard +-Alexis Horgix Chotard +-Piotr Babij +-Mikuláš Poul +-Charles Bouchard-Légaré +-Yaroslav Halchenko +-Tim Swast +-William Luc Ritchie +-David Host +-A. Jesse Jiryu Davis +-Steven Whitman +-Stefan Stancu +-César Izurieta +-Arthur Milchior +-Anil Khatri +-JJ Graham +-Ben Thayer +-Dries Kennes +-Pratik Anurag +-Harmon +-Liam Beguin +-Ram Rachum +-Alba Mendez +-Robert Westman +-Hugo van Kemenade +-Hiroki Tokunaga +-Julien Mauroy +-Patrick Gerard +-Luke Twist +-Joseph Hale +-Santos Gallegos +-Wenhan Zhu +-Eliah Kagan + +Portions derived from other open source works and are clearly marked. diff --git a/site-packages/GitPython-3.1.41.dist-info/INSTALLER b/site-packages/GitPython-3.1.41.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/site-packages/GitPython-3.1.41.dist-info/LICENSE b/site-packages/GitPython-3.1.41.dist-info/LICENSE new file mode 100644 index 0000000..ba8a219 --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/LICENSE @@ -0,0 +1,29 @@ +Copyright (C) 2008, 2009 Michael Trier and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of the GitPython project nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/site-packages/GitPython-3.1.41.dist-info/METADATA b/site-packages/GitPython-3.1.41.dist-info/METADATA
new file mode 100644
index 0000000..9a9d4a9
--- /dev/null
+++ b/site-packages/GitPython-3.1.41.dist-info/METADATA
@@ -0,0 +1,340 @@
+Metadata-Version: 2.1
+Name: GitPython
+Version: 3.1.41
+Summary: GitPython is a Python library used to interact with Git repositories
+Home-page: https://github.com/gitpython-developers/GitPython
+Author: Sebastian Thiel, Michael Trier
+Author-email: byronimo@gmail.com, mtrier@gmail.com
+License: BSD-3-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Typing :: Typed
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: gitdb <5,>=4.0.1
+Requires-Dist: typing-extensions >=3.7.4.3 ; python_version < "3.8"
+Provides-Extra: test
+Requires-Dist: black ; extra == 'test'
+Requires-Dist: coverage[toml] ; extra == 'test'
+Requires-Dist: ddt !=1.4.3,>=1.1.1 ; extra == 'test'
+Requires-Dist: mypy ; extra == 'test'
+Requires-Dist: pre-commit ; extra == 'test'
+Requires-Dist: pytest >=7.3.1 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-instafail ; extra == 'test'
+Requires-Dist: pytest-mock ; extra == 'test'
+Requires-Dist: pytest-sugar ; extra == 'test'
+Requires-Dist: sumtypes ; extra == 'test'
+Requires-Dist: mock ; (python_version < "3.8") and extra == 'test'
+
+![Python package](https://github.com/gitpython-developers/GitPython/workflows/Python%20package/badge.svg)
+[![Documentation Status](https://readthedocs.org/projects/gitpython/badge/?version=stable)](https://readthedocs.org/projects/gitpython/?badge=stable)
+[![Packaging status](https://repology.org/badge/tiny-repos/python:gitpython.svg)](https://repology.org/metapackage/python:gitpython/versions)
+
+## [Gitoxide](https://github.com/Byron/gitoxide): A peek into the future…
+
+I started working on GitPython in 2009, back in the days when Python was 'my thing' and I had great plans with it.
+Of course, back in the day, I didn't really know what I was doing, and this shows in many places. Somewhat similar to
+Python, this happens to be 'good enough', but at the same time it is deeply flawed and broken beyond repair.
+
+By now, GitPython is widely used and I am sure there is a good reason for that; it's something to be proud of and happy about.
+The community is maintaining the software and is keeping it relevant, for which I am absolutely grateful. For the time to come I am happy to continue maintaining GitPython, remaining hopeful that one day it won't be needed anymore.
+
+More than 15 years after my first meeting with 'git' I am still excited about it, and am happy to finally have the tools and
+probably the skills to scratch that itch of mine: implement `git` in a way that makes tool creation a piece of cake for most.
+
+If you like the idea and want to learn more, please head over to [gitoxide](https://github.com/Byron/gitoxide), an
+implementation of 'git' in [Rust](https://www.rust-lang.org).
+
+## GitPython
+
+GitPython is a Python library used to interact with git repositories, high-level like git-porcelain,
+or low-level like git-plumbing.
+
+It provides abstractions of git objects for easy access of repository data, often backed by calling the `git`
+command-line program. (A short usage sketch follows the install instructions below.)
+
+### DEVELOPMENT STATUS
+
+This project is in **maintenance mode**, which means that
+
+- …there will be no feature development, unless these are contributed
+- …there will be no bug fixes, unless they are relevant to the safety of users, or contributed
+- …issues will be responded to with waiting times of up to a month
+
+The project is open to contributions of all kinds, as well as new maintainers.
+
+### REQUIREMENTS
+
+GitPython needs the `git` executable to be installed on the system and available in your `PATH` for most operations.
+If it is not in your `PATH`, you can help GitPython find it by setting
+the `GIT_PYTHON_GIT_EXECUTABLE=` environment variable.
+
+- Git (1.7.x or newer)
+- Python >= 3.7
+
+The dependencies are listed in `./requirements.txt` and `./test-requirements.txt`.
+The installer takes care of installing them for you.
+
+### INSTALL
+
+GitPython and its required package dependencies can be installed in any of the following ways, all of which should typically be done in a [virtual environment](https://docs.python.org/3/tutorial/venv.html).
+
+#### From PyPI
+
+To obtain and install a copy [from PyPI](https://pypi.org/project/GitPython/), run:
+
+```bash
+pip install GitPython
+```
+
+(A distribution package can also be downloaded for manual installation at [the PyPI page](https://pypi.org/project/GitPython/).)
+
+#### From downloaded source code
+
+If you have downloaded the source code, run this from inside the unpacked `GitPython` directory:
+
+```bash
+pip install .
+```
+
+#### By cloning the source code repository
+
+To clone [the GitHub repository](https://github.com/gitpython-developers/GitPython) from source to work on the code, you can do it like so:
+
+```bash
+git clone https://github.com/gitpython-developers/GitPython
+cd GitPython
+./init-tests-after-clone.sh
+```
+
+On Windows, `./init-tests-after-clone.sh` can be run in a Git Bash shell.
+
+If you are cloning [your own fork](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/about-forks), then replace the above `git clone` command with one that gives the URL of your fork. Or use this [`gh`](https://cli.github.com/) command (assuming you have `gh` and your fork is called `GitPython`):
+
+```bash
+gh repo clone GitPython
+```
+
+Having cloned the repo, create and activate your [virtual environment](https://docs.python.org/3/tutorial/venv.html).
+
+Then make an [editable install](https://pip.pypa.io/en/stable/topics/local-project-installs/#editable-installs):
+
+```bash
+pip install -e ".[test]"
+```
+
+In the less common case that you do not want to install test dependencies, `pip install -e .` can be used instead.
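Not part of the upstream METADATA: as a hedged illustration of the high-level API the README above describes, here is a minimal sketch, assuming the current directory is a git repository with at least one commit.

```python
from git import Repo  # GitPython's top-level entry point

# Open an existing repository (assumes "." is a git working tree).
repo = Repo(".")
print(repo.active_branch.name)

# High-level, porcelain-like access to history:
for commit in repo.iter_commits(max_count=3):
    print(commit.hexsha[:7], commit.summary)
```

`Repo` objects can also be used as context managers (`with Repo(".") as repo: ...`) to release resources deterministically, which relates to the resource-leakage caveat in the Limitations section below.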
+
+#### With editable *dependencies* (not preferred, and rarely needed)
+
+In rare cases, you may want to work on GitPython and one or both of its [gitdb](https://github.com/gitpython-developers/gitdb) and [smmap](https://github.com/gitpython-developers/smmap) dependencies at the same time, with changes in your local working copy of gitdb or smmap immediately reflected in the behavior of your local working copy of GitPython. This can be done by making editable installations of those dependencies in the same virtual environment where you install GitPython.
+
+If you want to do that *and* you want the versions in GitPython's git submodules to be used, then pass `-e git/ext/gitdb` and/or `-e git/ext/gitdb/gitdb/ext/smmap` to `pip install`. This can be done in any order, and in separate `pip install` commands or the same one, so long as `-e` appears before *each* path. For example, you can install GitPython, gitdb, and smmap editably in the currently active virtual environment this way:
+
+```bash
+pip install -e ".[test]" -e git/ext/gitdb -e git/ext/gitdb/gitdb/ext/smmap
+```
+
+The submodules must have been cloned for that to work, but that will already be the case if you have run `./init-tests-after-clone.sh`. You can use `pip list` to check which packages are installed editably and which are installed normally.
+
+To reiterate, this approach should only rarely be used. For most development it is preferable to allow the gitdb and smmap dependencies to be retrieved automatically from PyPI in their latest stable packaged versions.
+
+### Limitations
+
+#### Leakage of System Resources
+
+GitPython is not suited for long-running processes (like daemons) as it tends to
+leak system resources. It was written at a time when destructors (as implemented
+in the `__del__` method) still ran deterministically.
+
+In case you still want to use it in such a context, you will want to search the
+codebase for `__del__` implementations and call these yourself when you see fit.
+
+Another way to ensure proper cleanup of resources is to factor out GitPython into a
+separate process which can be dropped periodically.
+
+#### Windows support
+
+See [Issue #525](https://github.com/gitpython-developers/GitPython/issues/525).
+
+### RUNNING TESTS
+
+_Important_: Right after cloning this repository, please be sure to have executed
+the `./init-tests-after-clone.sh` script in the repository root. Otherwise
+you will encounter test failures.
+
+#### Install test dependencies
+
+Ensure testing libraries are installed. This is taken care of already if you installed with:
+
+```bash
+pip install -e ".[test]"
+```
+
+Otherwise, you can run:
+
+```bash
+pip install -r test-requirements.txt
+```
+
+#### Test commands
+
+To test, run:
+
+```bash
+pytest
+```
+
+To lint, and apply automatic code formatting, run:
+
+```bash
+pre-commit run --all-files
+```
+
+- Linting without modifying code can be done with: `make lint`
+- Auto-formatting without other lint checks can be done with: `black .`
+
+To typecheck, run:
+
+```bash
+mypy -p git
+```
+
+#### CI (and tox)
+
+The same linting, and the tests on all supported Python versions, will be run:
+
+- Upon submitting a pull request.
+- On each push, *if* you have a fork with GitHub Actions enabled.
+- Locally, if you run [`tox`](https://tox.wiki/) (this skips any Python versions you don't have installed).
+
+#### Configuration files
+
+Specific tools:
+
+- Configurations for `mypy`, `pytest`, `coverage.py`, and `black` are in `./pyproject.toml`.
+- Configuration for `flake8` is in the `./.flake8` file.
+
+Orchestration tools:
+
+- Configuration for `pre-commit` is in the `./.pre-commit-config.yaml` file.
+- Configuration for `tox` is in `./tox.ini`.
+- Configuration for GitHub Actions (CI) is in files inside `./.github/workflows/`.
+
+### Contributions
+
+Please have a look at the [contributions file][contributing].
+
+### INFRASTRUCTURE
+
+- [User Documentation](http://gitpython.readthedocs.org)
+- [Questions and Answers](http://stackexchange.com/filters/167317/gitpython)
+- Please post on Stack Overflow and use the `gitpython` tag
+- [Issue Tracker](https://github.com/gitpython-developers/GitPython/issues)
+  - Post reproducible bugs and feature requests as a new issue.
+    Please be sure to provide the following information if posting bugs:
+    - GitPython version (e.g. `import git; git.__version__`)
+    - Python version (e.g. `python --version`)
+    - The encountered stack-trace, if applicable
+    - Enough information to allow reproducing the issue
+
+### How to make a new release
+
+1. Update/verify the **version** in the `VERSION` file.
+2. Update/verify that the `doc/source/changes.rst` changelog file was updated. It should include a link to the forthcoming release page: `https://github.com/gitpython-developers/GitPython/releases/tag/`
+3. Commit everything.
+4. Run `git tag -s ` to tag the version in Git.
+5. _Optionally_ create and activate a [virtual environment](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment). (Then the next step can install `build` and `twine`.)
+6. Run `make release`.
+7. Go to [GitHub Releases](https://github.com/gitpython-developers/GitPython/releases) and publish a new one with the recently pushed tag. Generate the changelog.
+
+### How to verify a release (DEPRECATED)
+
+Note that what follows is deprecated and future releases won't be signed anymore.
+More details about how it came to that can be found [in this issue](https://github.com/gitpython-developers/gitdb/issues/77).
+
+----
+
+Please only use releases from `pypi` as you can verify the respective source
+tarballs.
+
+This script shows how to verify the tarball was indeed created by the authors of
+this project:
+
+```bash
+curl https://files.pythonhosted.org/packages/09/bc/ae32e07e89cc25b9e5c793d19a1e5454d30a8e37d95040991160f942519e/GitPython-3.1.8-py3-none-any.whl > gitpython.whl
+curl https://files.pythonhosted.org/packages/09/bc/ae32e07e89cc25b9e5c793d19a1e5454d30a8e37d95040991160f942519e/GitPython-3.1.8-py3-none-any.whl.asc > gitpython-signature.asc
+gpg --verify gitpython-signature.asc gitpython.whl
+```
+
+which outputs
+
+```bash
+gpg: Signature made Fr 4 Sep 10:04:50 2020 CST
+gpg: using RSA key 27C50E7F590947D7273A741E85194C08421980C9
+gpg: Good signature from "Sebastian Thiel (YubiKey USB-C) " [ultimate]
+gpg: aka "Sebastian Thiel (In Rust I trust) " [ultimate]
+```
+
+You can verify that the keyid indeed matches the release-signature key provided in this
+repository by looking at the key's details:
+
+```bash
+gpg --list-packets ./release-verification-key.asc
+```
+
+You can verify that the commit adding it was also signed by it using:
+
+```bash
+git show --show-signature ./release-verification-key.asc
+```
+
+If you would like to trust it permanently, you can import and sign it:
+
+```bash
+gpg --import ./release-verification-key.asc
+gpg --edit-key 4C08421980C9
+
+> sign
+> save
+```
+
+### Projects using GitPython
+
+- [PyDriller](https://github.com/ishepard/pydriller)
+- [Kivy Designer](https://github.com/kivy/kivy-designer)
+- [Prowl](https://github.com/nettitude/Prowl)
+- [Python Taint](https://github.com/python-security/pyt)
+- [Buster](https://github.com/axitkhurana/buster)
+- [git-ftp](https://github.com/ezyang/git-ftp)
+- [Git-Pandas](https://github.com/wdm0006/git-pandas)
+- [PyGitUp](https://github.com/msiemens/PyGitUp)
+- [PyJFuzz](https://github.com/mseclab/PyJFuzz)
+- [Loki](https://github.com/Neo23x0/Loki)
+- [Omniwallet](https://github.com/OmniLayer/omniwallet)
+- [GitViper](https://github.com/BeayemX/GitViper)
+- [Git Gud](https://github.com/bthayer2365/git-gud)
+
+### LICENSE
+
+[3-Clause BSD License](https://opensource.org/license/bsd-3-clause/), also known as the New BSD License. See the [LICENSE file][license].
+ +[contributing]: https://github.com/gitpython-developers/GitPython/blob/main/CONTRIBUTING.md +[license]: https://github.com/gitpython-developers/GitPython/blob/main/LICENSE diff --git a/site-packages/GitPython-3.1.41.dist-info/RECORD b/site-packages/GitPython-3.1.41.dist-info/RECORD new file mode 100644 index 0000000..a707c1c --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/RECORD @@ -0,0 +1,82 @@ +GitPython-3.1.41.dist-info/AUTHORS,sha256=te58dvSkF-Ru6CFfw39VqZ0BP5IrCf_qobf7LiiM4RQ,2242 +GitPython-3.1.41.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +GitPython-3.1.41.dist-info/LICENSE,sha256=hvyUwyGpr7wRUUcTURuv3tIl8lEA3MD3NQ6CvCMbi-s,1503 +GitPython-3.1.41.dist-info/METADATA,sha256=O2h4kXxWFoJHypZyGIzBW71W9PfEGxz9rX_RU-Oz2kI,14410 +GitPython-3.1.41.dist-info/RECORD,, +GitPython-3.1.41.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +GitPython-3.1.41.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4 +git/__init__.py,sha256=tsqz7gpXuDRVq_4v32kxsNcSL8OpPLPhnNE6x5YWVek,3565 +git/__pycache__/__init__.cpython-38.pyc,, +git/__pycache__/cmd.cpython-38.pyc,, +git/__pycache__/compat.cpython-38.pyc,, +git/__pycache__/config.cpython-38.pyc,, +git/__pycache__/db.cpython-38.pyc,, +git/__pycache__/diff.cpython-38.pyc,, +git/__pycache__/exc.cpython-38.pyc,, +git/__pycache__/remote.cpython-38.pyc,, +git/__pycache__/types.cpython-38.pyc,, +git/__pycache__/util.cpython-38.pyc,, +git/cmd.py,sha256=O7dS-QIt0CvrcP_8yfIjs1FkWCN40NLQ4PKCu9iN4GE,57713 +git/compat.py,sha256=WGmGWAwpeLnp1hGwXkDm57hGjvd00mqhXXFXtoeAmRQ,3430 +git/config.py,sha256=LPI99x5VU2W6Osr_0JfbIf8DQ41-Bckjd5QWpkeMy44,34541 +git/db.py,sha256=pHczdHStrA7kDNlZXWkHtHdCm16dxomWP1caoiCd9qQ,2373 +git/diff.py,sha256=C9IDk3DlvJSPqecTgW1znkP_17tb5Dw0A3zGTbcxFac,23583 +git/exc.py,sha256=EdTxJ_TMRqN377xDHSJnKthlyd9bxP4lrtxmy_4nPgs,6990 +git/index/__init__.py,sha256=p2THvsiwJxsHfmZ8y1qgRQYyixUTjCqA7UvFU3x-Gj4,245 +git/index/__pycache__/__init__.cpython-38.pyc,, +git/index/__pycache__/base.cpython-38.pyc,, +git/index/__pycache__/fun.cpython-38.pyc,, +git/index/__pycache__/typ.cpython-38.pyc,, +git/index/__pycache__/util.cpython-38.pyc,, +git/index/base.py,sha256=u7ZNDT_rrArRtMJdxWKUpGB548n9F_gsk98Cofe-WZM,58674 +git/index/fun.py,sha256=15bHv4NGc7ix5mrBXdpcv45UtAeFTxiBl_0AaOiPUJI,16480 +git/index/typ.py,sha256=YXcZkqj27zYyIIl63d6Ziy_HnaQfBYY7myx46gXwGDs,6399 +git/index/util.py,sha256=m29sIVFrviQBVCNpm0wneyDT2_a42ZGuHuA9qfjKyAU,3807 +git/objects/__init__.py,sha256=VCNAKk0DuLrDywRqpKvsclYPG3FQvvkfp2NKa5xvEtM,936 +git/objects/__pycache__/__init__.cpython-38.pyc,, +git/objects/__pycache__/base.cpython-38.pyc,, +git/objects/__pycache__/blob.cpython-38.pyc,, +git/objects/__pycache__/commit.cpython-38.pyc,, +git/objects/__pycache__/fun.cpython-38.pyc,, +git/objects/__pycache__/tag.cpython-38.pyc,, +git/objects/__pycache__/tree.cpython-38.pyc,, +git/objects/__pycache__/util.cpython-38.pyc,, +git/objects/base.py,sha256=5x8TiAstqVTfblTA8Rx9cOKf4OxRiEvVsxHyyok-zSo,7935 +git/objects/blob.py,sha256=saOGrQIHLMIptPPtZW9bBorcKTCD0AJTph5ENXYqy2g,990 +git/objects/commit.py,sha256=xSnO9k2Ol-lQaOhDqUig_MoVbVE0ciX1Fw0H4vnH61w,28991 +git/objects/fun.py,sha256=AVuHV94BZzEvu9Bk1n-BKvBVt4qJr_-ajlLDMrZWFX8,8786 +git/objects/submodule/__init__.py,sha256=5hDkqdqcqYZ6cQXP4S8JQT7UBWnrbYzo7GrrEn8b_TI,227 +git/objects/submodule/__pycache__/__init__.cpython-38.pyc,, +git/objects/submodule/__pycache__/base.cpython-38.pyc,, +git/objects/submodule/__pycache__/root.cpython-38.pyc,, 
+git/objects/submodule/__pycache__/util.cpython-38.pyc,, +git/objects/submodule/base.py,sha256=ohl5El3LyJ_J3_LhnpRWtv5fftLbApQl1iQYmou3I0E,61728 +git/objects/submodule/root.py,sha256=3RWww-v-WxoI6ejLlzJsToVGQwO8j3Q3gSyjibf_JP4,19960 +git/objects/submodule/util.py,sha256=T0JZAuxjWc7xvz37lnkOu60gDReC5Pmtvx98NYSQS20,3459 +git/objects/tag.py,sha256=hJ54onZiN_phrx6OpB0aZUaaBtHJx-uTZrgP8f9FfWQ,3823 +git/objects/tree.py,sha256=8l3ZN9LX3_yGq0gF_pCPv6SgTmv5_AIVbsfCXPUEiWE,14391 +git/objects/util.py,sha256=6ixwVW_ZWoptjNuZB8WHCzqGunc-Z4m_nS98awkiAnQ,22908 +git/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +git/refs/__init__.py,sha256=2tQPcfYLW_o_-46vCax0VhvAE8KMsE2OquGJ7TVi1C0,435 +git/refs/__pycache__/__init__.cpython-38.pyc,, +git/refs/__pycache__/head.cpython-38.pyc,, +git/refs/__pycache__/log.cpython-38.pyc,, +git/refs/__pycache__/reference.cpython-38.pyc,, +git/refs/__pycache__/remote.cpython-38.pyc,, +git/refs/__pycache__/symbolic.cpython-38.pyc,, +git/refs/__pycache__/tag.cpython-38.pyc,, +git/refs/head.py,sha256=B3LunzZo3Ez8gOzwsYePDaW7PG7ZtxFcD55Uhb9h1oI,10095 +git/refs/log.py,sha256=itRAZ-N-dT4FcoDrm1kBbCb4kI6727_CJQAEx8q1QKc,12072 +git/refs/reference.py,sha256=MV2ASx1pzGY_VBJrOmZpWTyI0tOeuxuYVGinFh7PgLU,5622 +git/refs/remote.py,sha256=wD_9tzh7LcHXy4ri9DugIVMhwHB8eBR1ygDxCnlWkPs,2779 +git/refs/symbolic.py,sha256=9oBKCCoBLSWnEWdO1h9JiQLkhNRc4ShSukxJ3ZvLYq8,33371 +git/refs/tag.py,sha256=XTJr--vJj2F__HvdtKmpyP1tJ1ADMY5evPLyrCjStTs,4547 +git/remote.py,sha256=_wwAYyyHYB6M_IW0LY3xbJUHERV5h3tKPoQzcTPzA6k,45368 +git/repo/__init__.py,sha256=R88bSPa2-QCSwaCJ4fjWgiNPcneCFs1eajYI1R-apg0,212 +git/repo/__pycache__/__init__.cpython-38.pyc,, +git/repo/__pycache__/base.cpython-38.pyc,, +git/repo/__pycache__/fun.cpython-38.pyc,, +git/repo/base.py,sha256=SOF2Xj3jEVKkujf6xJceZrN9DdPD6Thjc-ews2f29qY,56529 +git/repo/fun.py,sha256=fxKH9TKKO3JY294V749V6nqriDmcfyEgJgJ-I5NQIrs,13083 +git/types.py,sha256=gOGkkEo6FV5caSfH1bpm4nvgavhSZuIoOyBnfydFQ14,3062 +git/util.py,sha256=jPOEExcW9Dekccv6HIkMeDh0YAU5vf4NirlzsxvxpIU,42299 diff --git a/site-packages/GitPython-3.1.41.dist-info/WHEEL b/site-packages/GitPython-3.1.41.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/site-packages/GitPython-3.1.41.dist-info/top_level.txt b/site-packages/GitPython-3.1.41.dist-info/top_level.txt new file mode 100644 index 0000000..5664e30 --- /dev/null +++ b/site-packages/GitPython-3.1.41.dist-info/top_level.txt @@ -0,0 +1 @@ +git diff --git a/site-packages/PyYAML-6.0.1.dist-info/RECORD b/site-packages/PyYAML-6.0.1.dist-info/RECORD index 74bef23..63c334c 100644 --- a/site-packages/PyYAML-6.0.1.dist-info/RECORD +++ b/site-packages/PyYAML-6.0.1.dist-info/RECORD @@ -2,29 +2,28 @@ PyYAML-6.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwv PyYAML-6.0.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 PyYAML-6.0.1.dist-info/METADATA,sha256=UNNF8-SzzwOKXVo-kV5lXUGH2_wDWMBmGxqISpp5HQk,2058 PyYAML-6.0.1.dist-info/RECORD,, -PyYAML-6.0.1.dist-info/WHEEL,sha256=d8sjrkKo8h1ZY8Oxdq-K-58JEo6nLEfNKzkM0CYZtr0,108 +PyYAML-6.0.1.dist-info/WHEEL,sha256=5VvO-kIIQ_V5NA8SIsPWgxoMaPBQ8_Und1TjPh_V_7Y,108 PyYAML-6.0.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 _yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 
-_yaml/__pycache__/__init__.cpython-39.pyc,, +_yaml/__pycache__/__init__.cpython-38.pyc,, yaml/__init__.py,sha256=bhl05qSeO-1ZxlSRjGrvl2m9nrXb1n9-GQatTN0Mrqc,12311 -yaml/__pycache__/__init__.cpython-39.pyc,, -yaml/__pycache__/composer.cpython-39.pyc,, -yaml/__pycache__/constructor.cpython-39.pyc,, -yaml/__pycache__/cyaml.cpython-39.pyc,, -yaml/__pycache__/dumper.cpython-39.pyc,, -yaml/__pycache__/emitter.cpython-39.pyc,, -yaml/__pycache__/error.cpython-39.pyc,, -yaml/__pycache__/events.cpython-39.pyc,, -yaml/__pycache__/loader.cpython-39.pyc,, -yaml/__pycache__/nodes.cpython-39.pyc,, -yaml/__pycache__/parser.cpython-39.pyc,, -yaml/__pycache__/reader.cpython-39.pyc,, -yaml/__pycache__/representer.cpython-39.pyc,, -yaml/__pycache__/resolver.cpython-39.pyc,, -yaml/__pycache__/scanner.cpython-39.pyc,, -yaml/__pycache__/serializer.cpython-39.pyc,, -yaml/__pycache__/tokens.cpython-39.pyc,, -yaml/_yaml.cpython-39-darwin.so,sha256=kzFH9vEUWVcU9mnwLz6lWG8d0QRDh1OUP3ztTRyYtRI,378855 +yaml/__pycache__/__init__.cpython-38.pyc,, +yaml/__pycache__/composer.cpython-38.pyc,, +yaml/__pycache__/constructor.cpython-38.pyc,, +yaml/__pycache__/cyaml.cpython-38.pyc,, +yaml/__pycache__/dumper.cpython-38.pyc,, +yaml/__pycache__/emitter.cpython-38.pyc,, +yaml/__pycache__/error.cpython-38.pyc,, +yaml/__pycache__/events.cpython-38.pyc,, +yaml/__pycache__/loader.cpython-38.pyc,, +yaml/__pycache__/nodes.cpython-38.pyc,, +yaml/__pycache__/parser.cpython-38.pyc,, +yaml/__pycache__/reader.cpython-38.pyc,, +yaml/__pycache__/representer.cpython-38.pyc,, +yaml/__pycache__/resolver.cpython-38.pyc,, +yaml/__pycache__/scanner.cpython-38.pyc,, +yaml/__pycache__/serializer.cpython-38.pyc,, +yaml/__pycache__/tokens.cpython-38.pyc,, yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 diff --git a/site-packages/PyYAML-6.0.1.dist-info/WHEEL b/site-packages/PyYAML-6.0.1.dist-info/WHEEL index 5438d5d..7f1d612 100644 --- a/site-packages/PyYAML-6.0.1.dist-info/WHEEL +++ b/site-packages/PyYAML-6.0.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.41.3) Root-Is-Purelib: false -Tag: cp39-cp39-macosx_11_0_arm64 +Tag: cp38-cp38-macosx_11_0_arm64 diff --git a/site-packages/anyio-4.2.0.dist-info/INSTALLER b/site-packages/anyio-4.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/site-packages/anyio-4.2.0.dist-info/LICENSE b/site-packages/anyio-4.2.0.dist-info/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/site-packages/anyio-4.2.0.dist-info/METADATA b/site-packages/anyio-4.2.0.dist-info/METADATA new file mode 100644 index 0000000..267d118 --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 4.2.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author-email: Alex Grönholm +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: idna >=2.8 +Requires-Dist: sniffio >=1.1 +Requires-Dist: exceptiongroup >=1.0.2 ; python_version < "3.11" +Requires-Dist: typing-extensions >=4.1 ; python_version < "3.11" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: Sphinx >=7 ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc' +Provides-Extra: test +Requires-Dist: anyio[trio] ; extra == 'test' +Requires-Dist: coverage[toml] >=7 ; extra == 'test' +Requires-Dist: exceptiongroup >=1.2.0 ; extra == 'test' +Requires-Dist: hypothesis >=4.0 ; extra == 'test' +Requires-Dist: psutil >=5.9 ; extra == 'test' +Requires-Dist: pytest >=7.0 ; extra == 'test' +Requires-Dist: pytest-mock >=3.6.1 ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: uvloop >=0.17 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio >=0.23 ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. 
image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. 
_Hypothesis: https://hypothesis.works/ diff --git a/site-packages/anyio-4.2.0.dist-info/RECORD b/site-packages/anyio-4.2.0.dist-info/RECORD new file mode 100644 index 0000000..24e77cb --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-4.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.2.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.2.0.dist-info/METADATA,sha256=l-MautdCW4v0JPznRzh5eFwLiCItPSkFKZeZIyyF4-I,4599 +anyio-4.2.0.dist-info/RECORD,, +anyio-4.2.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +anyio-4.2.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.2.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=CxUxIHOIONI3KpsDLCg-dI6lQaDkW_4Zhtu5jWt1XO8,4344 +anyio/__pycache__/__init__.cpython-38.pyc,, +anyio/__pycache__/from_thread.cpython-38.pyc,, +anyio/__pycache__/lowlevel.cpython-38.pyc,, +anyio/__pycache__/pytest_plugin.cpython-38.pyc,, +anyio/__pycache__/to_process.cpython-38.pyc,, +anyio/__pycache__/to_thread.cpython-38.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-38.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-38.pyc,, +anyio/_backends/__pycache__/_trio.cpython-38.pyc,, +anyio/_backends/_asyncio.py,sha256=GgoboTDGh5h90BUD7v6J9bBAr6NaPJ4fpADi3wQlixE,81654 +anyio/_backends/_trio.py,sha256=4BnDfrBHFMD-nAAJJlv_PaNiPoKDzYk9BeACKiYEfm0,35182 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-38.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-38.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-38.pyc,, +anyio/_core/__pycache__/_fileio.cpython-38.pyc,, +anyio/_core/__pycache__/_resources.cpython-38.pyc,, +anyio/_core/__pycache__/_signals.cpython-38.pyc,, +anyio/_core/__pycache__/_sockets.cpython-38.pyc,, +anyio/_core/__pycache__/_streams.cpython-38.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-38.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-38.pyc,, +anyio/_core/__pycache__/_tasks.cpython-38.pyc,, +anyio/_core/__pycache__/_testing.cpython-38.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-38.pyc,, +anyio/_core/_eventloop.py,sha256=uCwWwGtN9Tf46nkcWLyku8iYEFWCkSjPW0AkDbnpCM0,4408 +anyio/_core/_exceptions.py,sha256=wUmhDu80qEB7z9EdCqUwVEhNUlNEok4_W2-rC6sCAUQ,2078 +anyio/_core/_fileio.py,sha256=MozNW2tPbiCsB8pPWUtlD2aYtdsWa9rYjFBWq20f6wA,19269 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=rDOVxtugZDgC5AhfW3lrwsre2n9Pj_adoRUidBiF6dA,878 +anyio/_core/_sockets.py,sha256=HwOMg0xUPw0T7N-aipxq_4OEM703llh3I9_YIg9a4XM,24048 +anyio/_core/_streams.py,sha256=Z8ZlTY6xom5EszrMsgCT3TphiT4JIlQG-y33CrD0NQY,1811 +anyio/_core/_subprocesses.py,sha256=ulmmLlUU917wF9SbsXGFXoc0Q-KDHAS8uLNRyyaQt3s,5394 +anyio/_core/_synchronization.py,sha256=rHrB3tipN1AQOqmxDOge0dzY4AQUelckt0NXX57fJcs,18417 +anyio/_core/_tasks.py,sha256=pvVEX2Fw159sf0ypAPerukKsZgRRwvFFedVW52nR2Vk,4764 +anyio/_core/_testing.py,sha256=i97S5rSWIFqfCGPm4mEMdiJaUpVskk-cWEjarWTeXXs,1964 +anyio/_core/_typedattr.py,sha256=QTbaIwZEewhwAKvbBHFBcO_cRhNP_lXjAobEldzExCU,2499 +anyio/abc/__init__.py,sha256=U44_s3BglL8BojWQiq0KuokvCqkunIp-ySH3GyRXxAc,2681 +anyio/abc/__pycache__/__init__.cpython-38.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-38.pyc,, 
+anyio/abc/__pycache__/_resources.cpython-38.pyc,, +anyio/abc/__pycache__/_sockets.cpython-38.pyc,, +anyio/abc/__pycache__/_streams.cpython-38.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-38.pyc,, +anyio/abc/__pycache__/_tasks.cpython-38.pyc,, +anyio/abc/__pycache__/_testing.cpython-38.pyc,, +anyio/abc/_eventloop.py,sha256=QOtkEHCkoE8czGu4RNzZ_q-xNjC0nRyoS0QQJ5KTvYU,10097 +anyio/abc/_resources.py,sha256=KBJP3wGbvSfKfTjfOLL4QCJdeiaNwqqF_6FwPsmQssM,763 +anyio/abc/_sockets.py,sha256=XdZ42TQ1omZN9Ec3HUfTMWG_i-21yMjXQ_FFslAZtzQ,6269 +anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=q3bEbCF46I2tQjYSbRdbaavq0R_HOV9JAjzQr8biprU,2747 +anyio/abc/_testing.py,sha256=EiWEaIVy15lHszO000Xp4FsB13NbBvC1BpUci47B5zs,1829 +anyio/from_thread.py,sha256=UTEY_NsiqQRukO3L3riQx4Eegulj3RyLlbITJz7pvLM,15749 +anyio/lowlevel.py,sha256=0awnMh05kA5WUNaOBoQZSImBj0xLNRlYOuMGGiztWnM,4185 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=TBgRAfT-Oxy6efhO1Tziq54NND3Jy4dRmwkMmQXSvhI,5386 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-38.pyc,, +anyio/streams/__pycache__/buffered.cpython-38.pyc,, +anyio/streams/__pycache__/file.cpython-38.pyc,, +anyio/streams/__pycache__/memory.cpython-38.pyc,, +anyio/streams/__pycache__/stapled.cpython-38.pyc,, +anyio/streams/__pycache__/text.cpython-38.pyc,, +anyio/streams/__pycache__/tls.cpython-38.pyc,, +anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 +anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 +anyio/streams/memory.py,sha256=bqN9YwAPA6ZtdohOsq_YBpLFlRHR5k-W8y0pD_jznb8,9296 +anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 +anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 +anyio/streams/tls.py,sha256=ev-6yNOGcIkziIkcIfKj8VmLqQJW-iDBJttaKgKDsF4,12752 +anyio/to_process.py,sha256=lx_bt0CUJsS1eSlraw662OpCjRgGXowoyf1Q-i-kOxo,9535 +anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/site-packages/anyio-4.2.0.dist-info/WHEEL b/site-packages/anyio-4.2.0.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/site-packages/anyio-4.2.0.dist-info/entry_points.txt b/site-packages/anyio-4.2.0.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/site-packages/anyio-4.2.0.dist-info/top_level.txt b/site-packages/anyio-4.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/site-packages/anyio-4.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/site-packages/anyio/__init__.py b/site-packages/anyio/__init__.py index 29fb356..7bfe231 100644 --- a/site-packages/anyio/__init__.py +++ b/site-packages/anyio/__init__.py @@ -1,165 +1,72 @@ from __future__ import annotations -__all__ = ( - "maybe_async", - "maybe_async_cm", - "run", - "sleep", - "sleep_forever", - "sleep_until", - "current_time", - "get_all_backends", - "get_cancelled_exc_class", - "BrokenResourceError", - 
"BrokenWorkerProcess", - "BusyResourceError", - "ClosedResourceError", - "DelimiterNotFound", - "EndOfStream", - "ExceptionGroup", - "IncompleteRead", - "TypedAttributeLookupError", - "WouldBlock", - "AsyncFile", - "Path", - "open_file", - "wrap_file", - "aclose_forcefully", - "open_signal_receiver", - "connect_tcp", - "connect_unix", - "create_tcp_listener", - "create_unix_listener", - "create_udp_socket", - "create_connected_udp_socket", - "getaddrinfo", - "getnameinfo", - "wait_socket_readable", - "wait_socket_writable", - "create_memory_object_stream", - "run_process", - "open_process", - "create_lock", - "CapacityLimiter", - "CapacityLimiterStatistics", - "Condition", - "ConditionStatistics", - "Event", - "EventStatistics", - "Lock", - "LockStatistics", - "Semaphore", - "SemaphoreStatistics", - "create_condition", - "create_event", - "create_semaphore", - "create_capacity_limiter", - "open_cancel_scope", - "fail_after", - "move_on_after", - "current_effective_deadline", - "TASK_STATUS_IGNORED", - "CancelScope", - "create_task_group", - "TaskInfo", - "get_current_task", - "get_running_tasks", - "wait_all_tasks_blocked", - "run_sync_in_worker_thread", - "run_async_from_thread", - "run_sync_from_thread", - "current_default_worker_thread_limiter", - "create_blocking_portal", - "start_blocking_portal", - "typed_attribute", - "TypedAttributeSet", - "TypedAttributeProvider", -) - from typing import Any -from ._core._compat import maybe_async, maybe_async_cm -from ._core._eventloop import ( - current_time, - get_all_backends, - get_cancelled_exc_class, - run, - sleep, - sleep_forever, - sleep_until, -) -from ._core._exceptions import ( - BrokenResourceError, - BrokenWorkerProcess, - BusyResourceError, - ClosedResourceError, - DelimiterNotFound, - EndOfStream, - ExceptionGroup, - IncompleteRead, - TypedAttributeLookupError, - WouldBlock, -) -from ._core._fileio import AsyncFile, Path, open_file, wrap_file -from ._core._resources import aclose_forcefully -from ._core._signals import open_signal_receiver +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from 
._core._sockets import create_connected_udp_socket as create_connected_udp_socket from ._core._sockets import ( - connect_tcp, - connect_unix, - create_connected_udp_socket, - create_tcp_listener, - create_udp_socket, - create_unix_listener, - getaddrinfo, - getnameinfo, - wait_socket_readable, - wait_socket_writable, + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, ) -from ._core._streams import create_memory_object_stream -from ._core._subprocesses import open_process, run_process +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter from ._core._synchronization import ( - CapacityLimiter, - CapacityLimiterStatistics, - Condition, - ConditionStatistics, - Event, - EventStatistics, - Lock, - LockStatistics, - Semaphore, - SemaphoreStatistics, - create_capacity_limiter, - create_condition, - create_event, - create_lock, - create_semaphore, -) -from ._core._tasks import ( - TASK_STATUS_IGNORED, - CancelScope, - create_task_group, - current_effective_deadline, - fail_after, - move_on_after, - open_cancel_scope, -) -from ._core._testing import ( - TaskInfo, - get_current_task, - get_running_tasks, - wait_all_tasks_blocked, -) -from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute - -# Re-exported here, for backwards compatibility -# isort: off -from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread -from .from_thread import ( - create_blocking_portal, - run_async_from_thread, - run_sync_from_thread, - start_blocking_portal, + CapacityLimiterStatistics as CapacityLimiterStatistics, ) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as 
get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute # Re-export imports so they look like they live directly in this package key: str diff --git a/site-packages/anyio/_backends/_asyncio.py b/site-packages/anyio/_backends/_asyncio.py index bfdb4ea..e884f56 100644 --- a/site-packages/anyio/_backends/_asyncio.py +++ b/site-packages/anyio/_backends/_asyncio.py @@ -6,23 +6,34 @@ import concurrent.futures import math import socket import sys +import threading +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] from collections import OrderedDict, deque +from collections.abc import AsyncIterator, Generator, Iterable from concurrent.futures import Future +from contextlib import suppress from contextvars import Context, copy_context from dataclasses import dataclass from functools import partial, wraps from inspect import ( CORO_RUNNING, CORO_SUSPENDED, - GEN_RUNNING, - GEN_SUSPENDED, getcoroutinestate, - getgeneratorstate, + iscoroutine, ) from io import IOBase from os import PathLike from queue import Queue +from signal import Signals from socket import AddressFamily, SocketKind from threading import Thread from types import TracebackType @@ -33,15 +44,13 @@ from typing import ( Awaitable, Callable, Collection, + ContextManager, Coroutine, - Generator, - Iterable, Mapping, Optional, Sequence, Tuple, TypeVar, - Union, cast, ) from weakref import WeakKeyDictionary @@ -49,7 +58,6 @@ from weakref import WeakKeyDictionary import sniffio from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc -from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable from .._core._eventloop import claim_worker_thread, threadlocals from .._core._exceptions import ( BrokenResourceError, @@ -58,40 +66,220 @@ from .._core._exceptions import ( EndOfStream, WouldBlock, ) -from .._core._exceptions import ExceptionGroup as BaseExceptionGroup -from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter from .._core._synchronization import Event as BaseEvent from .._core._synchronization import ResourceGuard from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -if sys.version_info >= (3, 8): - - def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]: - return task.get_coro() - +if sys.version_info >= (3, 10): + from typing import ParamSpec else: + from typing_extensions import ParamSpec - def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]: - return task._coro +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack -from asyncio import all_tasks, create_task, current_task, get_running_loop -from asyncio import run as native_run + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False -def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: - return [cb for cb, context in task._callbacks] + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine 
was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() T_Retval = TypeVar("T_Retval") T_contra = TypeVar("T_contra", contravariant=True) - -# Check whether there is native support for task names in asyncio (3.8+) -_native_task_names 
= hasattr(asyncio.Task, "get_name") - +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") _root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") @@ -104,7 +292,8 @@ def find_root_task() -> asyncio.Task: # Look for a task that has been started via run_until_complete() for task in all_tasks(): if task._callbacks and not task.done(): - for cb in _get_task_callbacks(task): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: if ( cb is _run_until_complete_cb or getattr(cb, "__module__", None) == "uvloop.loop" @@ -136,87 +325,22 @@ def get_callable_name(func: Callable) -> str: # Event loop # -_run_vars = ( - WeakKeyDictionary() -) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] - -current_token = get_running_loop +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() def _task_started(task: asyncio.Task) -> bool: """Return ``True`` if the task has been started and has not finished.""" - coro = cast(Coroutine[Any, Any, Any], get_coro(task)) try: - return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + return getcoroutinestate(task.get_coro()) in (CORO_RUNNING, CORO_SUSPENDED) except AttributeError: - try: - return getgeneratorstate(cast(Generator, coro)) in ( - GEN_RUNNING, - GEN_SUSPENDED, - ) - except AttributeError: - # task coro is async_genenerator_asend https://bugs.python.org/issue37771 - raise Exception(f"Cannot determine if task {task} has started or not") - - -def _maybe_set_event_loop_policy( - policy: asyncio.AbstractEventLoopPolicy | None, use_uvloop: bool -) -> None: - # On CPython, use uvloop when possible if no other policy has been given and if not - # explicitly disabled - if policy is None and use_uvloop and sys.implementation.name == "cpython": - try: - import uvloop - except ImportError: - pass - else: - # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) - if not hasattr( - asyncio.AbstractEventLoop, "shutdown_default_executor" - ) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"): - policy = uvloop.EventLoopPolicy() - - if policy is not None: - asyncio.set_event_loop_policy(policy) - - -def run( - func: Callable[..., Awaitable[T_Retval]], - *args: object, - debug: bool = False, - use_uvloop: bool = False, - policy: asyncio.AbstractEventLoopPolicy | None = None, -) -> T_Retval: - @wraps(func) - async def wrapper() -> T_Retval: - task = cast(asyncio.Task, current_task()) - task_state = TaskState(None, get_callable_name(func), None) - _task_states[task] = task_state - if _native_task_names: - task.set_name(task_state.name) - - try: - return await func(*args) - finally: - del _task_states[task] - - _maybe_set_event_loop_policy(policy, use_uvloop) - return native_run(wrapper(), debug=debug) - - -# -# Miscellaneous -# - -sleep = asyncio.sleep + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None # # Timeouts and cancellation # -CancelledError = asyncio.CancelledError - class CancelScope(BaseCancelScope): def __new__( @@ -228,14 +352,16 @@ class CancelScope(BaseCancelScope): self._deadline = deadline self._shield = shield self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() self._cancel_called = False + self._cancelled_caught = False self._active = False self._timeout_handle: asyncio.TimerHandle | None = None self._cancel_handle: asyncio.Handle | None = None self._tasks: set[asyncio.Task] = set() self._host_task: 
asyncio.Task | None = None - self._timeout_expired = False self._cancel_calls: int = 0 + self._cancelling: int | None = None def __enter__(self) -> CancelScope: if self._active: @@ -248,19 +374,23 @@ class CancelScope(BaseCancelScope): try: task_state = _task_states[host_task] except KeyError: - task_name = host_task.get_name() if _native_task_names else None - task_state = TaskState(None, task_name, self) + task_state = TaskState(None, self) _task_states[host_task] = task_state else: self._parent_scope = task_state.cancel_scope task_state.cancel_scope = self + if self._parent_scope is not None: + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.remove(host_task) self._timeout() self._active = True + if sys.version_info >= (3, 11): + self._cancelling = self._host_task.cancelling() # Start cancelling the host task if the scope was cancelled before entering if self._cancel_called: - self._deliver_cancellation() + self._deliver_cancellation(self) return self @@ -292,56 +422,60 @@ class CancelScope(BaseCancelScope): self._timeout_handle = None self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) host_task_state.cancel_scope = self._parent_scope - # Restart the cancellation effort in the farthest directly cancelled parent scope if this - # one was shielded - if self._shield: - self._deliver_cancellation_to_parent() + # Restart the cancellation effort in the closest directly cancelled parent + # scope if this one was shielded + self._restart_cancellation_in_parent() - if exc_val is not None: - exceptions = ( - exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] - ) - if all(isinstance(exc, CancelledError) for exc in exceptions): - if self._timeout_expired: - return self._uncancel() - elif not self._cancel_called: - # Task was cancelled natively - return None - elif not self._parent_cancelled(): - # This scope was directly cancelled - return self._uncancel() + if self._cancel_called and exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError): + self._cancelled_caught = self._uncancel(exc) + if self._cancelled_caught: + break + + return self._cancelled_caught return None - def _uncancel(self) -> bool: - if sys.version_info < (3, 11) or self._host_task is None: + def _uncancel(self, cancelled_exc: CancelledError) -> bool: + if sys.version_info < (3, 9) or self._host_task is None: self._cancel_calls = 0 return True - # Uncancel all AnyIO cancellations - for i in range(self._cancel_calls): - self._host_task.uncancel() + # Undo all cancellations done by this scope + if self._cancelling is not None: + while self._cancel_calls: + self._cancel_calls -= 1 + if self._host_task.uncancel() <= self._cancelling: + return True self._cancel_calls = 0 - return not self._host_task.cancelling() + return f"Cancelled by cancel scope {id(self):x}" in cancelled_exc.args def _timeout(self) -> None: if self._deadline != math.inf: loop = get_running_loop() if loop.time() >= self._deadline: - self._timeout_expired = True self.cancel() else: self._timeout_handle = loop.call_at(self._deadline, self._timeout) - def _deliver_cancellation(self) -> None: + def _deliver_cancellation(self, origin: CancelScope) -> bool: """ Deliver cancellation to directly contained tasks and nested cancel scopes. - Schedule another run at the end if we still have tasks eligible for cancellation. 
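# A minimal usage sketch, assuming anyio >= 4 on the asyncio backend, of the
# semantics that _deliver_cancellation() and the deadline/shield plumbing
# implement; helper names here are illustrative only.
import anyio

async def _demo_cancel_scopes() -> None:
    # move_on_after() creates a CancelScope whose _timeout() callback calls
    # cancel() once the deadline passes; cancelled_caught (the property added
    # in this patch) reports whether the scope absorbed the cancellation.
    with anyio.move_on_after(0.1) as scope:
        await anyio.sleep(1)
    assert scope.cancelled_caught

    # Shielded child scopes are skipped during cancellation delivery, so the
    # inner sleep completes even though the outer scope is already cancelled.
    with anyio.CancelScope() as outer:
        outer.cancel()
        with anyio.CancelScope(shield=True):
            await anyio.sleep(0.05)

anyio.run(_demo_cancel_scopes)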
+ Schedule another run at the end if we still have tasks eligible for + cancellation. + + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + """ should_retry = False current = current_task() @@ -349,37 +483,46 @@ class CancelScope(BaseCancelScope): if task._must_cancel: # type: ignore[attr-defined] continue - # The task is eligible for cancellation if it has started and is not in a cancel - # scope shielded from this one - cancel_scope = _task_states[task].cancel_scope - while cancel_scope is not self: - if cancel_scope is None or cancel_scope._shield: - break - else: - cancel_scope = cancel_scope._parent_scope - else: - should_retry = True - if task is not current and ( - task is self._host_task or _task_started(task) - ): + # The task is eligible for cancellation if it has started + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): self._cancel_calls += 1 - task.cancel() + if sys.version_info >= (3, 9): + task.cancel(f"Cancelled by cancel scope {id(origin):x}") + else: + task.cancel() + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry # Schedule another callback if there are still tasks left - if should_retry: - self._cancel_handle = get_running_loop().call_soon( - self._deliver_cancellation - ) - else: - self._cancel_handle = None + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None - def _deliver_cancellation_to_parent(self) -> None: - """Start cancellation effort in the farthest directly cancelled parent scope""" + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. 
+ + """ scope = self._parent_scope - scope_to_cancel: CancelScope | None = None while scope is not None: - if scope._cancel_called and scope._cancel_handle is None: - scope_to_cancel = scope + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break # No point in looking beyond any shielded scope if scope._shield: @@ -387,9 +530,6 @@ class CancelScope(BaseCancelScope): scope = scope._parent_scope - if scope_to_cancel is not None: - scope_to_cancel._deliver_cancellation() - def _parent_cancelled(self) -> bool: # Check whether any parent has been cancelled cancel_scope = self._parent_scope @@ -401,7 +541,7 @@ class CancelScope(BaseCancelScope): return False - def cancel(self) -> DeprecatedAwaitable: + def cancel(self) -> None: if not self._cancel_called: if self._timeout_handle: self._timeout_handle.cancel() @@ -409,9 +549,7 @@ class CancelScope(BaseCancelScope): self._cancel_called = True if self._host_task is not None: - self._deliver_cancellation() - - return DeprecatedAwaitable(self.cancel) + self._deliver_cancellation(self) @property def deadline(self) -> float: @@ -431,6 +569,10 @@ class CancelScope(BaseCancelScope): def cancel_called(self) -> bool: return self._cancel_called + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + @property def shield(self) -> bool: return self._shield @@ -440,59 +582,7 @@ class CancelScope(BaseCancelScope): if self._shield != value: self._shield = value if not value: - self._deliver_cancellation_to_parent() - - -async def checkpoint() -> None: - await sleep(0) - - -async def checkpoint_if_cancelled() -> None: - task = current_task() - if task is None: - return - - try: - cancel_scope = _task_states[task].cancel_scope - except KeyError: - return - - while cancel_scope: - if cancel_scope.cancel_called: - await sleep(0) - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - -async def cancel_shielded_checkpoint() -> None: - with CancelScope(shield=True): - await sleep(0) - - -def current_effective_deadline() -> float: - try: - cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] - except KeyError: - return math.inf - - deadline = math.inf - while cancel_scope: - deadline = min(deadline, cancel_scope.deadline) - if cancel_scope._cancel_called: - deadline = -math.inf - break - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - return deadline - - -def current_time() -> float: - return get_running_loop().time() + self._restart_cancellation_in_parent() # @@ -502,20 +592,14 @@ def current_time() -> float: class TaskState: """ - Encapsulates auxiliary task information that cannot be added to the Task instance itself - because there are no guarantees about its implementation. + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
""" - __slots__ = "parent_id", "name", "cancel_scope" + __slots__ = "parent_id", "cancel_scope" - def __init__( - self, - parent_id: int | None, - name: str | None, - cancel_scope: CancelScope | None, - ): + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): self.parent_id = parent_id - self.name = name self.cancel_scope = cancel_scope @@ -527,12 +611,6 @@ _task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, Task # -class ExceptionGroup(BaseExceptionGroup): - def __init__(self, exceptions: list[BaseException]): - super().__init__() - self.exceptions = exceptions - - class _AsyncioTaskStatus(abc.TaskStatus): def __init__(self, future: asyncio.Future, parent_id: int): self._future = future @@ -550,11 +628,22 @@ class _AsyncioTaskStatus(abc.TaskStatus): _task_states[task].parent_id = self._parent_id +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + class TaskGroup(abc.TaskGroup): def __init__(self) -> None: self.cancel_scope: CancelScope = CancelScope() self._active = False self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() async def __aenter__(self) -> TaskGroup: self.cancel_scope.__enter__() @@ -570,98 +659,49 @@ class TaskGroup(abc.TaskGroup): ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) if exc_val is not None: self.cancel_scope.cancel() - self._exceptions.append(exc_val) + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) - while self.cancel_scope._tasks: + cancelled_exc_while_waiting_tasks: CancelledError | None = None + while self._tasks: try: - await asyncio.wait(self.cancel_scope._tasks) - except asyncio.CancelledError: + await asyncio.wait(self._tasks) + except CancelledError as exc: + # This task was cancelled natively; reraise the CancelledError later + # unless this task was already interrupted by another exception self.cancel_scope.cancel() + if cancelled_exc_while_waiting_tasks is None: + cancelled_exc_while_waiting_tasks = exc self._active = False - if not self.cancel_scope._parent_cancelled(): - exceptions = self._filter_cancellation_errors(self._exceptions) - else: - exceptions = self._exceptions + if self._exceptions: + raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) - try: - if len(exceptions) > 1: - if all( - isinstance(e, CancelledError) and not e.args for e in exceptions - ): - # Tasks were cancelled natively, without a cancellation message - raise CancelledError - else: - raise ExceptionGroup(exceptions) - elif exceptions and exceptions[0] is not exc_val: - raise exceptions[0] - except BaseException as exc: - # Clear the context here, as it can only be done in-flight. - # If the context is not cleared, it can result in recursive tracebacks (see #145). 
- exc.__context__ = None - raise + # Raise the CancelledError received while waiting for child tasks to exit, + # unless the context manager itself was previously exited with another + # exception, or if any of the child tasks raised an exception other than + # CancelledError + if cancelled_exc_while_waiting_tasks: + if exc_val is None or ignore_exception: + raise cancelled_exc_while_waiting_tasks return ignore_exception - @staticmethod - def _filter_cancellation_errors( - exceptions: Sequence[BaseException], - ) -> list[BaseException]: - filtered_exceptions: list[BaseException] = [] - for exc in exceptions: - if isinstance(exc, ExceptionGroup): - new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) - if len(new_exceptions) > 1: - filtered_exceptions.append(exc) - elif len(new_exceptions) == 1: - filtered_exceptions.append(new_exceptions[0]) - elif new_exceptions: - new_exc = ExceptionGroup(new_exceptions) - new_exc.__cause__ = exc.__cause__ - new_exc.__context__ = exc.__context__ - new_exc.__traceback__ = exc.__traceback__ - filtered_exceptions.append(new_exc) - elif not isinstance(exc, CancelledError) or exc.args: - filtered_exceptions.append(exc) - - return filtered_exceptions - - async def _run_wrapped_task( - self, coro: Coroutine, task_status_future: asyncio.Future | None - ) -> None: - # This is the code path for Python 3.7 on which asyncio freaks out if a task - # raises a BaseException. - __traceback_hide__ = __tracebackhide__ = True # noqa: F841 - task = cast(asyncio.Task, current_task()) - try: - await coro - except BaseException as exc: - if task_status_future is None or task_status_future.done(): - self._exceptions.append(exc) - self.cancel_scope.cancel() - else: - task_status_future.set_exception(exc) - else: - if task_status_future is not None and not task_status_future.done(): - task_status_future.set_exception( - RuntimeError("Child exited without calling task_status.started()") - ) - finally: - if task in self.cancel_scope._tasks: - self.cancel_scope._tasks.remove(task) - del _task_states[task] - def _spawn( self, - func: Callable[..., Awaitable[Any]], - args: tuple, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], name: object, task_status_future: asyncio.Future | None = None, ) -> asyncio.Task: def task_done(_task: asyncio.Task) -> None: - # This is the code path for Python 3.8+ - assert _task in self.cancel_scope._tasks - self.cancel_scope._tasks.remove(_task) + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) del _task_states[_task] try: @@ -674,8 +714,11 @@ class TaskGroup(abc.TaskGroup): if exc is not None: if task_status_future is None or task_status_future.done(): - self._exceptions.append(exc) - self.cancel_scope.cancel() + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._parent_cancelled(): + self.cancel_scope.cancel() else: task_status_future.set_exception(exc) elif task_status_future is not None and not task_status_future.done(): @@ -688,11 +731,6 @@ class TaskGroup(abc.TaskGroup): "This task group is not active; no new tasks can be started." 
) - options: dict[str, Any] = {} - name = get_callable_name(func) if name is None else str(name) - if _native_task_names: - options["name"] = name - kwargs = {} if task_status_future: parent_id = id(current_task()) @@ -703,46 +741,52 @@ class TaskGroup(abc.TaskGroup): parent_id = id(self.cancel_scope._host_task) coro = func(*args, **kwargs) - if not asyncio.iscoroutine(coro): + if not iscoroutine(coro): + prefix = f"{func.__module__}." if hasattr(func, "__module__") else "" raise TypeError( - f"Expected an async function, but {func} appears to be synchronous" + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" ) - foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame") - if foreign_coro or sys.version_info < (3, 8): - coro = self._run_wrapped_task(coro, task_status_future) - - task = create_task(coro, **options) - if not foreign_coro and sys.version_info >= (3, 8): - task.add_done_callback(task_done) + name = get_callable_name(func) if name is None else str(name) + task = create_task(coro, name=name) + task.add_done_callback(task_done) # Make the spawned task inherit the task group's cancel scope _task_states[task] = TaskState( - parent_id=parent_id, name=name, cancel_scope=self.cancel_scope + parent_id=parent_id, cancel_scope=self.cancel_scope ) self.cancel_scope._tasks.add(task) + self._tasks.add(task) return task def start_soon( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, ) -> None: self._spawn(func, args, name) async def start( self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> None: + ) -> Any: future: asyncio.Future = asyncio.Future() task = self._spawn(func, args, name, future) - # If the task raises an exception after sending a start value without a switch point - # between, the task group is cancelled and this method never proceeds to process the - # completed future. That's why we have to have a shielded cancel scope here. - with CancelScope(shield=True): - try: - return await future - except CancelledError: - task.cancel() - raise + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. 
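# A minimal sketch, assuming anyio >= 4, of the task_status handshake that
# start() implements: the child calls task_status.started(value), which
# resolves the future awaited below; the port value is purely illustrative.
import anyio
from anyio.abc import TaskStatus

async def _serve(
    *, task_status: TaskStatus[int] = anyio.TASK_STATUS_IGNORED
) -> None:
    task_status.started(8080)  # hypothetical "listening port"
    await anyio.sleep(0.1)  # keep running after reporting readiness

async def _demo_start() -> None:
    async with anyio.create_task_group() as tg:
        port = await tg.start(_serve)
        print("started on port", port)
        tg.cancel_scope.cancel()

anyio.run(_demo_start)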
+ try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise # @@ -767,15 +811,15 @@ class WorkerThread(Thread): self.idle_workers = idle_workers self.loop = root_task._loop self.queue: Queue[ - tuple[Context, Callable, tuple, asyncio.Future] | None + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None ] = Queue(2) - self.idle_since = current_time() + self.idle_since = AsyncIOBackend.current_time() self.stopping = False def _report_result( self, future: asyncio.Future, result: Any, exc: BaseException | None ) -> None: - self.idle_since = current_time() + self.idle_since = AsyncIOBackend.current_time() if not self.stopping: self.idle_workers.append(self) @@ -791,22 +835,24 @@ class WorkerThread(Thread): future.set_result(result) def run(self) -> None: - with claim_worker_thread("asyncio"): - threadlocals.loop = self.loop + with claim_worker_thread(AsyncIOBackend, self.loop): while True: item = self.queue.get() if item is None: # Shutdown command received return - context, func, args, future = item + context, func, args, future, cancel_scope = item if not future.cancelled(): result = None exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope try: result = context.run(func, *args) except BaseException as exc: exception = exc + finally: + del threadlocals.current_cancel_scope if not self.loop.is_closed(): self.loop.call_soon_threadsafe( @@ -831,81 +877,6 @@ _threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( _threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - await checkpoint() - - # If this is the first run in this event loop thread, set up the necessary variables - try: - idle_workers = _threadpool_idle_workers.get() - workers = _threadpool_workers.get() - except LookupError: - idle_workers = deque() - workers = set() - _threadpool_idle_workers.set(idle_workers) - _threadpool_workers.set(workers) - - async with (limiter or current_default_thread_limiter()): - with CancelScope(shield=not cancellable): - future: asyncio.Future = asyncio.Future() - root_task = find_root_task() - if not idle_workers: - worker = WorkerThread(root_task, workers, idle_workers) - worker.start() - workers.add(worker) - root_task.add_done_callback(worker.stop) - else: - worker = idle_workers.pop() - - # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer - now = current_time() - while idle_workers: - if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: - break - - expired_worker = idle_workers.popleft() - expired_worker.root_task.remove_done_callback(expired_worker.stop) - expired_worker.stop() - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - worker.queue.put_nowait((context, func, args, future)) - return await future - - -def run_sync_from_thread( - func: Callable[..., T_Retval], - *args: object, - loop: asyncio.AbstractEventLoop | None = None, -) -> T_Retval: - @wraps(func) - def wrapper() -> None: - try: - f.set_result(func(*args)) - except BaseException as exc: - f.set_exception(exc) - if not isinstance(exc, Exception): - raise - - f: concurrent.futures.Future[T_Retval] = Future() - loop = loop or threadlocals.loop - 
loop.call_soon_threadsafe(wrapper) - return f.result() - - -def run_async_from_thread( - func: Callable[..., Awaitable[T_Retval]], *args: object -) -> T_Retval: - f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( - func(*args), threadlocals.loop - ) - return f.result() - - class BlockingPortal(abc.BlockingPortal): def __new__(cls) -> BlockingPortal: return object.__new__(cls) @@ -916,20 +887,16 @@ class BlockingPortal(abc.BlockingPortal): def _spawn_task_from_thread( self, - func: Callable, - args: tuple, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], kwargs: dict[str, Any], name: object, - future: Future, + future: Future[T_Retval], ) -> None: - run_sync_from_thread( + AsyncIOBackend.run_sync_from_thread( partial(self._task_group.start_soon, name=name), - self._call_func, - func, - args, - kwargs, - future, - loop=self._loop, + (self._call_func, func, args, kwargs, future), + self._loop, ) @@ -1015,55 +982,17 @@ class Process(abc.Process): return self._stderr -async def open_process( - command: str | bytes | Sequence[str | bytes], - *, - shell: bool, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - cwd: str | bytes | PathLike | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> Process: - await checkpoint() - if shell: - process = await asyncio.create_subprocess_shell( - cast(Union[str, bytes], command), - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - else: - process = await asyncio.create_subprocess_exec( - *command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - - stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None - stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None - stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - def _forcibly_shutdown_process_pool_on_exit( workers: set[Process], _task: object ) -> None: """ Forcibly shuts down worker processes belonging to this event loop.""" - child_watcher: asyncio.AbstractChildWatcher | None - try: - child_watcher = asyncio.get_event_loop_policy().get_child_watcher() - except NotImplementedError: - child_watcher = None + child_watcher: asyncio.AbstractChildWatcher | None = None + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass # Close as much as possible (w/o async/await) to avoid warnings for process in workers: @@ -1078,14 +1007,15 @@ def _forcibly_shutdown_process_pool_on_exit( child_watcher.remove_child_handler(process.pid) -async def _shutdown_process_pool_on_exit(workers: set[Process]) -> None: +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: """ Shuts down worker processes belonging to this event loop. - NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). 
""" - process: Process + process: abc.Process try: await sleep(math.inf) except asyncio.CancelledError: @@ -1097,16 +1027,6 @@ async def _shutdown_process_pool_on_exit(workers: set[Process]) -> None: await process.aclose() -def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None: - kwargs: dict[str, Any] = ( - {"name": "AnyIO process pool shutdown task"} if _native_task_names else {} - ) - create_task(_shutdown_process_pool_on_exit(workers), **kwargs) - find_root_task().add_done_callback( - partial(_forcibly_shutdown_process_pool_on_exit, workers) - ) - - # # Sockets and networking # @@ -1193,7 +1113,7 @@ class SocketStream(abc.SocketStream): async def receive(self, max_bytes: int = 65536) -> bytes: with self._receive_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() if ( not self._protocol.read_event.is_set() @@ -1209,7 +1129,7 @@ class SocketStream(abc.SocketStream): if self._closed: raise ClosedResourceError from None elif self._protocol.exception: - raise self._protocol.exception + raise self._protocol.exception from None else: raise EndOfStream from None @@ -1218,8 +1138,8 @@ class SocketStream(abc.SocketStream): chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] self._protocol.read_queue.appendleft(leftover) - # If the read queue is empty, clear the flag so that the next call will block until - # data is available + # If the read queue is empty, clear the flag so that the next call will + # block until data is available if not self._protocol.read_queue: self._protocol.read_event.clear() @@ -1227,7 +1147,7 @@ class SocketStream(abc.SocketStream): async def send(self, item: bytes) -> None: with self._send_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() if self._closed: raise ClosedResourceError @@ -1263,14 +1183,13 @@ class SocketStream(abc.SocketStream): self._transport.abort() -class UNIXSocketStream(abc.SocketStream): +class _RawSocketMixin: _receive_future: asyncio.Future | None = None _send_future: asyncio.Future | None = None _closing = False def __init__(self, raw_socket: socket.socket): self.__raw_socket = raw_socket - self._loop = get_running_loop() self._receive_guard = ResourceGuard("reading from") self._send_guard = ResourceGuard("writing to") @@ -1284,7 +1203,7 @@ class UNIXSocketStream(abc.SocketStream): loop.remove_reader(self.__raw_socket) f = self._receive_future = asyncio.Future() - self._loop.add_reader(self.__raw_socket, f.set_result, None) + loop.add_reader(self.__raw_socket, f.set_result, None) f.add_done_callback(callback) return f @@ -1294,21 +1213,34 @@ class UNIXSocketStream(abc.SocketStream): loop.remove_writer(self.__raw_socket) f = self._send_future = asyncio.Future() - self._loop.add_writer(self.__raw_socket, f.set_result, None) + loop.add_writer(self.__raw_socket, f.set_result, None) f.add_done_callback(callback) return f + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): async def send_eof(self) -> None: with self._send_guard: self._raw_socket.shutdown(socket.SHUT_WR) async def receive(self, max_bytes: int = 65536) -> bytes: loop = get_running_loop() - await checkpoint() + await AsyncIOBackend.checkpoint() with self._receive_guard: while True: try: - data = self.__raw_socket.recv(max_bytes) + data = 
self._raw_socket.recv(max_bytes) except BlockingIOError: await self._wait_until_readable(loop) except OSError as exc: @@ -1324,12 +1256,12 @@ class UNIXSocketStream(abc.SocketStream): async def send(self, item: bytes) -> None: loop = get_running_loop() - await checkpoint() + await AsyncIOBackend.checkpoint() with self._send_guard: view = memoryview(item) while view: try: - bytes_sent = self.__raw_socket.send(view) + bytes_sent = self._raw_socket.send(view) except BlockingIOError: await self._wait_until_writable(loop) except OSError as exc: @@ -1348,11 +1280,11 @@ class UNIXSocketStream(abc.SocketStream): loop = get_running_loop() fds = array.array("i") - await checkpoint() + await AsyncIOBackend.checkpoint() with self._receive_guard: while True: try: - message, ancdata, flags, addr = self.__raw_socket.recvmsg( + message, ancdata, flags, addr = self._raw_socket.recvmsg( msglen, socket.CMSG_LEN(maxfds * fds.itemsize) ) except BlockingIOError: @@ -1394,13 +1326,13 @@ class UNIXSocketStream(abc.SocketStream): filenos.append(fd.fileno()) fdarray = array.array("i", filenos) - await checkpoint() + await AsyncIOBackend.checkpoint() with self._send_guard: while True: try: # The ignore can be removed after mypy picks up # https://github.com/python/typeshed/pull/5545 - self.__raw_socket.sendmsg( + self._raw_socket.sendmsg( [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] ) break @@ -1412,17 +1344,6 @@ class UNIXSocketStream(abc.SocketStream): else: raise BrokenResourceError from exc - async def aclose(self) -> None: - if not self._closing: - self._closing = True - if self.__raw_socket.fileno() != -1: - self.__raw_socket.close() - - if self._receive_future: - self._receive_future.set_result(None) - if self._send_future: - self._send_future.set_result(None) - class TCPSocketListener(abc.SocketListener): _accept_scope: CancelScope | None = None @@ -1442,7 +1363,7 @@ class TCPSocketListener(abc.SocketListener): raise ClosedResourceError with self._accept_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() with CancelScope() as self._accept_scope: try: client_sock, _addr = await self._loop.sock_accept(self._raw_socket) @@ -1492,7 +1413,7 @@ class UNIXSocketListener(abc.SocketListener): self._closed = False async def accept(self) -> abc.SocketStream: - await checkpoint() + await AsyncIOBackend.checkpoint() with self._accept_guard: while True: try: @@ -1542,7 +1463,7 @@ class UDPSocket(abc.UDPSocket): async def receive(self) -> tuple[bytes, IPSockAddrType]: with self._receive_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() # If the buffer is empty, ask for more data if not self._protocol.read_queue and not self._transport.is_closing(): @@ -1559,7 +1480,7 @@ class UDPSocket(abc.UDPSocket): async def send(self, item: UDPPacketType) -> None: with self._send_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() await self._protocol.write_event.wait() if self._closed: raise ClosedResourceError @@ -1590,7 +1511,7 @@ class ConnectedUDPSocket(abc.ConnectedUDPSocket): async def receive(self) -> bytes: with self._receive_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() # If the buffer is empty, ask for more data if not self._protocol.read_queue and not self._transport.is_closing(): @@ -1609,7 +1530,7 @@ class ConnectedUDPSocket(abc.ConnectedUDPSocket): async def send(self, item: bytes) -> None: with self._send_guard: - await checkpoint() + await AsyncIOBackend.checkpoint() await self._protocol.write_event.wait() if self._closed: raise 
ClosedResourceError @@ -1619,142 +1540,82 @@ class ConnectedUDPSocket(abc.ConnectedUDPSocket): self._transport.sendto(item) -async def connect_tcp( - host: str, port: int, local_addr: tuple[str, int] | None = None -) -> SocketStream: - transport, protocol = cast( - Tuple[asyncio.Transport, StreamProtocol], - await get_running_loop().create_connection( - StreamProtocol, host, port, local_addr=local_addr - ), - ) - transport.pause_reading() - return SocketStream(transport, protocol) +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return -async def connect_unix(path: str) -> UNIXSocketStream: - await checkpoint() - loop = get_running_loop() - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) - while True: - try: - raw_socket.connect(path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return UNIXSocketStream(raw_socket) +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data - -async def create_udp_socket( - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, -) -> UDPSocket | ConnectedUDPSocket: - result = await get_running_loop().create_datagram_endpoint( - DatagramProtocol, - local_addr=local_address, - remote_addr=remote_address, - family=family, - reuse_port=reuse_port, - ) - transport = result[0] - protocol = result[1] - if protocol.exception: - transport.close() - raise protocol.exception - - if not remote_address: - return UDPSocket(transport, protocol) - else: - return ConnectedUDPSocket(transport, protocol) - - -async def getaddrinfo( - host: bytes | str, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, -) -> GetAddrInfoReturnType: - # https://github.com/python/typeshed/pull/4304 - result = await get_running_loop().getaddrinfo( - host, port, family=family, type=type, proto=proto, flags=flags - ) - return cast(GetAddrInfoReturnType, result) - - -async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> tuple[str, str]: - return 
await get_running_loop().getnameinfo(sockaddr, flags) + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return _read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events") _write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events") -async def wait_socket_readable(sock: socket.socket) -> None: - await checkpoint() - try: - read_events = _read_events.get() - except LookupError: - read_events = {} - _read_events.set(read_events) - - if read_events.get(sock): - raise BusyResourceError("reading from") from None - - loop = get_running_loop() - event = read_events[sock] = asyncio.Event() - loop.add_reader(sock, event.set) - try: - await event.wait() - finally: - if read_events.pop(sock, None) is not None: - loop.remove_reader(sock) - readable = True - else: - readable = False - - if not readable: - raise ClosedResourceError - - -async def wait_socket_writable(sock: socket.socket) -> None: - await checkpoint() - try: - write_events = _write_events.get() - except LookupError: - write_events = {} - _write_events.set(write_events) - - if write_events.get(sock): - raise BusyResourceError("writing to") from None - - loop = get_running_loop() - event = write_events[sock] = asyncio.Event() - loop.add_writer(sock.fileno(), event.set) - try: - await event.wait() - finally: - if write_events.pop(sock, None) is not None: - loop.remove_writer(sock) - writable = True - else: - writable = False - - if not writable: - raise ClosedResourceError - - # # Synchronization # @@ -1767,16 +1628,17 @@ class Event(BaseEvent): def __init__(self) -> None: self._event = asyncio.Event() - def set(self) -> DeprecatedAwaitable: + def set(self) -> None: self._event.set() - return DeprecatedAwaitable(self.set) def is_set(self) -> bool: return self._event.is_set() async def wait(self) -> None: - if await self._event.wait(): - await checkpoint() + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() def statistics(self) -> EventStatistics: return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] @@ -1815,19 +1677,14 @@ class CapacityLimiter(BaseCapacityLimiter): if value < 1: raise ValueError("total_tokens must be >= 1") - old_value = self._total_tokens + waiters_to_notify = max(value - self._total_tokens, 0) self._total_tokens = value - events = [] - for event in self._wait_queue.values(): - if value <= old_value: - break - if not event.is_set(): - events.append(event) - old_value += 1 - - for event in events: + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] event.set() + waiters_to_notify -= 1 @property def borrowed_tokens(self) -> int: @@ -1837,11 +1694,10 @@ class CapacityLimiter(BaseCapacityLimiter): def available_tokens(self) -> float: return self._total_tokens - len(self._borrowers) - def acquire_nowait(self) -> DeprecatedAwaitable: + def acquire_nowait(self) -> None: self.acquire_on_behalf_of_nowait(current_task()) - return DeprecatedAwaitable(self.acquire_nowait) - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + def acquire_on_behalf_of_nowait(self, borrower: object) -> 
None: if borrower in self._borrowers: raise RuntimeError( "this borrower is already holding one of this CapacityLimiter's " @@ -1852,13 +1708,12 @@ class CapacityLimiter(BaseCapacityLimiter): raise WouldBlock self._borrowers.add(borrower) - return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) async def acquire(self) -> None: return await self.acquire_on_behalf_of(current_task()) async def acquire_on_behalf_of(self, borrower: object) -> None: - await checkpoint_if_cancelled() + await AsyncIOBackend.checkpoint_if_cancelled() try: self.acquire_on_behalf_of_nowait(borrower) except WouldBlock: @@ -1873,7 +1728,7 @@ class CapacityLimiter(BaseCapacityLimiter): self._borrowers.add(borrower) else: try: - await cancel_shielded_checkpoint() + await AsyncIOBackend.cancel_shielded_checkpoint() except BaseException: self.release() raise @@ -1906,29 +1761,20 @@ class CapacityLimiter(BaseCapacityLimiter): _default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") -def current_default_thread_limiter() -> CapacityLimiter: - try: - return _default_thread_limiter.get() - except LookupError: - limiter = CapacityLimiter(40) - _default_thread_limiter.set(limiter) - return limiter - - # # Operating system signals # -class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): - def __init__(self, signals: tuple[int, ...]): +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): self._signals = signals self._loop = get_running_loop() - self._signal_queue: deque[int] = deque() + self._signal_queue: deque[Signals] = deque() self._future: asyncio.Future = asyncio.Future() - self._handled_signals: set[int] = set() + self._handled_signals: set[Signals] = set() - def _deliver(self, signum: int) -> None: + def _deliver(self, signum: Signals) -> None: self._signal_queue.append(signum) if not self._future.done(): self._future.set_result(None) @@ -1953,8 +1799,8 @@ class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): def __aiter__(self) -> _SignalReceiver: return self - async def __anext__(self) -> int: - await checkpoint() + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() if not self._signal_queue: self._future = asyncio.Future() await self._future @@ -1962,10 +1808,6 @@ class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): return self._signal_queue.popleft() -def open_signal_receiver(*signals: int) -> _SignalReceiver: - return _SignalReceiver(signals) - - # # Testing and debugging # @@ -1974,69 +1816,47 @@ def open_signal_receiver(*signals: int) -> _SignalReceiver: def _create_task_info(task: asyncio.Task) -> TaskInfo: task_state = _task_states.get(task) if task_state is None: - name = task.get_name() if _native_task_names else None parent_id = None else: - name = task_state.name parent_id = task_state.parent_id - return TaskInfo(id(task), parent_id, name, get_coro(task)) - - -def get_current_task() -> TaskInfo: - return _create_task_info(current_task()) # type: ignore[arg-type] - - -def get_running_tasks() -> list[TaskInfo]: - return [_create_task_info(task) for task in all_tasks() if not task.done()] - - -async def wait_all_tasks_blocked() -> None: - await checkpoint() - this_task = current_task() - while True: - for task in all_tasks(): - if task is this_task: - continue - - if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] - await sleep(0.1) - break - else: - return + return TaskInfo(id(task), parent_id, task.get_name(), task.get_coro()) class 
TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + def __init__( self, - debug: bool = False, + *, + debug: bool | None = None, use_uvloop: bool = False, - policy: asyncio.AbstractEventLoopPolicy | None = None, - ): + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + import uvloop + + loop_factory = uvloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) self._exceptions: list[BaseException] = [] - _maybe_set_event_loop_policy(policy, use_uvloop) - self._loop = asyncio.new_event_loop() - self._loop.set_debug(debug) - self._loop.set_exception_handler(self._exception_handler) - asyncio.set_event_loop(self._loop) + self._runner_task: asyncio.Task | None = None - def _cancel_all_tasks(self) -> None: - to_cancel = all_tasks(self._loop) - if not to_cancel: - return + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self - for task in to_cancel: - task.cancel() + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) - self._loop.run_until_complete( - asyncio.gather(*to_cancel, return_exceptions=True) - ) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - raise cast(BaseException, task.exception()) + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() def _exception_handler( self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] @@ -2053,56 +1873,77 @@ class TestRunner(abc.TestRunner): if len(exceptions) == 1: raise exceptions[0] elif exceptions: - raise ExceptionGroup(exceptions) + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) - def close(self) -> None: - try: - self._cancel_all_tasks() - self._loop.run_until_complete(self._loop.shutdown_asyncgens()) - finally: - asyncio.set_event_loop(None) - self._loop.close() + @staticmethod + async def _run_tests_and_fixtures( + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + with receive_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + Tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future def run_asyncgen_fixture( self, fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], kwargs: dict[str, Any], ) -> Iterable[T_Retval]: - async def fixture_runner() -> None: - agen = fixture_func(**kwargs) - try: - retval = await agen.asend(None) - self._raise_async_exceptions() - except BaseException as exc: - f.set_exception(exc) - return - else: - f.set_result(retval) - - await event.wait() - try: - await 
agen.asend(None) - except StopAsyncIteration: - pass - else: - await agen.aclose() - raise RuntimeError("Async generator fixture did not stop") - - f = self._loop.create_future() - event = asyncio.Event() - fixture_task = self._loop.create_task(fixture_runner()) - self._loop.run_until_complete(f) - yield f.result() - event.set() - self._loop.run_until_complete(fixture_task) + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) self._raise_async_exceptions() + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + def run_fixture( self, fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], kwargs: dict[str, Any], ) -> T_Retval: - retval = self._loop.run_until_complete(fixture_func(**kwargs)) + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) self._raise_async_exceptions() return retval @@ -2110,8 +1951,518 @@ class TestRunner(abc.TestRunner): self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] ) -> None: try: - self._loop.run_until_complete(test_func(**kwargs)) + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) except Exception as exc: self._exceptions.append(exc) self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", False) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + import uvloop + + loop_factory = uvloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + try: + 
cancel_scope = _task_states[ + current_task() # type: ignore[index] + ].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper(scope: CancelScope) -> T_Retval: + __tracebackhide__ = True + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + scope._tasks.discard(task) + + loop = cast(AbstractEventLoop, token) + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "asyncio") + wrapper = task_wrapper(threadlocals.current_cancel_scope) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, wrapper, loop + ) + return f.result() + + @classmethod + def 
run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + sniffio.current_async_library_cvar.set("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = cast(AbstractEventLoop, token) + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + await cls.checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + cast("str | bytes", command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + 
remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError("reading from") from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError("writing to") from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> list[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + @classmethod + async def 
wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/site-packages/anyio/_backends/_trio.py b/site-packages/anyio/_backends/_trio.py index cf28943..a0d14c7 100644 --- a/site-packages/anyio/_backends/_trio.py +++ b/site-packages/anyio/_backends/_trio.py @@ -3,41 +3,48 @@ from __future__ import annotations import array import math import socket +import sys +import types +from collections.abc import AsyncIterator, Iterable from concurrent.futures import Future -from contextvars import copy_context from dataclasses import dataclass from functools import partial from io import IOBase from os import PathLike from signal import Signals +from socket import AddressFamily, SocketKind from types import TracebackType from typing import ( IO, - TYPE_CHECKING, Any, AsyncGenerator, - AsyncIterator, Awaitable, Callable, Collection, + ContextManager, Coroutine, Generic, - Iterable, Mapping, NoReturn, Sequence, TypeVar, cast, + overload, ) -import sniffio import trio.from_thread +import trio.lowlevel from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + wait_readable, + wait_writable, +) from trio.socket import SocketType as TrioSocketType from trio.to_thread import run_sync from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc -from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable from .._core._eventloop import claim_worker_thread from .._core._exceptions import ( BrokenResourceError, @@ -45,53 +52,41 @@ from .._core._exceptions import ( ClosedResourceError, EndOfStream, ) -from .._core._exceptions import ExceptionGroup as BaseExceptionGroup from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter from .._core._synchronization import Event as BaseEvent from .._core._synchronization import ResourceGuard from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend +from ..streams.memory import MemoryObjectSendStream -if TYPE_CHECKING: - from trio_typing import TaskStatus - -try: - from trio import lowlevel as trio_lowlevel -except ImportError: - from trio import hazmat as trio_lowlevel # type: ignore[no-redef] - from trio.hazmat import wait_readable, wait_writable +if sys.version_info >= (3, 10): + from typing import ParamSpec else: - from trio.lowlevel import wait_readable, wait_writable + from typing_extensions import ParamSpec -try: - trio_open_process = trio_lowlevel.open_process -except AttributeError: - # isort: off - from trio import ( # type: ignore[attr-defined, no-redef] - open_process as trio_open_process, - ) +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack +T = TypeVar("T") T_Retval = TypeVar("T_Retval") T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) 
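The `PosArgsT = TypeVarTuple("PosArgsT")` declaration added just below carries a pattern used throughout this diff: `(func, args)` pairs are typed with PEP 646 variadic generics instead of `*args: object`, so a type checker can match the argument tuple against the callable's parameter list. A minimal, self-contained sketch of the idea; the `call_with` helper is illustrative and not part of anyio:

from __future__ import annotations

import sys
from typing import Callable, TypeVar

if sys.version_info >= (3, 11):
    from typing import TypeVarTuple, Unpack
else:
    from typing_extensions import TypeVarTuple, Unpack

T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")


def call_with(
    func: Callable[[Unpack[PosArgsT]], T_Retval],
    args: tuple[Unpack[PosArgsT]],
) -> T_Retval:
    # The tuple type is bound to the callable's parameters, so a checker
    # rejects mismatched argument tuples that "*args: object" would accept.
    return func(*args)


print(call_with(divmod, (7, 3)))  # OK: (int, int) matches divmod's parameters
# call_with(divmod, (7,))        # flagged by a type checker: missing argument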
+PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") # # Event loop # -run = trio.run -current_token = trio.lowlevel.current_trio_token RunVar = trio.lowlevel.RunVar -# -# Miscellaneous -# - -sleep = trio.sleep - - # # Timeouts and cancellation # @@ -117,13 +112,10 @@ class CancelScope(BaseCancelScope): exc_tb: TracebackType | None, ) -> bool | None: # https://github.com/python-trio/trio-typing/pull/79 - return self.__original.__exit__( # type: ignore[func-returns-value] - exc_type, exc_val, exc_tb - ) + return self.__original.__exit__(exc_type, exc_val, exc_tb) - def cancel(self) -> DeprecatedAwaitable: + def cancel(self) -> None: self.__original.cancel() - return DeprecatedAwaitable(self.cancel) @property def deadline(self) -> float: @@ -137,6 +129,10 @@ class CancelScope(BaseCancelScope): def cancel_called(self) -> bool: return self.__original.cancel_called + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + @property def shield(self) -> bool: return self.__original.shield @@ -146,27 +142,15 @@ class CancelScope(BaseCancelScope): self.__original.shield = value -CancelledError = trio.Cancelled -checkpoint = trio.lowlevel.checkpoint -checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled -cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint -current_effective_deadline = trio.current_effective_deadline -current_time = trio.current_time - - # # Task groups # -class ExceptionGroup(BaseExceptionGroup, trio.MultiError): - pass - - class TaskGroup(abc.TaskGroup): def __init__(self) -> None: self._active = False - self._nursery_manager = trio.open_nursery() + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) self.cancel_scope = None # type: ignore[assignment] async def __aenter__(self) -> TaskGroup: @@ -183,13 +167,21 @@ class TaskGroup(abc.TaskGroup): ) -> bool | None: try: return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) - except trio.MultiError as exc: - raise ExceptionGroup(exc.exceptions) from None + except BaseExceptionGroup as exc: + _, rest = exc.split(trio.Cancelled) + if not rest: + cancelled_exc = trio.Cancelled._create() + raise cancelled_exc from exc + + raise finally: self._active = False def start_soon( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, ) -> None: if not self._active: raise RuntimeError( @@ -200,7 +192,7 @@ class TaskGroup(abc.TaskGroup): async def start( self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> object: + ) -> Any: if not self._active: raise RuntimeError( "This task group is not active; no new tasks can be started." 
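Worth noting at this seam: the nursery above is now opened with `strict_exception_groups=True`, and the `__aexit__` shown earlier collapses a group containing nothing but `trio.Cancelled` back into a bare cancellation. The observable anyio 4 behavior is that multiple task failures surface together as an `ExceptionGroup`. A hedged usage sketch of those semantics (Python 3.11+ for `except*`; works on either backend):

import anyio


async def fail(msg: str) -> None:
    raise RuntimeError(msg)


async def main() -> None:
    try:
        async with anyio.create_task_group() as tg:
            tg.start_soon(fail, "first")
            tg.start_soon(fail, "second")
    except* RuntimeError as eg:
        # Both failures arrive together, wrapped in one ExceptionGroup.
        for exc in eg.exceptions:
            print("caught:", exc)


anyio.run(main)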
@@ -214,53 +206,6 @@ class TaskGroup(abc.TaskGroup): # -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: trio.CapacityLimiter | None = None, -) -> T_Retval: - def wrapper() -> T_Retval: - with claim_worker_thread("trio"): - return func(*args) - - # TODO: remove explicit context copying when trio 0.20 is the minimum requirement - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - return await run_sync( - context.run, wrapper, cancellable=cancellable, limiter=limiter - ) - - -# TODO: remove this workaround when trio 0.20 is the minimum requirement -def run_async_from_thread( - fn: Callable[..., Awaitable[T_Retval]], *args: Any -) -> T_Retval: - async def wrapper() -> T_Retval: - retval: T_Retval - - async def inner() -> None: - nonlocal retval - __tracebackhide__ = True - retval = await fn(*args) - - async with trio.open_nursery() as n: - context.run(n.start_soon, inner) - - __tracebackhide__ = True - return retval # noqa: F821 - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "trio") - return trio.from_thread.run(wrapper) - - -def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: - # TODO: remove explicit context copying when trio 0.20 is the minimum requirement - retval = trio.from_thread.run_sync(copy_context().run, fn, *args) - return cast(T_Retval, retval) - - class BlockingPortal(abc.BlockingPortal): def __new__(cls) -> BlockingPortal: return object.__new__(cls) @@ -271,16 +216,13 @@ class BlockingPortal(abc.BlockingPortal): def _spawn_task_from_thread( self, - func: Callable, - args: tuple, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], kwargs: dict[str, Any], name: object, - future: Future, + future: Future[T_Retval], ) -> None: - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "trio") trio.from_thread.run_sync( - context.run, partial(self._task_group.start_soon, name=name), self._call_func, func, @@ -383,47 +325,19 @@ class Process(abc.Process): return self._stderr -async def open_process( - command: str | bytes | Sequence[str | bytes], - *, - shell: bool, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - cwd: str | bytes | PathLike | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> Process: - process = await trio_open_process( # type: ignore[misc] - command, # type: ignore[arg-type] - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=shell, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None - stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None - stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - class _ProcessPoolShutdownInstrument(trio.abc.Instrument): def after_run(self) -> None: super().after_run() -current_default_worker_process_limiter: RunVar = RunVar( +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( "current_default_worker_process_limiter" ) -async def _shutdown_process_pool(workers: set[Process]) -> None: - process: Process +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: try: - await sleep(math.inf) + await trio.sleep(math.inf) except trio.Cancelled: for 
process in workers: if process.returncode is None: @@ -434,10 +348,6 @@ async def _shutdown_process_pool(workers: set[Process]) -> None: await process.aclose() -def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None: - trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) - - # # Sockets and networking # @@ -515,7 +425,7 @@ class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): raise ValueError("maxfds must be a positive integer") fds = array.array("i") - await checkpoint() + await trio.lowlevel.checkpoint() with self._receive_guard: while True: try: @@ -555,7 +465,7 @@ class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): filenos.append(fd.fileno()) fdarray = array.array("i", filenos) - await checkpoint() + await trio.lowlevel.checkpoint() with self._send_guard: while True: try: @@ -564,7 +474,7 @@ class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): [ ( socket.SOL_SOCKET, - socket.SCM_RIGHTS, # type: ignore[list-item] + socket.SCM_RIGHTS, fdarray, ) ], @@ -648,76 +558,49 @@ class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocke self._convert_socket_error(exc) -async def connect_tcp( - host: str, port: int, local_address: IPSockAddrType | None = None -) -> SocketStream: - family = socket.AF_INET6 if ":" in host else socket.AF_INET - trio_socket = trio.socket.socket(family) - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if local_address: - await trio_socket.bind(local_address) +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") - try: - await trio_socket.connect((host, port)) - except BaseException: - trio_socket.close() - raise + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) - return SocketStream(trio_socket) + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) -async def connect_unix(path: str) -> UNIXSocketStream: - trio_socket = trio.socket.socket(socket.AF_UNIX) - try: - await trio_socket.connect(path) - except BaseException: - trio_socket.close() - raise +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") - return UNIXSocketStream(trio_socket) + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) - -async def create_udp_socket( - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, -) -> UDPSocket | ConnectedUDPSocket: - trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) - - if reuse_port: - trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - if local_address: - await trio_socket.bind(local_address) - - if remote_address: - await trio_socket.connect(remote_address) - return 
ConnectedUDPSocket(trio_socket) - else: - return UDPSocket(trio_socket) - - -getaddrinfo = trio.socket.getaddrinfo -getnameinfo = trio.socket.getnameinfo - - -async def wait_socket_readable(sock: socket.socket) -> None: - try: - await wait_readable(sock) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("reading from") from None - - -async def wait_socket_writable(sock: socket.socket) -> None: - try: - await wait_writable(sock) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("writing to") from None + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) # @@ -742,13 +625,14 @@ class Event(BaseEvent): orig_statistics = self.__original.statistics() return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) - def set(self) -> DeprecatedAwaitable: + def set(self) -> None: self.__original.set() - return DeprecatedAwaitable(self.set) class CapacityLimiter(BaseCapacityLimiter): - def __new__(cls, *args: object, **kwargs: object) -> CapacityLimiter: + def __new__( + cls, *args: Any, original: trio.CapacityLimiter | None = None + ) -> CapacityLimiter: return object.__new__(cls) def __init__( @@ -783,13 +667,11 @@ class CapacityLimiter(BaseCapacityLimiter): def available_tokens(self) -> float: return self.__original.available_tokens - def acquire_nowait(self) -> DeprecatedAwaitable: + def acquire_nowait(self) -> None: self.__original.acquire_nowait() - return DeprecatedAwaitable(self.acquire_nowait) - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: self.__original.acquire_on_behalf_of_nowait(borrower) - return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) async def acquire(self) -> None: await self.__original.acquire() @@ -808,23 +690,12 @@ class CapacityLimiter(BaseCapacityLimiter): return CapacityLimiterStatistics( borrowed_tokens=orig.borrowed_tokens, total_tokens=orig.total_tokens, - borrowers=orig.borrowers, + borrowers=tuple(orig.borrowers), tasks_waiting=orig.tasks_waiting, ) -_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper") - - -def current_default_thread_limiter() -> CapacityLimiter: - try: - return _capacity_limiter_wrapper.get() - except LookupError: - limiter = CapacityLimiter( - original=trio.to_thread.current_default_thread_limiter() - ) - _capacity_limiter_wrapper.set(limiter) - return limiter +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") # @@ -832,7 +703,7 @@ def current_default_thread_limiter() -> CapacityLimiter: # -class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): +class _SignalReceiver: _iterator: AsyncIterator[int] def __init__(self, signals: tuple[Signals, ...]): @@ -859,138 +730,423 @@ class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): return Signals(signum) -def open_signal_receiver(*signals: Signals) -> _SignalReceiver: - return _SignalReceiver(signals) - - # # Testing and debugging # -def get_current_task() -> TaskInfo: - task = trio_lowlevel.current_task() - - parent_id = None - if task.parent_nursery and task.parent_nursery.parent_task: - parent_id = 
id(task.parent_nursery.parent_task) - - return TaskInfo(id(task), parent_id, task.name, task.coro) - - -def get_running_tasks() -> list[TaskInfo]: - root_task = trio_lowlevel.current_root_task() - task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] - nurseries = root_task.child_nurseries - while nurseries: - new_nurseries: list[trio.Nursery] = [] - for nursery in nurseries: - for task in nursery.child_tasks: - task_infos.append( - TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro) - ) - new_nurseries.extend(task.child_nurseries) - - nurseries = new_nurseries - - return task_infos - - -def wait_all_tasks_blocked() -> Awaitable[None]: - import trio.testing - - return trio.testing.wait_all_tasks_blocked() - - class TestRunner(abc.TestRunner): def __init__(self, **options: Any) -> None: - from collections import deque from queue import Queue - self._call_queue: Queue[Callable[..., object]] = Queue() - self._result_queue: deque[Outcome] = deque() - self._stop_event: trio.Event | None = None - self._nursery: trio.Nursery | None = None + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None self._options = options - async def _trio_main(self) -> None: - self._stop_event = trio.Event() - async with trio.open_nursery() as self._nursery: - await self._stop_event.wait() - - async def _call_func( - self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, ) -> None: - try: - retval = await func(*args, **kwargs) - except BaseException as exc: - self._result_queue.append(Error(exc)) - else: - self._result_queue.append(Value(retval)) + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) def _main_task_finished(self, outcome: object) -> None: - self._nursery = None + self._send_stream = None - def _get_nursery(self) -> trio.Nursery: - if self._nursery is None: + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: trio.lowlevel.start_guest_run( - self._trio_main, + self._run_tests_and_fixtures, run_sync_soon_threadsafe=self._call_queue.put, done_callback=self._main_task_finished, **self._options, ) - while self._nursery is None: + while self._send_stream is None: self._call_queue.get()() - return self._nursery - - def _call( - self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object - ) -> T_Retval: - self._get_nursery().start_soon(self._call_func, func, args, kwargs) - while not self._result_queue: + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: self._call_queue.get()() - outcome = self._result_queue.pop() - return outcome.unwrap() - - def close(self) -> None: - if self._stop_event: - self._stop_event.set() - while self._nursery is not None: - self._call_queue.get()() + return outcome_holder[0].unwrap() def run_asyncgen_fixture( self, 
fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], kwargs: dict[str, Any], ) -> Iterable[T_Retval]: - async def fixture_runner(*, task_status: TaskStatus[T_Retval]) -> None: - agen = fixture_func(**kwargs) - retval = await agen.asend(None) - task_status.started(retval) - await teardown_event.wait() - try: - await agen.asend(None) - except StopAsyncIteration: - pass - else: - await agen.aclose() - raise RuntimeError("Async generator fixture did not stop") + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) - teardown_event = trio.Event() - fixture_value = self._call(lambda: self._get_nursery().start(fixture_runner)) - yield fixture_value - teardown_event.set() + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") def run_fixture( self, fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], kwargs: dict[str, Any], ) -> T_Retval: - return self._call(fixture_func, **kwargs) + return self._call_in_runner_task(fixture_func, **kwargs) def run_test( self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] ) -> None: - self._call(test_func, **kwargs) + self._call_in_runner_task(test_func, **kwargs) + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def 
run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run(func, *args) + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run_sync(func, *args) + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + process = await trio.lowlevel.open_process( # type: ignore[misc] + command, # type: ignore[arg-type] + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=shell, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: + ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: + ... 
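The two `@overload` stubs above exist purely for type checkers: they narrow the return type from the `remote_path` argument (``None`` yields the unconnected socket type, a path the connected one), while the single runtime implementation follows next in the diff. A self-contained sketch of that narrowing pattern, using hypothetical classes rather than anyio's own:

from __future__ import annotations

from typing import overload


class DatagramSock: ...


class ConnectedDatagramSock(DatagramSock): ...


@overload
def make_sock(remote_path: None) -> DatagramSock: ...
@overload
def make_sock(remote_path: str) -> ConnectedDatagramSock: ...


def make_sock(remote_path: str | None) -> DatagramSock | ConnectedDatagramSock:
    # Overload stubs are erased at runtime; only this body executes. A type
    # checker picks the stub matching the argument, so callers that pass a
    # path get the connected type without casting.
    return ConnectedDatagramSock() if remote_path is not None else DatagramSock()


sock = make_sock("/tmp/echo.sock")  # inferred as ConnectedDatagramSock
print(type(sock).__name__)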
+ + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + @classmethod + def get_running_tasks(cls) -> list[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo( + id(task), id(nursery.parent_task), task.name, task.coro + ) + ) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/site-packages/anyio/_core/_eventloop.py b/site-packages/anyio/_core/_eventloop.py index ae98648..a9c6e82 100644 --- a/site-packages/anyio/_core/_eventloop.py +++ b/site-packages/anyio/_core/_eventloop.py @@ -3,30 +3,33 @@ from __future__ import annotations import math import sys import threading +from collections.abc import 
Awaitable, Callable, Generator from contextlib import contextmanager from importlib import import_module -from typing import ( - Any, - Awaitable, - Callable, - Generator, - TypeVar, -) +from typing import TYPE_CHECKING, Any, TypeVar import sniffio -# This must be updated when new backends are introduced -from ._compat import DeprecatedAwaitableFloat +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced BACKENDS = "asyncio", "trio" T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + threadlocals = threading.local() def run( - func: Callable[..., Awaitable[T_Retval]], - *args: object, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], backend: str = "asyncio", backend_options: dict[str, Any] | None = None, ) -> T_Retval: @@ -37,12 +40,13 @@ def run( :param func: a coroutine function :param args: positional arguments to ``func`` - :param backend: name of the asynchronous event loop implementation – currently either - ``asyncio`` or ``trio`` - :param backend_options: keyword arguments to call the backend ``run()`` implementation with - (documented :ref:`here `) + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here `) :return: the return value of the coroutine function - :raises RuntimeError: if an asynchronous event loop is already running in this thread + :raises RuntimeError: if an asynchronous event loop is already running in this + thread :raises LookupError: if the named backend is not found """ @@ -54,18 +58,19 @@ def run( raise RuntimeError(f"Already running {asynclib_name} in this thread") try: - asynclib = import_module(f"..._backends._{backend}", package=__name__) + async_backend = get_async_backend(backend) except ImportError as exc: raise LookupError(f"No such backend: {backend}") from exc token = None if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async library + # Since we're in control of the event loop, we can cache the name of the async + # library token = sniffio.current_async_library_cvar.set(backend) try: backend_options = backend_options or {} - return asynclib.run(func, *args, **backend_options) + return async_backend.run(func, args, {}, backend_options) finally: if token: sniffio.current_async_library_cvar.reset(token) @@ -78,7 +83,7 @@ async def sleep(delay: float) -> None: :param delay: the duration, in seconds """ - return await get_asynclib().sleep(delay) + return await get_async_backend().sleep(delay) async def sleep_forever() -> None: @@ -97,8 +102,8 @@ async def sleep_until(deadline: float) -> None: """ Pause the current task until the given time. - :param deadline: the absolute time to wake up at (according to the internal monotonic clock of - the event loop) + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) .. versionadded:: 3.1 @@ -107,14 +112,14 @@ async def sleep_until(deadline: float) -> None: await sleep(max(deadline - now, 0)) -def current_time() -> DeprecatedAwaitableFloat: +def current_time() -> float: """ Return the current value of the event loop's internal clock. 
:return: the clock value (seconds) """ - return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) + return get_async_backend().current_time() def get_all_backends() -> tuple[str, ...]: @@ -124,7 +129,7 @@ def get_all_backends() -> tuple[str, ...]: def get_cancelled_exc_class() -> type[BaseException]: """Return the current async library's cancellation exception class.""" - return get_asynclib().CancelledError + return get_async_backend().cancelled_exception_class() # @@ -133,21 +138,26 @@ def get_cancelled_exc_class() -> type[BaseException]: @contextmanager -def claim_worker_thread(backend: str) -> Generator[Any, None, None]: - module = sys.modules["anyio._backends._" + backend] - threadlocals.current_async_module = module +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + threadlocals.current_async_backend = backend_class + threadlocals.current_token = token try: yield finally: - del threadlocals.current_async_module + del threadlocals.current_async_backend + del threadlocals.current_token -def get_asynclib(asynclib_name: str | None = None) -> Any: +def get_async_backend(asynclib_name: str | None = None) -> AsyncBackend: if asynclib_name is None: asynclib_name = sniffio.current_async_library() modulename = "anyio._backends._" + asynclib_name try: - return sys.modules[modulename] + module = sys.modules[modulename] except KeyError: - return import_module(modulename) + module = import_module(modulename) + + return getattr(module, "backend_class") diff --git a/site-packages/anyio/_core/_exceptions.py b/site-packages/anyio/_core/_exceptions.py index 92ccd77..571c3b8 100644 --- a/site-packages/anyio/_core/_exceptions.py +++ b/site-packages/anyio/_core/_exceptions.py @@ -1,24 +1,25 @@ from __future__ import annotations -from traceback import format_exception - class BrokenResourceError(Exception): """ - Raised when trying to use a resource that has been rendered unusable due to external causes - (e.g. a send stream whose peer has disconnected). + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). """ class BrokenWorkerProcess(Exception): """ - Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise - misbehaves. + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or + otherwise misbehaves. """ class BusyResourceError(Exception): - """Raised when two tasks are trying to read from or write to the same resource concurrently.""" + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ def __init__(self, action: str): super().__init__(f"Another task is already {action} this resource") @@ -30,7 +31,8 @@ class ClosedResourceError(Exception): class DelimiterNotFound(Exception): """ - Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the maximum number of bytes has been read without the delimiter being found. """ @@ -41,38 +43,15 @@ class DelimiterNotFound(Exception): class EndOfStream(Exception): - """Raised when trying to read from a stream that has been closed from the other end.""" - - -class ExceptionGroup(BaseException): """ - Raised when multiple exceptions have been raised in a task group. 
- - :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + Raised when trying to read from a stream that has been closed from the other end. """ - SEPARATOR = "----------------------------\n" - - exceptions: list[BaseException] - - def __str__(self) -> str: - tracebacks = [ - "".join(format_exception(type(exc), exc, exc.__traceback__)) - for exc in self.exceptions - ] - return ( - f"{len(self.exceptions)} exceptions were raised in the task group:\n" - f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}" - ) - - def __repr__(self) -> str: - exception_reprs = ", ".join(repr(exc) for exc in self.exceptions) - return f"<{self.__class__.__name__}: {exception_reprs}>" - class IncompleteRead(Exception): """ - Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the connection is closed before the requested amount of bytes has been read. """ @@ -85,8 +64,8 @@ class IncompleteRead(Exception): class TypedAttributeLookupError(LookupError): """ - Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not - found and no default value has been given. + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. """ diff --git a/site-packages/anyio/_core/_fileio.py b/site-packages/anyio/_core/_fileio.py index 35e8e8a..dc22440 100644 --- a/site-packages/anyio/_core/_fileio.py +++ b/site-packages/anyio/_core/_fileio.py @@ -3,6 +3,7 @@ from __future__ import annotations import os import pathlib import sys +from collections.abc import Callable, Iterable, Iterator, Sequence from dataclasses import dataclass from functools import partial from os import PathLike @@ -12,23 +13,14 @@ from typing import ( Any, AnyStr, AsyncIterator, - Callable, + Final, Generic, - Iterable, - Iterator, - Sequence, - cast, overload, ) from .. import to_thread from ..abc import AsyncResource -if sys.version_info >= (3, 8): - from typing import Final -else: - from typing_extensions import Final - if TYPE_CHECKING: from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer else: @@ -39,8 +31,8 @@ class AsyncFile(AsyncResource, Generic[AnyStr]): """ An asynchronous file object. - This class wraps a standard file object and provides async friendly versions of the following - blocking methods (where available on the original file object): + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): * read * read1 @@ -57,8 +49,8 @@ class AsyncFile(AsyncResource, Generic[AnyStr]): All other methods are directly passed through. - This class supports the asynchronous context manager protocol which closes the underlying file - at the end of the context block. + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. 
This class also supports asynchronous iteration:: @@ -212,22 +204,25 @@ class _PathIterator(AsyncIterator["Path"]): iterator: Iterator[PathLike[str]] async def __anext__(self) -> Path: - nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) if nextval is None: raise StopAsyncIteration from None - return Path(cast("PathLike[str]", nextval)) + return Path(nextval) class Path: """ An asynchronous version of :class:`pathlib.Path`. - This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but - it is compatible with the :class:`os.PathLike` interface. + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. - It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the - deprecated :meth:`~pathlib.Path.link_to` method. + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. Any methods that do disk I/O need to be awaited on. These methods are: @@ -263,7 +258,8 @@ class Path: * :meth:`~pathlib.Path.write_bytes` * :meth:`~pathlib.Path.write_text` - Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: * :meth:`~pathlib.Path.glob` * :meth:`~pathlib.Path.iterdir` @@ -296,26 +292,26 @@ class Path: target = other._path if isinstance(other, Path) else other return self._path.__eq__(target) - def __lt__(self, other: Path) -> bool: + def __lt__(self, other: pathlib.PurePath | Path) -> bool: target = other._path if isinstance(other, Path) else other return self._path.__lt__(target) - def __le__(self, other: Path) -> bool: + def __le__(self, other: pathlib.PurePath | Path) -> bool: target = other._path if isinstance(other, Path) else other return self._path.__le__(target) - def __gt__(self, other: Path) -> bool: + def __gt__(self, other: pathlib.PurePath | Path) -> bool: target = other._path if isinstance(other, Path) else other return self._path.__gt__(target) - def __ge__(self, other: Path) -> bool: + def __ge__(self, other: pathlib.PurePath | Path) -> bool: target = other._path if isinstance(other, Path) else other return self._path.__ge__(target) - def __truediv__(self, other: Any) -> Path: + def __truediv__(self, other: str | PathLike[str]) -> Path: return Path(self._path / other) - def __rtruediv__(self, other: Any) -> Path: + def __rtruediv__(self, other: str | PathLike[str]) -> Path: return Path(other) / self @property @@ -371,13 +367,16 @@ class Path: def match(self, path_pattern: str) -> bool: return self._path.match(path_pattern) - def is_relative_to(self, *other: str | PathLike[str]) -> bool: + def is_relative_to(self, other: str | PathLike[str]) -> bool: try: - self.relative_to(*other) + self.relative_to(other) return True except ValueError: return False + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: func = partial(os.chmod, follow_symlinks=follow_symlinks) return await to_thread.run_sync(func, self._path, mode) @@ -388,19 +387,23 @@ class Path: return cls(path) async def exists(self) -> bool: - return await to_thread.run_sync(self._path.exists, cancellable=True) + 
return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) async def expanduser(self) -> Path: - return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) def glob(self, pattern: str) -> AsyncIterator[Path]: gen = self._path.glob(pattern) return _PathIterator(gen) async def group(self) -> str: - return await to_thread.run_sync(self._path.group, cancellable=True) + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) - async def hardlink_to(self, target: str | pathlib.Path | Path) -> None: + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: if isinstance(target, Path): target = target._path @@ -415,31 +418,37 @@ class Path: return self._path.is_absolute() async def is_block_device(self) -> bool: - return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) async def is_char_device(self) -> bool: - return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) async def is_dir(self) -> bool: - return await to_thread.run_sync(self._path.is_dir, cancellable=True) + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) async def is_fifo(self) -> bool: - return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) async def is_file(self) -> bool: - return await to_thread.run_sync(self._path.is_file, cancellable=True) + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) async def is_mount(self) -> bool: - return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) def is_reserved(self) -> bool: return self._path.is_reserved() async def is_socket(self) -> bool: - return await to_thread.run_sync(self._path.is_socket, cancellable=True) + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) async def is_symlink(self) -> bool: - return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) def iterdir(self) -> AsyncIterator[Path]: gen = self._path.iterdir() @@ -452,7 +461,7 @@ class Path: await to_thread.run_sync(self._path.lchmod, mode) async def lstat(self) -> os.stat_result: - return await to_thread.run_sync(self._path.lstat, cancellable=True) + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) async def mkdir( self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False @@ -495,7 +504,7 @@ class Path: return AsyncFile(fp) async def owner(self) -> str: - return await to_thread.run_sync(self._path.owner, cancellable=True) + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) async def read_bytes(self) -> bytes: return await to_thread.run_sync(self._path.read_bytes) @@ -510,7 +519,7 @@ class Path: async def readlink(self) -> Path: target = await to_thread.run_sync(os.readlink, self._path) - return Path(cast(str, target)) + return Path(target) async def rename(self, target: str | pathlib.PurePath | Path) -> Path: if isinstance(target, Path): @@ -528,7 +537,7 @@ class Path: async def 
resolve(self, strict: bool = False) -> Path: func = partial(self._path.resolve, strict=strict) - return Path(await to_thread.run_sync(func, cancellable=True)) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) def rglob(self, pattern: str) -> AsyncIterator[Path]: gen = self._path.rglob(pattern) @@ -537,23 +546,21 @@ class Path: async def rmdir(self) -> None: await to_thread.run_sync(self._path.rmdir) - async def samefile( - self, other_path: str | bytes | int | pathlib.Path | Path - ) -> bool: + async def samefile(self, other_path: str | PathLike[str]) -> bool: if isinstance(other_path, Path): other_path = other_path._path return await to_thread.run_sync( - self._path.samefile, other_path, cancellable=True + self._path.samefile, other_path, abandon_on_cancel=True ) async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: func = partial(os.stat, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, cancellable=True) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) async def symlink_to( self, - target: str | pathlib.Path | Path, + target: str | bytes | PathLike[str] | PathLike[bytes], target_is_directory: bool = False, ) -> None: if isinstance(target, Path): @@ -571,6 +578,29 @@ class Path: if not missing_ok: raise + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + def with_name(self, name: str) -> Path: return Path(self._path.with_name(name)) @@ -580,6 +610,9 @@ class Path: def with_suffix(self, suffix: str) -> Path: return Path(self._path.with_suffix(suffix)) + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + async def write_bytes(self, data: bytes) -> int: return await to_thread.run_sync(self._path.write_bytes, data) diff --git a/site-packages/anyio/_core/_signals.py b/site-packages/anyio/_core/_signals.py index 8ea54af..115c749 100644 --- a/site-packages/anyio/_core/_signals.py +++ b/site-packages/anyio/_core/_signals.py @@ -1,26 +1,25 @@ from __future__ import annotations -from typing import AsyncIterator +from collections.abc import AsyncIterator +from signal import Signals +from typing import ContextManager -from ._compat import DeprecatedAsyncContextManager -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend -def open_signal_receiver( - *signals: int, -) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: +def open_signal_receiver(*signals: Signals) -> ContextManager[AsyncIterator[Signals]]: """ Start receiving operating system signals. :param signals: signals to receive (e.g. ``signal.SIGINT``) - :return: an asynchronous context manager for an asynchronous iterator which yields signal - numbers + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers - .. warning:: Windows does not support signals natively so it is best to avoid relying on this - in cross-platform applications. + .. 
warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. - .. warning:: On asyncio, this permanently replaces any previous signal handler for the given - signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. """ - return get_asynclib().open_signal_receiver(*signals) + return get_async_backend().open_signal_receiver(*signals) diff --git a/site-packages/anyio/_core/_sockets.py b/site-packages/anyio/_core/_sockets.py index e6970be..0f0a314 100644 --- a/site-packages/anyio/_core/_sockets.py +++ b/site-packages/anyio/_core/_sockets.py @@ -1,41 +1,41 @@ from __future__ import annotations +import errno +import os import socket import ssl +import stat import sys +from collections.abc import Awaitable from ipaddress import IPv6Address, ip_address from os import PathLike, chmod -from pathlib import Path from socket import AddressFamily, SocketKind -from typing import Awaitable, List, Tuple, cast, overload +from typing import Any, Literal, cast, overload from .. import to_thread from ..abc import ( ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, IPAddressType, IPSockAddrType, SocketListener, SocketStream, UDPSocket, + UNIXDatagramSocket, UNIXSocketStream, ) from ..streams.stapled import MultiListener from ..streams.tls import TLSStream -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend from ._resources import aclose_forcefully from ._synchronization import Event from ._tasks import create_task_group, move_on_after -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 -GetAddrInfoReturnType = List[ - Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]] -] AnyIPAddressFamily = Literal[ AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 ] @@ -142,18 +142,21 @@ async def connect_tcp( :param remote_host: the IP address or host name to connect to :param remote_port: port on the target host to connect to - :param local_host: the interface address or name to bind the socket to before connecting + :param local_host: the interface address or name to bind the socket to before + connecting :param tls: ``True`` to do a TLS handshake with the connected stream and return a :class:`~anyio.streams.tls.TLSStream` instead - :param ssl_context: the SSL context object to use (if omitted, a default context is created) - :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing - the stream and requires that the server does this as well. Otherwise, - :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. Some protocols, such as HTTP, require this option to be ``False``. See :meth:`~ssl.SSLContext.wrap_socket` for details. 
- :param tls_hostname: host name to check the server certificate against (defaults to the value - of ``remote_host``) - :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt + :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream :raises OSError: if the connection attempt fails @@ -177,7 +180,7 @@ async def connect_tcp( finally: event.set() - asynclib = get_asynclib() + asynclib = get_async_backend() local_address: IPSockAddrType | None = None family = socket.AF_UNSPEC if local_host: @@ -193,8 +196,8 @@ async def connect_tcp( target_host, remote_port, family=family, type=socket.SOCK_STREAM ) - # Organize the list so that the first address is an IPv6 address (if available) and the - # second one is an IPv4 addresses. The rest can be in whatever order. + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 address. The rest can be in whatever order. v6_found = v4_found = False target_addrs: list[tuple[socket.AddressFamily, str]] = [] for af, *rest, sa in gai_res: @@ -221,7 +224,11 @@ async def connect_tcp( await event.wait() if connected_stream is None: - cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) raise OSError("All connection attempts failed") from cause if tls or tls_hostname or ssl_context: @@ -240,7 +247,7 @@ async def connect_tcp( return connected_stream -async def connect_unix(path: str | PathLike[str]) -> UNIXSocketStream: +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: """ Connect to the given UNIX socket. @@ -250,8 +257,8 @@ async def connect_unix(path: str | PathLike[str]) -> UNIXSocketStream: :return: a socket stream object """ - path = str(Path(path)) - return await get_asynclib().connect_unix(path) + path = os.fspath(path) + return await get_async_backend().connect_unix(path) async def create_tcp_listener( @@ -277,11 +284,11 @@ async def create_tcp_listener( :return: a list of listener objects """ - asynclib = get_asynclib() + asynclib = get_async_backend() backlog = min(backlog, 65536) local_host = str(local_host) if local_host is not None else None gai_res = await getaddrinfo( - local_host, # type: ignore[arg-type] + local_host, local_port, family=family, type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, @@ -302,7 +309,8 @@ async def create_tcp_listener( raw_socket = socket.socket(fam) raw_socket.setblocking(False) - # For Windows, enable exclusive address use. For others, enable address reuse. + # For Windows, enable exclusive address use. For others, enable address + # reuse.
if sys.platform == "win32": raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) else: @@ -322,7 +330,7 @@ async def create_tcp_listener( raw_socket.bind(sockaddr) raw_socket.listen(backlog) - listener = asynclib.TCPSocketListener(raw_socket) + listener = asynclib.create_tcp_listener(raw_socket) listeners.append(listener) except BaseException: for listener in listeners: @@ -334,7 +342,7 @@ async def create_tcp_listener( async def create_unix_listener( - path: str | PathLike[str], + path: str | bytes | PathLike[Any], *, mode: int | None = None, backlog: int = 65536, @@ -346,29 +354,20 @@ async def create_unix_listener( :param path: path of the socket :param mode: permissions to set on the socket - :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or - 65536) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) :return: a listener object .. versionchanged:: 3.0 - If a socket already exists on the file system in the given path, it will be removed first. + If a socket already exists on the file system in the given path, it will be + removed first. """ - path_str = str(path) - path = Path(path) - if path.is_socket(): - path.unlink() - backlog = min(backlog, 65536) - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) try: - await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True) - if mode is not None: - await to_thread.run_sync(chmod, path_str, mode, cancellable=True) - raw_socket.listen(backlog) - return get_asynclib().UNIXSocketListener(raw_socket) + return get_async_backend().create_unix_listener(raw_socket) except BaseException: raw_socket.close() raise @@ -384,15 +383,15 @@ async def create_udp_socket( """ Create a UDP socket. - If ``local_port`` has been given, the socket will be bound to this port on the local + If ``local_port`` has been given, the socket will be bound to this port on the local machine, making this socket suitable for providing UDP based services. - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from - ``local_host`` if omitted + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` if omitted :param local_host: IP address or host name of the local interface to bind to :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port - (not supported on Windows) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) :return: a UDP socket """ @@ -414,9 +413,10 @@ async def create_udp_socket( else: local_address = ("0.0.0.0", 0) - return await get_asynclib().create_udp_socket( + sock = await get_async_backend().create_udp_socket( family, local_address, None, reuse_port ) + return cast(UDPSocket, sock) async def create_connected_udp_socket( @@ -431,17 +431,17 @@ async def create_connected_udp_socket( """ Create a connected UDP socket. - Connected UDP sockets can only communicate with the specified remote host/port, and any packets - sent from other sources are dropped. + Connected UDP sockets can only communicate with the specified remote host/port, and + any packets sent from other sources are dropped.
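A minimal sketch of the semantics just described, with an assumed local peer address:

import anyio

async def main() -> None:
    async with await anyio.create_connected_udp_socket(
        remote_host="127.0.0.1", remote_port=9999
    ) as udp:
        await udp.send(b"ping")     # always addressed to 127.0.0.1:9999
        print(await udp.receive())  # bare bytes; the sender is implied

anyio.run(main)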
:param remote_host: remote host to set as the default target :param remote_port: port on the remote host to set as the default target - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from - ``local_host`` or ``remote_host`` if omitted + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` or ``remote_host`` if omitted :param local_host: IP address or host name of the local interface to bind to :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port - (not supported on Windows) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) :return: a connected UDP socket """ @@ -463,25 +463,87 @@ async def create_connected_udp_socket( family = cast(AnyIPAddressFamily, gai_res[0][0]) remote_address = gai_res[0][-1] - return await get_asynclib().create_udp_socket( + sock = await get_async_backend().create_udp_socket( family, local_address, remote_address, reuse_port ) + return cast(ConnectedUDPSocket, sock) + + +async def create_unix_datagram_socket( + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> UNIXDatagramSocket: + """ + Create a UNIX datagram socket. + + Not available on Windows. + + If ``local_path`` has been given, the socket will be bound to this path, making this + socket suitable for receiving datagrams from other processes. Other processes can + send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system at ``local_path``, it will be + removed first. + + :param local_path: the path to bind to + :param local_mode: permissions to set on the local socket + :return: a UNIX datagram socket + + """ + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket(raw_socket, None) + + +async def create_connected_unix_datagram_socket( + remote_path: str | bytes | PathLike[Any], + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> ConnectedUNIXDatagramSocket: + """ + Create a connected UNIX datagram socket. + + Connected datagram sockets can only communicate with the specified remote path. + + If ``local_path`` has been given, the socket will be bound to this path, making + this socket suitable for receiving datagrams from other processes. Other processes + can send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system at ``local_path``, it will be + removed first. + + :param remote_path: the path to set as the default target + :param local_path: the path to bind to + :param local_mode: permissions to set on the local socket + :return: a connected UNIX datagram socket + + """ + remote_path = os.fspath(remote_path) + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket( + raw_socket, remote_path ) async def getaddrinfo( - host: bytearray | bytes | str, + host: bytes | str | None, port: str | int | None, *, family: int | AddressFamily = 0, type: int | SocketKind = 0, proto: int = 0, flags: int = 0, -) -> GetAddrInfoReturnType: +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]: """ Look up a numeric IP address given a host name.
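A minimal sketch of the two UNIX datagram helpers added above; the socket path is assumed for illustration:

import anyio

async def main() -> None:
    # Any stale socket file at local_path is removed before binding.
    async with await anyio.create_unix_datagram_socket(
        local_path="/tmp/server.sock"
    ) as server:
        data, sender = await server.receive()  # a (bytes, path) tuple
        await server.sendto(b"pong", sender)

anyio.run(main)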
- Internationalized domain names are translated according to the (non-transitional) IDNA 2008 - standard. + Internationalized domain names are translated according to the (non-transitional) + IDNA 2008 standard. .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of (host, port), unlike what :func:`socket.getaddrinfo` does. @@ -500,7 +562,7 @@ async def getaddrinfo( # Handle unicode hostnames if isinstance(host, str): try: - encoded_host = host.encode("ascii") + encoded_host: bytes | None = host.encode("ascii") except UnicodeEncodeError: import idna @@ -508,7 +570,7 @@ async def getaddrinfo( else: encoded_host = host - gai_res = await get_asynclib().getaddrinfo( + gai_res = await get_async_backend().getaddrinfo( encoded_host, port, family=family, type=type, proto=proto, flags=flags ) return [ @@ -528,18 +590,18 @@ def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str .. seealso:: :func:`socket.getnameinfo` """ - return get_asynclib().getnameinfo(sockaddr, flags) + return get_async_backend().getnameinfo(sockaddr, flags) def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: """ Wait until the given socket has data to be read. - This does **NOT** work on Windows when using the asyncio backend with a proactor event loop - (default on py3.8+). + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). - .. warning:: Only use this on raw sockets that have not been wrapped by any higher level - constructs like socket streams! + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! :param sock: a socket object :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the @@ -548,18 +610,18 @@ def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: to become readable """ - return get_asynclib().wait_socket_readable(sock) + return get_async_backend().wait_socket_readable(sock) def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: """ Wait until the given socket can be written to. - This does **NOT** work on Windows when using the asyncio backend with a proactor event loop - (default on py3.8+). + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). - .. warning:: Only use this on raw sockets that have not been wrapped by any higher level - constructs like socket streams! + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! :param sock: a socket object :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the @@ -568,7 +630,7 @@ def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: to become writable """ - return get_asynclib().wait_socket_writable(sock) + return get_async_backend().wait_socket_writable(sock) # @@ -577,7 +639,7 @@ def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: def convert_ipv6_sockaddr( - sockaddr: tuple[str, int, int, int] | tuple[str, int] + sockaddr: tuple[str, int, int, int] | tuple[str, int], ) -> tuple[str, int]: """ Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. 
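A worked example of that conversion, matching the scope ID handling shown in the function body below:

from anyio._core._sockets import convert_ipv6_sockaddr

# A zero scope ID: the flow info and scope fields are simply dropped.
assert convert_ipv6_sockaddr(("::1", 8080, 0, 0)) == ("::1", 8080)
# A nonzero scope ID is appended to the host with a '%' separator.
assert convert_ipv6_sockaddr(("fe80::1", 80, 0, 3)) == ("fe80::1%3", 80)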
@@ -592,7 +654,7 @@ def convert_ipv6_sockaddr( """ # This is more complicated than it should be because of MyPy if isinstance(sockaddr, tuple) and len(sockaddr) == 4: - host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) + host, port, flowinfo, scope_id = sockaddr if scope_id: # PyPy (as of v7.3.11) leaves the interface name in the result, so # we discard it and only get the scope ID from the end @@ -604,4 +666,51 @@ def convert_ipv6_sockaddr( else: return host, port else: - return cast(Tuple[str, int], sockaddr) + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | bytes | None + if path is not None: + path_str = os.fspath(path) + + # Copied from pathlib... + try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EBADF, errno.ELOOP): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket diff --git a/site-packages/anyio/_core/_streams.py b/site-packages/anyio/_core/_streams.py index 54ea2b2..aa6b0c2 100644 --- a/site-packages/anyio/_core/_streams.py +++ b/site-packages/anyio/_core/_streams.py @@ -1,7 +1,8 @@ from __future__ import annotations import math -from typing import Any, TypeVar, overload +from typing import Tuple, TypeVar +from warnings import warn from ..streams.memory import ( MemoryObjectReceiveStream, @@ -12,36 +13,40 @@ from ..streams.memory import ( T_Item = TypeVar("T_Item") -@overload -def create_memory_object_stream( - max_buffer_size: float = ..., -) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: - ... - - -@overload -def create_memory_object_stream( - max_buffer_size: float = ..., item_type: type[T_Item] = ... -) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: - ... - - -def create_memory_object_stream( - max_buffer_size: float = 0, item_type: type[T_Item] | None = None -) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: +class create_memory_object_stream( + Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): """ Create a memory object stream. - :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking - :param item_type: type of item, for marking the streams with the right generic type for - static typing (not used at run time) + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. 
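A short sketch of the AnyIO 4 style that the deprecation note above points to:

import anyio
from anyio import create_memory_object_stream

async def main() -> None:
    # The generic subscript replaces the old item_type argument.
    send, receive = create_memory_object_stream[int](max_buffer_size=5)
    async with send, receive:
        await send.send(42)
        print(await receive.receive())

anyio.run(main)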
:return: a tuple of (send stream, receive stream) """ - if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): - raise ValueError("max_buffer_size must be either an integer or math.inf") - if max_buffer_size < 0: - raise ValueError("max_buffer_size cannot be negative") - state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size) - return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state) + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/site-packages/anyio/_core/_subprocesses.py b/site-packages/anyio/_core/_subprocesses.py index 1a26ac8..c161029 100644 --- a/site-packages/anyio/_core/_subprocesses.py +++ b/site-packages/anyio/_core/_subprocesses.py @@ -1,19 +1,13 @@ from __future__ import annotations +from collections.abc import AsyncIterable, Mapping, Sequence from io import BytesIO from os import PathLike from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess -from typing import ( - IO, - Any, - AsyncIterable, - Mapping, - Sequence, - cast, -) +from typing import IO, Any, cast from ..abc import Process -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend from ._tasks import create_task_group @@ -33,22 +27,24 @@ async def run_process( .. seealso:: :func:`subprocess.run` - :param command: either a string to pass to the shell, or an iterable of strings containing the - executable name or path and its arguments + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments :param input: bytes passed to the standard input of the subprocess - :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or - :data:`subprocess.STDOUT` - :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process - terminates with a return code other than 0 - :param cwd: If not ``None``, change the working directory to this before running the command - :param env: if not ``None``, this mapping replaces the inherited environment variables from the - parent process - :param start_new_session: if ``true`` the setsid() system call will be made in the child - process prior to the execution of the subprocess. 
(POSIX only) + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) :return: an object representing the completed process - :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a - nonzero return code + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code """ @@ -106,8 +102,8 @@ async def open_process( .. seealso:: :class:`subprocess.Popen` - :param command: either a string to pass to the shell, or an iterable of strings containing the - executable name or path and its arguments + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a file-like object, or ``None`` :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, @@ -115,21 +111,32 @@ async def open_process( :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, :data:`subprocess.STDOUT`, a file-like object, or ``None`` :param cwd: If not ``None``, the working directory is changed before executing - :param env: If env is not ``None``, it must be a mapping that defines the environment - variables for the new process - :param start_new_session: if ``true`` the setsid() system call will be made in the child - process prior to the execution of the subprocess. (POSIX only) + :param env: If env is not ``None``, it must be a mapping that defines the + environment variables for the new process + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. 
(POSIX only) :return: an asynchronous process object """ - shell = isinstance(command, str) - return await get_asynclib().open_process( - command, - shell=shell, - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) + if isinstance(command, (str, bytes)): + return await get_async_backend().open_process( + command, + shell=True, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + return await get_async_backend().open_process( + command, + shell=False, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) diff --git a/site-packages/anyio/_core/_synchronization.py b/site-packages/anyio/_core/_synchronization.py index 783570c..33172dc 100644 --- a/site-packages/anyio/_core/_synchronization.py +++ b/site-packages/anyio/_core/_synchronization.py @@ -1,13 +1,14 @@ from __future__ import annotations +import math from collections import deque from dataclasses import dataclass from types import TracebackType -from warnings import warn + +from sniffio import AsyncLibraryNotFoundError from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled -from ._compat import DeprecatedAwaitable -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend from ._exceptions import BusyResourceError, WouldBlock from ._tasks import CancelScope from ._testing import TaskInfo, get_current_task @@ -27,9 +28,10 @@ class CapacityLimiterStatistics: """ :ivar int borrowed_tokens: number of tokens currently borrowed by tasks :ivar float total_tokens: total number of available tokens - :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this - limiter - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from + this limiter + :ivar int tasks_waiting: number of tasks waiting on + :meth:`~.CapacityLimiter.acquire` or :meth:`~.CapacityLimiter.acquire_on_behalf_of` """ @@ -43,8 +45,8 @@ class CapacityLimiterStatistics: class LockStatistics: """ :ivar bool locked: flag indicating if this lock is locked or not - :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not - held by any task) + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` """ @@ -57,7 +59,8 @@ class LockStatistics: class ConditionStatistics: """ :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` - :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` """ tasks_waiting: int @@ -76,9 +79,12 @@ class SemaphoreStatistics: class Event: def __new__(cls) -> Event: - return get_asynclib().Event() + try: + return get_async_backend().create_event() + except AsyncLibraryNotFoundError: + return EventAdapter() - def set(self) -> DeprecatedAwaitable: + def set(self) -> None: """Set the flag, notifying all listeners.""" raise NotImplementedError @@ -90,7 +96,8 @@ class Event: """ Wait until the flag has been set. - If the flag has already been set when this method is called, it returns immediately. 
+ If the flag has already been set when this method is called, it returns + immediately. """ raise NotImplementedError @@ -100,6 +107,35 @@ class Event: raise NotImplementedError +class EventAdapter(Event): + _internal_event: Event | None = None + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + + return self._internal_event + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._internal_event is not None and self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + class Lock: _owner_task: TaskInfo | None = None @@ -161,7 +197,7 @@ class Lock: self._owner_task = task - def release(self) -> DeprecatedAwaitable: + def release(self) -> None: """Release the lock.""" if self._owner_task != get_current_task(): raise RuntimeError("The current task is not holding this lock") @@ -172,8 +208,6 @@ class Lock: else: del self._owner_task - return DeprecatedAwaitable(self.release) - def locked(self) -> bool: """Return True if the lock is currently held.""" return self._owner_task is not None @@ -224,10 +258,9 @@ class Condition: self._lock.acquire_nowait() self._owner_task = get_current_task() - def release(self) -> DeprecatedAwaitable: + def release(self) -> None: """Release the underlying lock.""" self._lock.release() - return DeprecatedAwaitable(self.release) def locked(self) -> bool: """Return True if the lock is set.""" @@ -344,7 +377,7 @@ class Semaphore: self._value -= 1 - def release(self) -> DeprecatedAwaitable: + def release(self) -> None: """Increment the semaphore value.""" if self._max_value is not None and self._value == self._max_value: raise ValueError("semaphore released too many times") @@ -354,8 +387,6 @@ class Semaphore: else: self._value += 1 - return DeprecatedAwaitable(self.release) - @property def value(self) -> int: """The current value of the semaphore.""" @@ -377,7 +408,10 @@ class Semaphore: class CapacityLimiter: def __new__(cls, total_tokens: float) -> CapacityLimiter: - return get_asynclib().CapacityLimiter(total_tokens) + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except AsyncLibraryNotFoundError: + return CapacityLimiterAdapter(total_tokens) async def __aenter__(self) -> None: raise NotImplementedError @@ -396,7 +430,8 @@ class CapacityLimiter: The total number of tokens available for borrowing. This is a read-write property. If the total number of tokens is increased, the - proportionate number of tasks waiting on this limiter will be granted their tokens. + proportionate number of tasks waiting on this limiter will be granted their + tokens. .. versionchanged:: 3.0 The property is now writable. @@ -408,14 +443,6 @@ class CapacityLimiter: def total_tokens(self, value: float) -> None: raise NotImplementedError - async def set_total_tokens(self, value: float) -> None: - warn( - "CapacityLimiter.set_total_tokens has been deprecated. 
Set the value of the" - '"total_tokens" attribute directly.', - DeprecationWarning, - ) - self.total_tokens = value - @property def borrowed_tokens(self) -> int: """The number of tokens that have currently been borrowed.""" @@ -426,16 +453,17 @@ class CapacityLimiter: """The number of tokens currently available to be borrowed""" raise NotImplementedError - def acquire_nowait(self) -> DeprecatedAwaitable: + def acquire_nowait(self) -> None: """ - Acquire a token for the current task without waiting for one to become available. + Acquire a token for the current task without waiting for one to become + available. :raises ~anyio.WouldBlock: if there are no tokens available for borrowing """ raise NotImplementedError - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: """ Acquire a token without waiting for one to become available. @@ -447,7 +475,8 @@ class CapacityLimiter: async def acquire(self) -> None: """ - Acquire a token for the current task, waiting if necessary for one to become available. + Acquire a token for the current task, waiting if necessary for one to become + available. """ raise NotImplementedError @@ -464,7 +493,9 @@ class CapacityLimiter: def release(self) -> None: """ Release the token held by the current task. - :raises RuntimeError: if the current task has not borrowed a token from this limiter. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. """ raise NotImplementedError @@ -473,7 +504,8 @@ class CapacityLimiter: """ Release the token held by the given borrower. - :raises RuntimeError: if the borrower has not borrowed a token from this limiter. + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. """ raise NotImplementedError @@ -488,96 +520,115 @@ class CapacityLimiter: raise NotImplementedError -def create_lock() -> Lock: - """ - Create an asynchronous lock. +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None - :return: a lock object + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) - .. deprecated:: 3.0 - Use :class:`~Lock` directly. + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens - """ - warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning) - return Lock() + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + return self._internal_limiter -def create_condition(lock: Lock | None = None) -> Condition: - """ - Create an asynchronous condition. + async def __aenter__(self) -> None: + await self._limiter.__aenter__() - :param lock: the lock to base the condition object on - :return: a condition object + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) - .. deprecated:: 3.0 - Use :class:`~Condition` directly. 
+ @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens - """ - warn( - "create_condition() is deprecated -- use Condition() directly", - DeprecationWarning, - ) - return Condition(lock=lock) + return self._internal_limiter.total_tokens + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") -def create_event() -> Event: - """ - Create an asynchronous event object. + if self._internal_limiter is None: + self._total_tokens = value + return - :return: an event object + self._limiter.total_tokens = value - .. deprecated:: 3.0 - Use :class:`~Event` directly. + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 - """ - warn("create_event() is deprecated -- use Event() directly", DeprecationWarning) - return get_asynclib().Event() + return self._internal_limiter.borrowed_tokens + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens -def create_semaphore(value: int, *, max_value: int | None = None) -> Semaphore: - """ - Create an asynchronous semaphore. + return self._internal_limiter.available_tokens - :param value: the semaphore's initial value - :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the - semaphore's value would exceed this number - :return: a semaphore object + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() - .. deprecated:: 3.0 - Use :class:`~Semaphore` directly. + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) - """ - warn( - "create_semaphore() is deprecated -- use Semaphore() directly", - DeprecationWarning, - ) - return Semaphore(value, max_value=max_value) + async def acquire(self) -> None: + await self._limiter.acquire() + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) -def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: - """ - Create a capacity limiter. + def release(self) -> None: + self._limiter.release() - :param total_tokens: the total number of tokens available for borrowing (can be an integer or - :data:`math.inf`) - :return: a capacity limiter object + def release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) - .. deprecated:: 3.0 - Use :class:`~CapacityLimiter` directly. + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) - """ - warn( - "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly", - DeprecationWarning, - ) - return get_asynclib().CapacityLimiter(total_tokens) + return self._internal_limiter.statistics() class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while a previous entry has not yet exited will + trigger :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g.
"Another task is already {action} this resource") + """ + __slots__ = "action", "_guarded" - def __init__(self, action: str): - self.action = action + def __init__(self, action: str = "using"): + self.action: str = action self._guarded = False def __enter__(self) -> None: diff --git a/site-packages/anyio/_core/_tasks.py b/site-packages/anyio/_core/_tasks.py index e9d9c2b..2f21ea2 100644 --- a/site-packages/anyio/_core/_tasks.py +++ b/site-packages/anyio/_core/_tasks.py @@ -1,16 +1,12 @@ from __future__ import annotations import math +from collections.abc import Generator +from contextlib import contextmanager from types import TracebackType -from warnings import warn from ..abc._tasks import TaskGroup, TaskStatus -from ._compat import ( - DeprecatedAsyncContextManager, - DeprecatedAwaitable, - DeprecatedAwaitableFloat, -) -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend class _IgnoredTaskStatus(TaskStatus[object]): @@ -21,7 +17,7 @@ class _IgnoredTaskStatus(TaskStatus[object]): TASK_STATUS_IGNORED = _IgnoredTaskStatus() -class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): +class CancelScope: """ Wraps a unit of work that can be made separately cancellable. @@ -32,9 +28,9 @@ class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): def __new__( cls, *, deadline: float = math.inf, shield: bool = False ) -> CancelScope: - return get_asynclib().CancelScope(shield=shield, deadline=deadline) + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) - def cancel(self) -> DeprecatedAwaitable: + def cancel(self) -> None: """Cancel this scope immediately.""" raise NotImplementedError @@ -57,6 +53,19 @@ class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): """``True`` if :meth:`cancel` has been called.""" raise NotImplementedError + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). + + """ + raise NotImplementedError + @property def shield(self) -> bool: """ @@ -83,81 +92,52 @@ class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): raise NotImplementedError -def open_cancel_scope(*, shield: bool = False) -> CancelScope: +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: """ - Open a cancel scope. + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a cancel scope - - .. deprecated:: 3.0 - Use :class:`~CancelScope` directly. 
- - """ - warn( - "open_cancel_scope() is deprecated -- use CancelScope() directly", - DeprecationWarning, - ) - return get_asynclib().CancelScope(shield=shield) - - -class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]): - def __init__(self, cancel_scope: CancelScope): - self._cancel_scope = cancel_scope - - def __enter__(self) -> CancelScope: - return self._cancel_scope.__enter__() - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb) - if self._cancel_scope.cancel_called: - raise TimeoutError - - return retval - - -def fail_after(delay: float | None, shield: bool = False) -> FailAfterContextManager: - """ - Create a context manager which raises a :class:`TimeoutError` if does not finish in time. - - :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to - disable the timeout + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout :param shield: ``True`` to shield the cancel scope from external cancellation :return: a context manager that yields a cancel scope :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] """ - deadline = ( - (get_asynclib().current_time() + delay) if delay is not None else math.inf - ) - cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield) - return FailAfterContextManager(cancel_scope) + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: """ Create a cancel scope with a deadline that expires after the given delay. - :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None`` - to disable the timeout + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout :param shield: ``True`` to shield the cancel scope from external cancellation :return: a cancel scope """ deadline = ( - (get_asynclib().current_time() + delay) if delay is not None else math.inf + (get_async_backend().current_time() + delay) if delay is not None else math.inf ) - return get_asynclib().CancelScope(deadline=deadline, shield=shield) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) -def current_effective_deadline() -> DeprecatedAwaitableFloat: +def current_effective_deadline() -> float: """ - Return the nearest deadline among all the cancel scopes effective for the current task. + Return the nearest deadline among all the cancel scopes effective for the current + task. 
:return: a clock value from the event loop's internal clock (or ``float('inf')`` if there is no deadline in effect, or ``float('-inf')`` if the current scope has @@ -165,9 +145,7 @@ def current_effective_deadline() -> DeprecatedAwaitableFloat: :rtype: float """ - return DeprecatedAwaitableFloat( - get_asynclib().current_effective_deadline(), current_effective_deadline - ) + return get_async_backend().current_effective_deadline() def create_task_group() -> TaskGroup: @@ -177,4 +155,4 @@ def create_task_group() -> TaskGroup: :return: a task group """ - return get_asynclib().TaskGroup() + return get_async_backend().create_task_group() diff --git a/site-packages/anyio/_core/_testing.py b/site-packages/anyio/_core/_testing.py index c8191b3..1dae3b1 100644 --- a/site-packages/anyio/_core/_testing.py +++ b/site-packages/anyio/_core/_testing.py @@ -1,9 +1,9 @@ from __future__ import annotations -from typing import Any, Awaitable, Generator +from collections.abc import Awaitable, Generator +from typing import Any -from ._compat import DeprecatedAwaitableList, _warn_deprecation -from ._eventloop import get_asynclib +from ._eventloop import get_async_backend class TaskInfo: @@ -45,13 +45,6 @@ class TaskInfo: def __repr__(self) -> str: return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" - def __await__(self) -> Generator[None, None, TaskInfo]: - _warn_deprecation(self) - if False: - yield - - return self - def _unwrap(self) -> TaskInfo: return self @@ -63,20 +56,19 @@ def get_current_task() -> TaskInfo: :return: a representation of the current task """ - return get_asynclib().get_current_task() + return get_async_backend().get_current_task() -def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: +def get_running_tasks() -> list[TaskInfo]: """ Return a list of running tasks in the current event loop. :return: a list of task info objects """ - tasks = get_asynclib().get_running_tasks() - return DeprecatedAwaitableList(tasks, func=get_running_tasks) + return get_async_backend().get_running_tasks() async def wait_all_tasks_blocked() -> None: """Wait until all other tasks are waiting for something.""" - await get_asynclib().wait_all_tasks_blocked() + await get_async_backend().wait_all_tasks_blocked() diff --git a/site-packages/anyio/_core/_typedattr.py b/site-packages/anyio/_core/_typedattr.py index bf9202e..74c6b8f 100644 --- a/site-packages/anyio/_core/_typedattr.py +++ b/site-packages/anyio/_core/_typedattr.py @@ -1,15 +1,10 @@ from __future__ import annotations -import sys -from typing import Any, Callable, Mapping, TypeVar, overload +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload from ._exceptions import TypedAttributeLookupError -if sys.version_info >= (3, 8): - from typing import final -else: - from typing_extensions import final - T_Attr = TypeVar("T_Attr") T_Default = TypeVar("T_Default") undefined = object() @@ -44,11 +39,12 @@ class TypedAttributeProvider: @property def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: """ - A mapping of the extra attributes to callables that return the corresponding values. + A mapping of the extra attributes to callables that return the corresponding + values. - If the provider wraps another provider, the attributes from that wrapper should also be - included in the returned mapping (but the wrapper may override the callables from the - wrapped instance). 
+ If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). """ return {} @@ -68,10 +64,12 @@ class TypedAttributeProvider: Return the value of the given typed extra attribute. - :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for - :param default: the value that should be returned if no value is found for the attribute - :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was - given + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given """ try: diff --git a/site-packages/anyio/abc/__init__.py b/site-packages/anyio/abc/__init__.py index 72c34e5..1ca0fcf 100644 --- a/site-packages/anyio/abc/__init__.py +++ b/site-packages/anyio/abc/__init__.py @@ -1,86 +1,53 @@ from __future__ import annotations -__all__ = ( - "AsyncResource", - "IPAddressType", - "IPSockAddrType", - "SocketAttribute", - "SocketStream", - "SocketListener", - "UDPSocket", - "UNIXSocketStream", - "UDPPacketType", - "ConnectedUDPSocket", - "UnreliableObjectReceiveStream", - "UnreliableObjectSendStream", - "UnreliableObjectStream", - "ObjectReceiveStream", - "ObjectSendStream", - "ObjectStream", - "ByteReceiveStream", - "ByteSendStream", - "ByteStream", - "AnyUnreliableByteReceiveStream", - "AnyUnreliableByteSendStream", - "AnyUnreliableByteStream", - "AnyByteReceiveStream", - "AnyByteSendStream", - "AnyByteStream", - "Listener", - "Process", - "Event", - "Condition", - "Lock", - "Semaphore", - "CapacityLimiter", - "CancelScope", - "TaskGroup", - "TaskStatus", - "TestRunner", - "BlockingPortal", -) - from typing import Any -from ._resources import AsyncResource -from ._sockets import ( - ConnectedUDPSocket, - IPAddressType, - IPSockAddrType, - SocketAttribute, - SocketListener, - SocketStream, - UDPPacketType, - UDPSocket, - UNIXSocketStream, -) -from ._streams import ( - AnyByteReceiveStream, - AnyByteSendStream, - AnyByteStream, - AnyUnreliableByteReceiveStream, - AnyUnreliableByteSendStream, - AnyUnreliableByteStream, - ByteReceiveStream, - ByteSendStream, - ByteStream, - Listener, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, - UnreliableObjectReceiveStream, - UnreliableObjectSendStream, - UnreliableObjectStream, -) -from ._subprocesses import Process -from ._tasks import TaskGroup, TaskStatus -from ._testing import TestRunner +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as 
AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner # Re-exported here, for backwards compatibility # isort: off -from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore -from .._core._tasks import CancelScope -from ..from_thread import BlockingPortal +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal # Re-export imports so they look like they live directly in this package key: str diff --git a/site-packages/anyio/abc/_eventloop.py b/site-packages/anyio/abc/_eventloop.py new file mode 100644 index 0000000..4470d83 --- /dev/null +++ b/site-packages/anyio/abc/_eventloop.py @@ -0,0 +1,392 @@ +from __future__ import annotations + +import math +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncIterator, Awaitable, Mapping +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind, socket +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + ContextManager, + Sequence, + TypeVar, + overload, +) + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from typing import Literal + + from .._core._synchronization import CapacityLimiter, Event + from .._core._tasks import CancelScope + from .._core._testing import TaskInfo + from ..from_thread import BlockingPortal + from ._sockets import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, + ) + from ._subprocesses import Process + from ._tasks import TaskGroup + from ._testing import TestRunner + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +class AsyncBackend(metaclass=ABCMeta): + @classmethod + @abstractmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. 
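As an aside, this is roughly how the ``options`` dict described below reaches a backend's ``run()`` from the public API; the ``debug`` flag is an asyncio-specific example option:

import anyio

async def main() -> None:
    await anyio.sleep(0)

# anyio.run() forwards backend_options as the `options` dict.
anyio.run(main, backend="asyncio", backend_options={"debug": True})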
+ + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param kwargs: keyword arguments to ``func`` + :param options: keyword arguments to call the backend ``run()`` implementation + with + :return: the return value of the coroutine function + """ + + @classmethod + @abstractmethod + def current_token(cls) -> object: + """ + Return a token object identifying the running event loop. + + :return: a backend-specific token, accepted by :meth:`run_sync_from_thread` + and :meth:`run_async_from_thread` to target this loop + """ + + @classmethod + @abstractmethod + def current_time(cls) -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + """ + + @classmethod + @abstractmethod + def cancelled_exception_class(cls) -> type[BaseException]: + """Return the exception class that is raised in a task if it's cancelled.""" + + @classmethod + @abstractmethod + async def checkpoint(cls) -> None: + """ + Check if the task has been cancelled, and allow rescheduling of other tasks. + + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. + + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task.
+ + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @overload + async def open_process( + cls, + command: str | bytes, + *, + shell: Literal[True], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @overload + async def open_process( + cls, + command: Sequence[str | bytes], + *, + shell: Literal[False], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: + ... 
+ + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: + ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_socket_readable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + async def wait_socket_writable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> list[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/site-packages/anyio/abc/_resources.py b/site-packages/anyio/abc/_resources.py index e0a283f..9693835 100644 --- a/site-packages/anyio/abc/_resources.py +++ b/site-packages/anyio/abc/_resources.py @@ -11,8 +11,8 @@ class AsyncResource(metaclass=ABCMeta): """ Abstract base class for all closeable asynchronous resources. - Works as an asynchronous context manager which returns the instance itself on enter, and calls - :meth:`aclose` on exit. + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. 
""" async def __aenter__(self: T) -> T: diff --git a/site-packages/anyio/abc/_sockets.py b/site-packages/anyio/abc/_sockets.py index 6aac5f7..b321225 100644 --- a/site-packages/anyio/abc/_sockets.py +++ b/site-packages/anyio/abc/_sockets.py @@ -2,21 +2,14 @@ from __future__ import annotations import socket from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping from contextlib import AsyncExitStack from io import IOBase from ipaddress import IPv4Address, IPv6Address from socket import AddressFamily -from typing import ( - Any, - Callable, - Collection, - Mapping, - Tuple, - TypeVar, - Union, -) +from types import TracebackType +from typing import Any, Tuple, TypeVar, Union -from .._core._tasks import create_task_group from .._core._typedattr import ( TypedAttributeProvider, TypedAttributeSet, @@ -29,9 +22,23 @@ IPAddressType = Union[str, IPv4Address, IPv6Address] IPSockAddrType = Tuple[str, int] SockAddrType = Union[IPSockAddrType, str] UDPPacketType = Tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType = Tuple[bytes, str] T_Retval = TypeVar("T_Retval") +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return None + + class SocketAttribute(TypedAttributeSet): #: the address family of the underlying socket family: AddressFamily = typed_attribute() @@ -70,9 +77,9 @@ class _SocketProvider(TypedAttributeProvider): # Provide local and remote ports for IP based sockets if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): - attributes[ - SocketAttribute.local_port - ] = lambda: self._raw_socket.getsockname()[1] + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) if peername is not None: remote_port = peername[1] attributes[SocketAttribute.remote_port] = lambda: remote_port @@ -100,8 +107,8 @@ class UNIXSocketStream(SocketStream): Send file descriptors along with a message to the peer. :param message: a non-empty bytestring - :param fds: a collection of files (either numeric file descriptors or open file or socket - objects) + :param fds: a collection of files (either numeric file descriptors or open file + or socket objects) """ @abstractmethod @@ -131,9 +138,11 @@ class SocketListener(Listener[SocketStream], _SocketProvider): handler: Callable[[SocketStream], Any], task_group: TaskGroup | None = None, ) -> None: - async with AsyncExitStack() as exit_stack: + from .. import create_task_group + + async with AsyncExitStack() as stack: if task_group is None: - task_group = await exit_stack.enter_async_context(create_task_group()) + task_group = await stack.enter_async_context(create_task_group()) while True: stream = await self.accept() @@ -148,7 +157,10 @@ class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): """ async def sendto(self, data: bytes, host: str, port: int) -> None: - """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).""" + """ + Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). + + """ return await self.send((data, (host, port))) @@ -158,3 +170,25 @@ class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): Supports all relevant extra attributes from :class:`~SocketAttribute`. 
""" + + +class UNIXDatagramSocket( + UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider +): + """ + Represents an unconnected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, path: str) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" + return await self.send((data, path)) + + +class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ diff --git a/site-packages/anyio/abc/_streams.py b/site-packages/anyio/abc/_streams.py index 4fa7ccc..8c63868 100644 --- a/site-packages/anyio/abc/_streams.py +++ b/site-packages/anyio/abc/_streams.py @@ -1,7 +1,8 @@ from __future__ import annotations from abc import abstractmethod -from typing import Any, Callable, Generic, TypeVar, Union +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union from .._core._exceptions import EndOfStream from .._core._typedattr import TypedAttributeProvider @@ -19,11 +20,11 @@ class UnreliableObjectReceiveStream( """ An interface for receiving objects. - This interface makes no guarantees that the received messages arrive in the order in which they - were sent, or that no messages are missed. + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. - Asynchronously iterating over objects of this type will yield objects matching the given type - parameter. + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. """ def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: @@ -54,8 +55,8 @@ class UnreliableObjectSendStream( """ An interface for sending objects. - This interface makes no guarantees that the messages sent will reach the recipient(s) in the - same order in which they were sent, or at all. + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. """ @abstractmethod @@ -75,22 +76,22 @@ class UnreliableObjectStream( UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] ): """ - A bidirectional message stream which does not guarantee the order or reliability of message - delivery. + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. """ class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): """ - A receive message stream which guarantees that messages are received in the same order in - which they were sent, and that no messages are missed. + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. """ class ObjectSendStream(UnreliableObjectSendStream[T_contra]): """ - A send message stream which guarantees that messages are delivered in the same order in which - they were sent, without missing any messages in the middle. + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. """ @@ -100,7 +101,8 @@ class ObjectStream( UnreliableObjectStream[T_Item], ): """ - A bidirectional message stream which guarantees the order and reliability of message delivery. 
+ A bidirectional message stream which guarantees the order and reliability of message + delivery. """ @abstractmethod @@ -108,8 +110,8 @@ class ObjectStream( """ Send an end-of-file indication to the peer. - You should not try to send any further data to this stream after calling this method. - This method is idempotent (does nothing on successive calls). + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). """ @@ -117,8 +119,8 @@ class ByteReceiveStream(AsyncResource, TypedAttributeProvider): """ An interface for receiving bytes from a single peer. - Iterating this byte stream will yield a byte string of arbitrary length, but no more than - 65536 bytes. + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. """ def __aiter__(self) -> ByteReceiveStream: @@ -135,8 +137,8 @@ class ByteReceiveStream(AsyncResource, TypedAttributeProvider): """ Receive at most ``max_bytes`` bytes from the peer. - .. note:: Implementors of this interface should not return an empty :class:`bytes` object, - and users should ignore them. + .. note:: Implementors of this interface should not return an empty + :class:`bytes` object, and users should ignore them. :param max_bytes: maximum number of bytes to receive :return: the received bytes @@ -164,8 +166,8 @@ class ByteStream(ByteReceiveStream, ByteSendStream): """ Send an end-of-file indication to the peer. - You should not try to send any further data to this stream after calling this method. - This method is idempotent (does nothing on successive calls). + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). """ @@ -190,14 +192,12 @@ class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): @abstractmethod async def serve( - self, - handler: Callable[[T_co], Any], - task_group: TaskGroup | None = None, + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None ) -> None: """ Accept incoming connections as they come in and start tasks to handle them. :param handler: a callable that will be used to handle each accepted connection - :param task_group: the task group that will be used to start tasks for handling each - accepted connection (if omitted, an ad-hoc task group will be created) + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) """ diff --git a/site-packages/anyio/abc/_subprocesses.py b/site-packages/anyio/abc/_subprocesses.py index 704b44a..ce0564c 100644 --- a/site-packages/anyio/abc/_subprocesses.py +++ b/site-packages/anyio/abc/_subprocesses.py @@ -59,8 +59,8 @@ class Process(AsyncResource): @abstractmethod def returncode(self) -> int | None: """ - The return code of the process. If the process has not yet terminated, this will be - ``None``. + The return code of the process. If the process has not yet terminated, this will + be ``None``. 
""" @property diff --git a/site-packages/anyio/abc/_tasks.py b/site-packages/anyio/abc/_tasks.py index e48d3c1..7ad4938 100644 --- a/site-packages/anyio/abc/_tasks.py +++ b/site-packages/anyio/abc/_tasks.py @@ -2,20 +2,21 @@ from __future__ import annotations import sys from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable from types import TracebackType -from typing import TYPE_CHECKING, Any, Awaitable, Callable, TypeVar, overload -from warnings import warn +from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload -if sys.version_info >= (3, 8): - from typing import Protocol +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack else: - from typing_extensions import Protocol + from typing_extensions import TypeVarTuple, Unpack if TYPE_CHECKING: - from anyio._core._tasks import CancelScope + from .._core._tasks import CancelScope T_Retval = TypeVar("T_Retval") T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") class TaskStatus(Protocol[T_contra]): @@ -45,35 +46,11 @@ class TaskGroup(metaclass=ABCMeta): cancel_scope: CancelScope - async def spawn( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> None: - """ - Start a new task in this task group. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - - .. deprecated:: 3.0 - Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you - can keep using this until AnyIO 4. - - """ - warn( - 'spawn() is deprecated -- use start_soon() (without the "await") instead', - DeprecationWarning, - ) - self.start_soon(func, *args, name=name) - @abstractmethod def start_soon( self, - func: Callable[..., Awaitable[Any]], - *args: object, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], name: object = None, ) -> None: """ @@ -100,7 +77,8 @@ class TaskGroup(metaclass=ABCMeta): :param args: positional arguments to call the function with :param name: name of the task, for the purposes of introspection and debugging :return: the value passed to ``task_status.started()`` - :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` .. versionadded:: 3.0 """ diff --git a/site-packages/anyio/abc/_testing.py b/site-packages/anyio/abc/_testing.py index ee2cff5..4d70b9e 100644 --- a/site-packages/anyio/abc/_testing.py +++ b/site-packages/anyio/abc/_testing.py @@ -2,33 +2,29 @@ from __future__ import annotations import types from abc import ABCMeta, abstractmethod -from collections.abc import AsyncGenerator, Iterable -from typing import Any, Callable, Coroutine, TypeVar +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar _T = TypeVar("_T") class TestRunner(metaclass=ABCMeta): """ - Encapsulates a running event loop. Every call made through this object will use the same event - loop. + Encapsulates a running event loop. Every call made through this object will use the + same event loop. 
""" def __enter__(self) -> TestRunner: return self + @abstractmethod def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None, ) -> bool | None: - self.close() - return None - - @abstractmethod - def close(self) -> None: - """Close the event loop.""" + ... @abstractmethod def run_asyncgen_fixture( diff --git a/site-packages/anyio/from_thread.py b/site-packages/anyio/from_thread.py index 6b76861..4a98703 100644 --- a/site-packages/anyio/from_thread.py +++ b/site-packages/anyio/from_thread.py @@ -1,36 +1,43 @@ from __future__ import annotations +import sys import threading -from asyncio import iscoroutine +from collections.abc import Awaitable, Callable, Generator from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait from contextlib import AbstractContextManager, contextmanager +from inspect import isawaitable from types import TracebackType from typing import ( Any, AsyncContextManager, - Awaitable, - Callable, ContextManager, - Generator, Generic, Iterable, TypeVar, cast, overload, ) -from warnings import warn from ._core import _eventloop -from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals from ._core._synchronization import Event from ._core._tasks import CancelScope, create_task_group +from .abc import AsyncBackend from .abc._tasks import TaskStatus +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + T_Retval = TypeVar("T_Retval") -T_co = TypeVar("T_co") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") -def run(func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval: +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] +) -> T_Retval: """ Call a coroutine function from a worker thread. @@ -40,24 +47,19 @@ def run(func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval: """ try: - asynclib = threadlocals.current_async_module + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token except AttributeError: - raise RuntimeError("This function can only be run from an AnyIO worker thread") + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None - return asynclib.run_async_from_thread(func, *args) + return async_backend.run_async_from_thread(func, args, token=token) -def run_async_from_thread( - func: Callable[..., Awaitable[T_Retval]], *args: object +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] ) -> T_Retval: - warn( - "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead", - DeprecationWarning, - ) - return run(func, *args) - - -def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: """ Call a function in the event loop thread from a worker thread. 
@@ -67,24 +69,19 @@ def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: """ try: - asynclib = threadlocals.current_async_module + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token except AttributeError: - raise RuntimeError("This function can only be run from an AnyIO worker thread") + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None - return asynclib.run_sync_from_thread(func, *args) - - -def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: - warn( - "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead", - DeprecationWarning, - ) - return run_sync(func, *args) + return async_backend.run_sync_from_thread(func, args, token=token) class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): - _enter_future: Future - _exit_future: Future + _enter_future: Future[T_co] + _exit_future: Future[bool | None] _exit_event: Event _exit_exc_info: tuple[ type[BaseException] | None, BaseException | None, TracebackType | None @@ -120,8 +117,7 @@ class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): def __enter__(self) -> T_co: self._enter_future = Future() self._exit_future = self._portal.start_task_soon(self.run_async_cm) - cm = self._enter_future.result() - return cast(T_co, cm) + return self._enter_future.result() def __exit__( self, @@ -146,7 +142,7 @@ class BlockingPortal: """An object that lets external threads run code in an asynchronous event loop.""" def __new__(cls) -> BlockingPortal: - return get_asynclib().BlockingPortal() + return get_async_backend().create_blocking_portal() def __init__(self) -> None: self._event_loop_thread_id: int | None = threading.get_ident() @@ -186,8 +182,8 @@ class BlockingPortal: This marks the portal as no longer accepting new calls and exits from :meth:`sleep_until_stopped`. 
- :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them - finish before returning + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning """ self._event_loop_thread_id = None @@ -196,9 +192,13 @@ class BlockingPortal: self._task_group.cancel_scope.cancel() async def _call_func( - self, func: Callable, args: tuple, kwargs: dict[str, Any], future: Future + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], ) -> None: - def callback(f: Future) -> None: + def callback(f: Future[T_Retval]) -> None: if f.cancelled() and self._event_loop_thread_id not in ( None, threading.get_ident(), @@ -206,17 +206,20 @@ class BlockingPortal: self.call(scope.cancel) try: - retval = func(*args, **kwargs) - if iscoroutine(retval): + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): with CancelScope() as scope: if future.cancelled(): scope.cancel() else: future.add_done_callback(callback) - retval = await retval + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable except self._cancelled_exc_class: future.cancel() + future.set_running_or_notify_cancel() except BaseException as exc: if not future.cancelled(): future.set_exception(exc) @@ -232,11 +235,11 @@ class BlockingPortal: def _spawn_task_from_thread( self, - func: Callable, - args: tuple, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], kwargs: dict[str, Any], name: object, - future: Future, + future: Future[T_Retval], ) -> None: """ Spawn a new task using the given callable. @@ -247,22 +250,30 @@ class BlockingPortal: :param args: positional arguments to be passed to the callable :param kwargs: keyword arguments to be passed to the callable :param name: name of the task (will be coerced to a string if not ``None``) - :param future: a future that will resolve to the return value of the callable, or the - exception raised during its execution + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution """ raise NotImplementedError @overload - def call(self, func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval: + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... @overload - def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... def call( - self, func: Callable[..., Awaitable[T_Retval] | T_Retval], *args: object + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], ) -> T_Retval: """ Call the given function in the event loop thread. @@ -270,82 +281,41 @@ class BlockingPortal: If the callable returns a coroutine object, it is awaited on. 
:param func: any callable - :raises RuntimeError: if the portal is not running or if this method is called from within - the event loop thread + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread """ return cast(T_Retval, self.start_task_soon(func, *args).result()) @overload - def spawn_task( + def start_task_soon( self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], name: object = None, ) -> Future[T_Retval]: ... @overload - def spawn_task( - self, func: Callable[..., T_Retval], *args: object, name: object = None + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, ) -> Future[T_Retval]: ... - def spawn_task( + def start_task_soon( self, - func: Callable[..., Awaitable[T_Retval] | T_Retval], - *args: object, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], name: object = None, ) -> Future[T_Retval]: """ Start a task in the portal's task group. - :param func: the target coroutine function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a future that resolves with the return value of the callable if the task completes - successfully, or with the exception raised in the task - :raises RuntimeError: if the portal is not running or if this method is called from within - the event loop thread - - .. versionadded:: 2.1 - .. deprecated:: 3.0 - Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you - can keep using this until AnyIO 4. - - """ - warn( - "spawn_task() is deprecated -- use start_task_soon() instead", - DeprecationWarning, - ) - return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] - - @overload - def start_task_soon( - self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - ... - - @overload - def start_task_soon( - self, func: Callable[..., T_Retval], *args: object, name: object = None - ) -> Future[T_Retval]: - ... - - def start_task_soon( - self, - func: Callable[..., Awaitable[T_Retval] | T_Retval], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - """ - Start a task in the portal's task group. - - The task will be run inside a cancel scope which can be cancelled by cancelling the - returned future. + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. :param func: the target function :param args: positional arguments passed to ``func`` @@ -360,13 +330,16 @@ class BlockingPortal: """ self._check_running() - f: Future = Future() + f: Future[T_Retval] = Future() self._spawn_task_from_thread(func, args, {}, name, f) return f def start_task( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> tuple[Future[Any], Any]: + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: """ Start a task in the portal's task group and wait until it signals for readiness. 
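In practice these portal methods are usually reached through ``start_blocking_portal()`` (defined later in this module), which runs an event loop in a background thread so that purely synchronous code can drive coroutine functions. A brief sketch, assuming AnyIO 4.x; ``greet`` is an illustrative stand-in::

    import anyio
    from anyio.from_thread import start_blocking_portal

    async def greet(name: str) -> str:
        await anyio.sleep(0.1)
        return f"hello, {name}"

    with start_blocking_portal() as portal:
        # call() blocks until the coroutine completes and returns its value
        print(portal.call(greet, "world"))

        # start_task_soon() returns a concurrent.futures.Future instead
        future = portal.start_task_soon(greet, "again")
        print(future.result())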
@@ -378,13 +351,13 @@ class BlockingPortal: :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the value passed to ``task_status.started()`` from within the target function - :rtype: tuple[concurrent.futures.Future[Any], Any] + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] .. versionadded:: 3.0 """ - def task_done(future: Future) -> None: + def task_done(future: Future[T_Retval]) -> None: if not task_status_future.done(): if future.cancelled(): task_status_future.cancel() @@ -410,8 +383,8 @@ class BlockingPortal: """ Wrap an async context manager as a synchronous context manager via this portal. - Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the - middle until the synchronous context manager exits. + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. :param cm: an asynchronous context manager :return: a synchronous context manager @@ -422,25 +395,6 @@ class BlockingPortal: return _BlockingAsyncContextManager(cm, self) -def create_blocking_portal() -> BlockingPortal: - """ - Create a portal for running functions in the event loop thread from external threads. - - Use this function in asynchronous code when you need to allow external threads access to the - event loop where your asynchronous code is currently running. - - .. deprecated:: 3.0 - Use :class:`.BlockingPortal` directly. - - """ - warn( - "create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() " - "directly", - DeprecationWarning, - ) - return BlockingPortal() - - @contextmanager def start_blocking_portal( backend: str = "asyncio", backend_options: dict[str, Any] | None = None @@ -468,8 +422,8 @@ def start_blocking_portal( future: Future[BlockingPortal] = Future() with ThreadPoolExecutor(1) as executor: run_future = executor.submit( - _eventloop.run, - run_portal, # type: ignore[arg-type] + _eventloop.run, # type: ignore[arg-type] + run_portal, backend=backend, backend_options=backend_options, ) @@ -498,3 +452,25 @@ def start_blocking_portal( pass run_future.result() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. + + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + async_backend: AsyncBackend = threadlocals.current_async_backend + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + async_backend.check_cancelled() diff --git a/site-packages/anyio/lowlevel.py b/site-packages/anyio/lowlevel.py index 0e908c6..a9e10f4 100644 --- a/site-packages/anyio/lowlevel.py +++ b/site-packages/anyio/lowlevel.py @@ -1,17 +1,11 @@ from __future__ import annotations import enum -import sys from dataclasses import dataclass -from typing import Any, Generic, TypeVar, overload +from typing import Any, Generic, Literal, TypeVar, overload from weakref import WeakKeyDictionary -from ._core._eventloop import get_asynclib - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal +from ._core._eventloop import get_async_backend T = TypeVar("T") D = TypeVar("D") @@ -30,7 +24,7 @@ async def checkpoint() -> None: .. 
versionadded:: 3.0 """ - await get_asynclib().checkpoint() + await get_async_backend().checkpoint() async def checkpoint_if_cancelled() -> None: @@ -42,7 +36,7 @@ async def checkpoint_if_cancelled() -> None: .. versionadded:: 3.0 """ - await get_asynclib().checkpoint_if_cancelled() + await get_async_backend().checkpoint_if_cancelled() async def cancel_shielded_checkpoint() -> None: @@ -58,12 +52,16 @@ async def cancel_shielded_checkpoint() -> None: .. versionadded:: 3.0 """ - await get_asynclib().cancel_shielded_checkpoint() + await get_async_backend().cancel_shielded_checkpoint() def current_token() -> object: - """Return a backend specific token object that can be used to get back to the event loop.""" - return get_asynclib().current_token() + """ + Return a backend specific token object that can be used to get back to the event + loop. + + """ + return get_async_backend().current_token() _run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() @@ -101,9 +99,7 @@ class RunVar(Generic[T]): _token_wrappers: set[_TokenWrapper] = set() def __init__( - self, - name: str, - default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET, + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET ): self._name = name self._default = default @@ -111,18 +107,11 @@ class RunVar(Generic[T]): @property def _current_vars(self) -> dict[str, T]: token = current_token() - while True: - try: - return _run_vars[token] - except TypeError: - # Happens when token isn't weak referable (TrioToken). - # This workaround does mean that some memory will leak on Trio until the problem - # is fixed on their end. - token = _TokenWrapper(token) - self._token_wrappers.add(token) - except KeyError: - run_vars = _run_vars[token] = {} - return run_vars + try: + return _run_vars[token] + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars @overload def get(self, default: D) -> T | D: diff --git a/site-packages/anyio/pytest_plugin.py b/site-packages/anyio/pytest_plugin.py index 044ce69..a8dd6f3 100644 --- a/site-packages/anyio/pytest_plugin.py +++ b/site-packages/anyio/pytest_plugin.py @@ -1,16 +1,19 @@ from __future__ import annotations -from contextlib import contextmanager +from collections.abc import Iterator +from contextlib import ExitStack, contextmanager from inspect import isasyncgenfunction, iscoroutinefunction -from typing import Any, Dict, Generator, Tuple, cast +from typing import Any, Dict, Tuple, cast import pytest import sniffio -from ._core._eventloop import get_all_backends, get_asynclib +from ._core._eventloop import get_all_backends, get_async_backend from .abc import TestRunner _current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: @@ -26,27 +29,31 @@ def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: @contextmanager def get_runner( backend_name: str, backend_options: dict[str, Any] -) -> Generator[TestRunner, object, None]: - global _current_runner - if _current_runner: - yield _current_runner - return +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = 
sniffio.current_async_library_cvar.set(backend_name) + _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) - asynclib = get_asynclib(backend_name) - token = None - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async library - token = sniffio.current_async_library_cvar.set(backend_name) - - try: backend_options = backend_options or {} - with asynclib.TestRunner(**backend_options) as runner: - _current_runner = runner - yield runner + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner finally: - _current_runner = None - if token: - sniffio.current_async_library_cvar.reset(token) + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None def pytest_configure(config: Any) -> None: @@ -69,8 +76,8 @@ def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: else: yield runner.run_fixture(func, kwargs) - # Only apply this to coroutine functions and async generator functions in requests that involve - # the anyio_backend fixture + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture func = fixturedef.func if isasyncgenfunction(func) or iscoroutinefunction(func): if "anyio_backend" in request.fixturenames: @@ -121,7 +128,7 @@ def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: return None -@pytest.fixture(params=get_all_backends()) +@pytest.fixture(scope="module", params=get_all_backends()) def anyio_backend(request: Any) -> Any: return request.param diff --git a/site-packages/anyio/streams/buffered.py b/site-packages/anyio/streams/buffered.py index 11474c1..f5d5e83 100644 --- a/site-packages/anyio/streams/buffered.py +++ b/site-packages/anyio/streams/buffered.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable, Mapping from dataclasses import dataclass, field -from typing import Any, Callable, Mapping +from typing import Any from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead from ..abc import AnyByteReceiveStream, ByteReceiveStream @@ -10,8 +11,8 @@ from ..abc import AnyByteReceiveStream, ByteReceiveStream @dataclass(eq=False) class BufferedByteReceiveStream(ByteReceiveStream): """ - Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving - capabilities in the form of a byte stream. + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. 
""" receive_stream: AnyByteReceiveStream @@ -42,8 +43,8 @@ class BufferedByteReceiveStream(ByteReceiveStream): elif isinstance(self.receive_stream, ByteReceiveStream): return await self.receive_stream.receive(max_bytes) else: - # With a bytes-oriented object stream, we need to handle any surplus bytes we get from - # the receive() call + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call chunk = await self.receive_stream.receive() if len(chunk) > max_bytes: # Save the surplus bytes in the buffer diff --git a/site-packages/anyio/streams/file.py b/site-packages/anyio/streams/file.py index 2840d40..f492464 100644 --- a/site-packages/anyio/streams/file.py +++ b/site-packages/anyio/streams/file.py @@ -1,9 +1,10 @@ from __future__ import annotations +from collections.abc import Callable, Mapping from io import SEEK_SET, UnsupportedOperation from os import PathLike from pathlib import Path -from typing import Any, BinaryIO, Callable, Mapping, cast +from typing import Any, BinaryIO, cast from .. import ( BrokenResourceError, @@ -130,8 +131,8 @@ class FileWriteStream(_BaseFileStream, ByteSendStream): Create a file write stream by opening the given file for writing. :param path: path of the file to write to - :param append: if ``True``, open the file for appending; if ``False``, any existing file - at the given path will be truncated + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated """ mode = "ab" if append else "wb" diff --git a/site-packages/anyio/streams/memory.py b/site-packages/anyio/streams/memory.py index a6499c1..bc2425b 100644 --- a/site-packages/anyio/streams/memory.py +++ b/site-packages/anyio/streams/memory.py @@ -10,9 +10,7 @@ from .. import ( ClosedResourceError, EndOfStream, WouldBlock, - get_cancelled_exc_class, ) -from .._core._compat import DeprecatedAwaitable from ..abc import Event, ObjectReceiveStream, ObjectSendStream from ..lowlevel import checkpoint @@ -27,7 +25,8 @@ class MemoryObjectStreamStatistics(NamedTuple): max_buffer_size: float open_send_streams: int #: number of unclosed clones of the send stream open_receive_streams: int #: number of unclosed clones of the receive stream - tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` tasks_waiting_receive: int @@ -104,11 +103,6 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): try: await receive_event.wait() - except get_cancelled_exc_class(): - # Ignore the immediate cancellation if we already received an item, so as not to - # lose it - if not container: - raise finally: self._state.waiting_receivers.pop(receive_event, None) @@ -121,8 +115,8 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): """ Create a clone of this receive stream. - Each clone can be closed separately. Only when all clones have been closed will the - receiving end of the memory stream be considered closed by the sending ends. + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. :return: the cloned stream @@ -136,8 +130,8 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): """ Close the stream. 
- This works the exact same way as :meth:`aclose`, but is provided as a special case for the - benefit of synchronous callbacks. + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. """ if not self._closed: @@ -179,7 +173,7 @@ class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): def __post_init__(self) -> None: self._state.open_send_channels += 1 - def send_nowait(self, item: T_contra) -> DeprecatedAwaitable: + def send_nowait(self, item: T_contra) -> None: """ Send an item immediately if it can be done without waiting. @@ -205,9 +199,19 @@ class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): else: raise WouldBlock - return DeprecatedAwaitable(self.send_nowait) - async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ await checkpoint() try: self.send_nowait(item) @@ -218,18 +222,18 @@ class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): try: await send_event.wait() except BaseException: - self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] + self._state.waiting_senders.pop(send_event, None) raise - if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] - raise BrokenResourceError + if self._state.waiting_senders.pop(send_event, None): + raise BrokenResourceError from None def clone(self) -> MemoryObjectSendStream[T_contra]: """ Create a clone of this send stream. - Each clone can be closed separately. Only when all clones have been closed will the - sending end of the memory stream be considered closed by the receiving ends. + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. :return: the cloned stream @@ -243,8 +247,8 @@ class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): """ Close the stream. - This works the exact same way as :meth:`aclose`, but is provided as a special case for the - benefit of synchronous callbacks. + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. """ if not self._closed: diff --git a/site-packages/anyio/streams/stapled.py b/site-packages/anyio/streams/stapled.py index 1b2862e..80f64a2 100644 --- a/site-packages/anyio/streams/stapled.py +++ b/site-packages/anyio/streams/stapled.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable, Mapping, Sequence from dataclasses import dataclass -from typing import Any, Callable, Generic, Mapping, Sequence, TypeVar +from typing import Any, Generic, TypeVar from ..abc import ( ByteReceiveStream, @@ -23,8 +24,8 @@ class StapledByteStream(ByteStream): """ Combines two byte streams into a single, bidirectional byte stream. - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. 
:param ByteSendStream send_stream: the sending byte stream :param ByteReceiveStream receive_stream: the receiving byte stream @@ -59,8 +60,8 @@ class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): """ Combines two object streams into a single, bidirectional object stream. - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. :param ObjectSendStream send_stream: the sending object stream :param ObjectReceiveStream receive_stream: the receiving object stream @@ -95,11 +96,11 @@ class MultiListener(Generic[T_Stream], Listener[T_Stream]): """ Combines multiple listeners into one, serving connections from all of them at once. - Any MultiListeners in the given collection of listeners will have their listeners moved into - this one. + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. - Extra attributes are provided from each listener, with each successive listener overriding any - conflicting attributes from the previous one. + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. :param listeners: listeners to serve :type listeners: Sequence[Listener[T_Stream]] diff --git a/site-packages/anyio/streams/text.py b/site-packages/anyio/streams/text.py index bba2d3f..f1a1127 100644 --- a/site-packages/anyio/streams/text.py +++ b/site-packages/anyio/streams/text.py @@ -1,8 +1,9 @@ from __future__ import annotations import codecs +from collections.abc import Callable, Mapping from dataclasses import InitVar, dataclass, field -from typing import Any, Callable, Mapping +from typing import Any from ..abc import ( AnyByteReceiveStream, @@ -19,16 +20,17 @@ class TextReceiveStream(ObjectReceiveStream[str]): """ Stream wrapper that decodes bytes to strings using the given encoding. - Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely - received unicode characters as soon as they come in. + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. :param transport_stream: any bytes-based receive stream - :param encoding: character encoding to use for decoding bytes to strings (defaults to - ``utf-8``) + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) :param errors: handling scheme for decoding errors (defaults to ``strict``; see the `codecs module documentation`_ for a comprehensive list of options) - .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects """ transport_stream: AnyByteReceiveStream @@ -62,12 +64,13 @@ class TextSendStream(ObjectSendStream[str]): Sends strings to the wrapped stream as bytes using the given encoding. 
:param AnyByteSendStream transport_stream: any bytes-based send stream - :param str encoding: character encoding to use for encoding strings to bytes (defaults to - ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) - .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects """ transport_stream: AnyByteSendStream @@ -93,19 +96,20 @@ class TextSendStream(ObjectSendStream[str]): @dataclass(eq=False) class TextStream(ObjectStream[str]): """ - A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on - send. + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. :param AnyByteStream transport_stream: any bytes-based stream - :param str encoding: character encoding to use for encoding/decoding strings to/from bytes - (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) - .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects """ transport_stream: AnyByteStream diff --git a/site-packages/anyio/streams/tls.py b/site-packages/anyio/streams/tls.py index 9f9e9fd..e913eed 100644 --- a/site-packages/anyio/streams/tls.py +++ b/site-packages/anyio/streams/tls.py @@ -3,9 +3,11 @@ from __future__ import annotations import logging import re import ssl +import sys +from collections.abc import Callable, Mapping from dataclasses import dataclass from functools import wraps -from typing import Any, Callable, Mapping, Tuple, TypeVar +from typing import Any, Tuple, TypeVar from .. import ( BrokenResourceError, @@ -16,7 +18,13 @@ from .. import ( from .._core._typedattr import TypedAttributeSet, typed_attribute from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") _PCTRTT = Tuple[Tuple[str, str], ...] _PCTRTTT = Tuple[_PCTRTT, ...] 
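``TLSStream`` (updated in the hunks that follow) layers TLS over any existing byte stream through its ``wrap()`` classmethod. A minimal client-side sketch, assuming AnyIO 4.x and outbound network access; the host name is only a placeholder::

    import anyio
    from anyio.streams.tls import TLSStream

    async def main() -> None:
        # "example.com" is a placeholder peer, not part of the library
        tcp = await anyio.connect_tcp("example.com", 443)
        tls = await TLSStream.wrap(tcp, hostname="example.com")
        await tls.send(b"HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        print(await tls.receive())
        await tls.aclose()

    anyio.run(main)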
@@ -31,8 +39,8 @@ class TLSAttribute(TypedAttributeSet): #: the selected cipher cipher: tuple[str, str, int] = typed_attribute() #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` - #: for more information) - peer_certificate: dict[str, str | _PCTRTTT | _PCTRTT] | None = typed_attribute() + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() #: the peer certificate in binary form peer_certificate_binary: bytes | None = typed_attribute() #: ``True`` if this is the server side of the connection @@ -90,8 +98,9 @@ class TLSStream(ByteStream): :param hostname: host name of the peer (if host name checking is desired) :param ssl_context: the SSLContext object to use (if not provided, a secure default will be created) - :param standard_compatible: if ``False``, skip the closing handshake when closing the - connection, and don't raise an exception if the peer does the same + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same :raises ~ssl.SSLError: if the TLS handshake fails """ @@ -124,7 +133,7 @@ class TLSStream(ByteStream): return wrapper async def _call_sslobject_method( - self, func: Callable[..., T_Retval], *args: object + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] ) -> T_Retval: while True: try: @@ -222,7 +231,9 @@ class TLSStream(ByteStream): return { **self.transport_stream.extra_attributes, TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, - TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), TLSAttribute.cipher: self._ssl_object.cipher, TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( @@ -241,11 +252,12 @@ class TLSStream(ByteStream): @dataclass(eq=False) class TLSListener(Listener[TLSStream]): """ - A convenience listener that wraps another listener and auto-negotiates a TLS session on every - accepted connection. + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. - If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is - called to do whatever post-mortem processing is deemed necessary. + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. @@ -281,7 +293,13 @@ class TLSListener(Listener[TLSStream]): # Log all except cancellation exceptions if not isinstance(exc, get_cancelled_exc_class()): - logging.getLogger(__name__).exception("Error during TLS handshake") + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. 
+ logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) # Only reraise base exceptions and cancellation exceptions if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): diff --git a/site-packages/anyio/to_process.py b/site-packages/anyio/to_process.py index 7ba9d44..1ff06f0 100644 --- a/site-packages/anyio/to_process.py +++ b/site-packages/anyio/to_process.py @@ -5,10 +5,11 @@ import pickle import subprocess import sys from collections import deque +from collections.abc import Callable from importlib.util import module_from_spec, spec_from_file_location -from typing import Callable, TypeVar, cast +from typing import TypeVar, cast -from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class from ._core._exceptions import BrokenWorkerProcess from ._core._subprocesses import open_process from ._core._synchronization import CapacityLimiter @@ -17,9 +18,16 @@ from .abc import ByteReceiveStream, ByteSendStream, Process from .lowlevel import RunVar, checkpoint_if_cancelled from .streams.buffered import BufferedByteReceiveStream +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + WORKER_MAX_IDLE_TIME = 300 # 5 minutes T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + _process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") _process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( "_process_pool_idle_workers" @@ -28,23 +36,24 @@ _default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_lim async def run_sync( - func: Callable[..., T_Retval], - *args: object, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], cancellable: bool = False, limiter: CapacityLimiter | None = None, ) -> T_Retval: """ Call the given function with the given arguments in a worker process. - If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, - the worker process running it will be abruptly terminated using SIGKILL (or - ``terminateProcess()`` on Windows). + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). :param func: a callable :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation while it's running - :param limiter: capacity limiter to use to limit the total amount of processes running - (if omitted, the default limiter is used) + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) :return: an awaitable that yields the return value of the function. 
""" @@ -94,11 +103,11 @@ async def run_sync( idle_workers = deque() _process_pool_workers.set(workers) _process_pool_idle_workers.set(idle_workers) - get_asynclib().setup_process_pool_exit_at_shutdown(workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) - async with (limiter or current_default_process_limiter()): - # Pop processes from the pool (starting from the most recently used) until we find one that - # hasn't exited yet + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet process: Process while idle_workers: process, idle_since = idle_workers.pop() @@ -108,22 +117,22 @@ async def run_sync( cast(ByteReceiveStream, process.stdout) ) - # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or - # longer + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer now = current_time() killed_processes: list[Process] = [] while idle_workers: if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: break - process, idle_since = idle_workers.popleft() - process.kill() - workers.remove(process) - killed_processes.append(process) + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) with CancelScope(shield=True): - for process in killed_processes: - await process.aclose() + for killed_process in killed_processes: + await killed_process.aclose() break @@ -172,7 +181,8 @@ async def run_sync( def current_default_process_limiter() -> CapacityLimiter: """ - Return the capacity limiter that is used by default to limit the number of worker processes. + Return the capacity limiter that is used by default to limit the number of worker + processes. :return: a capacity limiter object @@ -214,8 +224,8 @@ def process_worker() -> None: sys.path, main_module_path = args del sys.modules["__main__"] if main_module_path: - # Load the parent's main module but as __mp_main__ instead of __main__ - # (like multiprocessing does) to avoid infinite recursion + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion try: spec = spec_from_file_location("__mp_main__", main_module_path) if spec and spec.loader: diff --git a/site-packages/anyio/to_thread.py b/site-packages/anyio/to_thread.py index 9315d1e..5070516 100644 --- a/site-packages/anyio/to_thread.py +++ b/site-packages/anyio/to_thread.py @@ -1,67 +1,69 @@ from __future__ import annotations -from typing import Callable, TypeVar +import sys +from collections.abc import Callable +from typing import TypeVar from warnings import warn -from ._core._eventloop import get_asynclib +from ._core._eventloop import get_async_backend from .abc import CapacityLimiter +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") async def run_sync( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, limiter: CapacityLimiter | None = None, ) -> T_Retval: """ Call the given function with the given arguments in a worker thread. 
- If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, - the thread will still run its course but its return value (or any raised exception) will be - ignored. + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. :param func: a callable :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on its own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed :param limiter: capacity limiter to use to limit the total amount of threads running (if omitted, the default limiter is used) :return: an awaitable that yields the return value of the function. """ - return await get_asynclib().run_sync_in_worker_thread( - func, *args, cancellable=cancellable, limiter=limiter - ) + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) - -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - warn( - "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead", - DeprecationWarning, - ) + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter ) - return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) def current_default_thread_limiter() -> CapacityLimiter: """ - Return the capacity limiter that is used by default to limit the number of concurrent threads. + Return the capacity limiter that is used by default to limit the number of + concurrent threads. :return: a capacity limiter object """ - return get_asynclib().current_default_thread_limiter() - - -def current_default_worker_thread_limiter() -> CapacityLimiter: - warn( - "current_default_worker_thread_limiter() has been deprecated, " - "use anyio.to_thread.current_default_thread_limiter() instead", - DeprecationWarning, - ) - return current_default_thread_limiter() + return get_async_backend().current_default_thread_limiter() diff --git a/site-packages/certifi-2024.2.2.dist-info/INSTALLER b/site-packages/certifi-2024.2.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/site-packages/certifi-2024.2.2.dist-info/LICENSE b/site-packages/certifi-2024.2.2.dist-info/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt).
This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/site-packages/certifi-2024.2.2.dist-info/METADATA b/site-packages/certifi-2024.2.2.dist-info/METADATA new file mode 100644 index 0000000..c688a62 --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.2.2 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
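The path returned by ``certifi.where()`` plugs directly into the standard library's TLS machinery. A minimal sketch using only stdlib ``ssl`` and ``urllib.request`` (the target URL is a placeholder)::

    import ssl
    import urllib.request

    import certifi

    # Build an SSLContext whose trust store is certifi's bundled cacert.pem
    context = ssl.create_default_context(cafile=certifi.where())

    # HTTPS requests made with this context are verified against that bundle
    with urllib.request.urlopen("https://example.com", context=context) as response:
        print(response.status)
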
diff --git a/site-packages/certifi-2024.2.2.dist-info/RECORD b/site-packages/certifi-2024.2.2.dist-info/RECORD new file mode 100644 index 0000000..6b6ef29 --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2024.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170 +certifi-2024.2.2.dist-info/RECORD,, +certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-38.pyc,, +certifi/__pycache__/__main__.cpython-38.pyc,, +certifi/__pycache__/core.cpython-38.pyc,, +certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/site-packages/certifi-2024.2.2.dist-info/WHEEL b/site-packages/certifi-2024.2.2.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/site-packages/certifi-2024.2.2.dist-info/top_level.txt b/site-packages/certifi-2024.2.2.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/site-packages/certifi-2024.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/site-packages/certifi/__init__.py b/site-packages/certifi/__init__.py index 8ce89ce..1c91f3e 100644 --- a/site-packages/certifi/__init__.py +++ b/site-packages/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2023.07.22" +__version__ = "2024.02.02" diff --git a/site-packages/certifi/cacert.pem b/site-packages/certifi/cacert.pem index 0212369..fac3c31 100644 --- a/site-packages/certifi/cacert.pem +++ b/site-packages/certifi/cacert.pem @@ -245,34 +245,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK 4SVhM7JZG+Ju1zdXtg2pEto= -----END CERTIFICATE----- -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 
-Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Label: "XRamp Global CA Root" @@ -881,49 +853,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH WD9f -----END CERTIFICATE----- -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM 
-ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - # Issuer: CN=Izenpe.com O=IZENPE S.A. # Subject: CN=Izenpe.com O=IZENPE S.A. # Label: "Izenpe.com" @@ -4633,3 +4562,253 @@ o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== -----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- diff --git a/site-packages/certifi/core.py b/site-packages/certifi/core.py index de02898..91f538b 100644 --- a/site-packages/certifi/core.py +++ b/site-packages/certifi/core.py @@ -5,6 +5,10 @@ certifi.py This module returns the installation location of cacert.pem or its contents. """ import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] if sys.version_info >= (3, 11): @@ -35,6 +39,7 @@ if sys.version_info >= (3, 11): # we will also store that at the global level as well. _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) return _CACERT_PATH @@ -70,6 +75,7 @@ elif sys.version_info >= (3, 7): # we will also store that at the global level as well. 
_CACERT_CTX = get_path("certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) return _CACERT_PATH diff --git a/site-packages/charset_normalizer-3.3.2.dist-info/RECORD b/site-packages/charset_normalizer-3.3.2.dist-info/RECORD index 1bd8c3d..7c3b88d 100644 --- a/site-packages/charset_normalizer-3.3.2.dist-info/RECORD +++ b/site-packages/charset_normalizer-3.3.2.dist-info/RECORD @@ -1,4 +1,4 @@ -../../../bin/normalizer,sha256=kihdyWtCGlWGyMKjHiPs_eLP1DXyuK3r8cvuk2gln6Y,264 +../../../bin/normalizer,sha256=xOO86q_rGy8xdHcTD2YWGVEF0H9si8C7n91mGxtCWX0,274 charset_normalizer-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550 @@ -9,22 +9,22 @@ charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcU charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 -charset_normalizer/__pycache__/__init__.cpython-39.pyc,, -charset_normalizer/__pycache__/__main__.cpython-39.pyc,, -charset_normalizer/__pycache__/api.cpython-39.pyc,, -charset_normalizer/__pycache__/cd.cpython-39.pyc,, -charset_normalizer/__pycache__/constant.cpython-39.pyc,, -charset_normalizer/__pycache__/legacy.cpython-39.pyc,, -charset_normalizer/__pycache__/md.cpython-39.pyc,, -charset_normalizer/__pycache__/models.cpython-39.pyc,, -charset_normalizer/__pycache__/utils.cpython-39.pyc,, -charset_normalizer/__pycache__/version.cpython-39.pyc,, +charset_normalizer/__pycache__/__init__.cpython-38.pyc,, +charset_normalizer/__pycache__/__main__.cpython-38.pyc,, +charset_normalizer/__pycache__/api.cpython-38.pyc,, +charset_normalizer/__pycache__/cd.cpython-38.pyc,, +charset_normalizer/__pycache__/constant.cpython-38.pyc,, +charset_normalizer/__pycache__/legacy.cpython-38.pyc,, +charset_normalizer/__pycache__/md.cpython-38.pyc,, +charset_normalizer/__pycache__/models.cpython-38.pyc,, +charset_normalizer/__pycache__/utils.cpython-38.pyc,, +charset_normalizer/__pycache__/version.cpython-38.pyc,, charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097 charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744 -charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-39.pyc,, +charset_normalizer/cli/__pycache__/__init__.cpython-38.pyc,, +charset_normalizer/cli/__pycache__/__main__.cpython-38.pyc,, charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481 charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071 charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624 diff --git a/site-packages/click-8.1.7.dist-info/RECORD b/site-packages/click-8.1.7.dist-info/RECORD index 05f2645..8daf861 100644 --- a/site-packages/click-8.1.7.dist-info/RECORD +++ b/site-packages/click-8.1.7.dist-info/RECORD @@ -5,22 +5,22 @@ click-8.1.7.dist-info/RECORD,, 
click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 -click/__pycache__/__init__.cpython-39.pyc,, -click/__pycache__/_compat.cpython-39.pyc,, -click/__pycache__/_termui_impl.cpython-39.pyc,, -click/__pycache__/_textwrap.cpython-39.pyc,, -click/__pycache__/_winconsole.cpython-39.pyc,, -click/__pycache__/core.cpython-39.pyc,, -click/__pycache__/decorators.cpython-39.pyc,, -click/__pycache__/exceptions.cpython-39.pyc,, -click/__pycache__/formatting.cpython-39.pyc,, -click/__pycache__/globals.cpython-39.pyc,, -click/__pycache__/parser.cpython-39.pyc,, -click/__pycache__/shell_completion.cpython-39.pyc,, -click/__pycache__/termui.cpython-39.pyc,, -click/__pycache__/testing.cpython-39.pyc,, -click/__pycache__/types.cpython-39.pyc,, -click/__pycache__/utils.cpython-39.pyc,, +click/__pycache__/__init__.cpython-38.pyc,, +click/__pycache__/_compat.cpython-38.pyc,, +click/__pycache__/_termui_impl.cpython-38.pyc,, +click/__pycache__/_textwrap.cpython-38.pyc,, +click/__pycache__/_winconsole.cpython-38.pyc,, +click/__pycache__/core.cpython-38.pyc,, +click/__pycache__/decorators.cpython-38.pyc,, +click/__pycache__/exceptions.cpython-38.pyc,, +click/__pycache__/formatting.cpython-38.pyc,, +click/__pycache__/globals.cpython-38.pyc,, +click/__pycache__/parser.cpython-38.pyc,, +click/__pycache__/shell_completion.cpython-38.pyc,, +click/__pycache__/termui.cpython-38.pyc,, +click/__pycache__/testing.cpython-38.pyc,, +click/__pycache__/types.cpython-38.pyc,, +click/__pycache__/utils.cpython-38.pyc,, click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 diff --git a/site-packages/colorama-0.4.6.dist-info/RECORD b/site-packages/colorama-0.4.6.dist-info/RECORD index b0868a8..bfa4e61 100644 --- a/site-packages/colorama-0.4.6.dist-info/RECORD +++ b/site-packages/colorama-0.4.6.dist-info/RECORD @@ -4,23 +4,23 @@ colorama-0.4.6.dist-info/RECORD,, colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105 colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491 colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266 -colorama/__pycache__/__init__.cpython-39.pyc,, -colorama/__pycache__/ansi.cpython-39.pyc,, -colorama/__pycache__/ansitowin32.cpython-39.pyc,, -colorama/__pycache__/initialise.cpython-39.pyc,, -colorama/__pycache__/win32.cpython-39.pyc,, -colorama/__pycache__/winterm.cpython-39.pyc,, +colorama/__pycache__/__init__.cpython-38.pyc,, +colorama/__pycache__/ansi.cpython-38.pyc,, +colorama/__pycache__/ansitowin32.cpython-38.pyc,, +colorama/__pycache__/initialise.cpython-38.pyc,, +colorama/__pycache__/win32.cpython-38.pyc,, +colorama/__pycache__/winterm.cpython-38.pyc,, colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128 colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325 colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75 -colorama/tests/__pycache__/__init__.cpython-39.pyc,, -colorama/tests/__pycache__/ansi_test.cpython-39.pyc,, -colorama/tests/__pycache__/ansitowin32_test.cpython-39.pyc,, 
-colorama/tests/__pycache__/initialise_test.cpython-39.pyc,, -colorama/tests/__pycache__/isatty_test.cpython-39.pyc,, -colorama/tests/__pycache__/utils.cpython-39.pyc,, -colorama/tests/__pycache__/winterm_test.cpython-39.pyc,, +colorama/tests/__pycache__/__init__.cpython-38.pyc,, +colorama/tests/__pycache__/ansi_test.cpython-38.pyc,, +colorama/tests/__pycache__/ansitowin32_test.cpython-38.pyc,, +colorama/tests/__pycache__/initialise_test.cpython-38.pyc,, +colorama/tests/__pycache__/isatty_test.cpython-38.pyc,, +colorama/tests/__pycache__/utils.cpython-38.pyc,, +colorama/tests/__pycache__/winterm_test.cpython-38.pyc,, colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839 colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678 colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741 diff --git a/site-packages/devchat-0.2.10.dist-info/METADATA b/site-packages/devchat-0.2.10.dist-info/METADATA index 71845be..b67a31d 100644 --- a/site-packages/devchat-0.2.10.dist-info/METADATA +++ b/site-packages/devchat-0.2.10.dist-info/METADATA @@ -23,11 +23,14 @@ Classifier: Topic :: Software Development Requires-Dist: colorama (>=0.4.6,<0.5.0) Requires-Dist: gitpython (>=3.1.32,<4.0.0) Requires-Dist: importlib-metadata (>=6.8.0,<7.0.0) +Requires-Dist: importlib-resources (>=6.1.1,<7.0.0) Requires-Dist: networkx (>=3.1,<4.0) Requires-Dist: openai (>=1.0rc,<2.0) Requires-Dist: oyaml (>=1.0,<2.0) -Requires-Dist: pydantic (==1.10.13) +Requires-Dist: pathspec (>=0.12.1,<0.13.0) +Requires-Dist: pydantic (==1.10.14) Requires-Dist: rich_click (>=1.6.1,<2.0.0) +Requires-Dist: tenacity (>=8.2.3,<9.0.0) Requires-Dist: tiktoken (>=0.4.0,<0.5.0) Requires-Dist: tinydb (>=4.7.1,<5.0.0) Requires-Dist: urllib3 (<2.0) diff --git a/site-packages/devchat-0.2.10.dist-info/RECORD b/site-packages/devchat-0.2.10.dist-info/RECORD index 03bf30d..7354c46 100644 --- a/site-packages/devchat-0.2.10.dist-info/RECORD +++ b/site-packages/devchat-0.2.10.dist-info/RECORD @@ -1,61 +1,119 @@ -../../../bin/devchat,sha256=suM8Tlq7_32mdUbhBivdAeg9W---BD14D3E7bhy9LOc,247 +../../../bin/devchat,sha256=AXnwhCytrHAEByp2lBam6Im6Ua3Vm8JORd7Rq9ivjlE,257 devchat-0.2.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 devchat-0.2.10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 -devchat-0.2.10.dist-info/METADATA,sha256=NxGPR5qcRawRtREwinMGazaQIaefdeOOui9A60ynYo0,9775 +devchat-0.2.10.dist-info/METADATA,sha256=KIe85kKKkj5gtthBkRBo8J7SZ1XKgWO2HXCbR47bGD8,9911 devchat-0.2.10.dist-info/RECORD,, devchat-0.2.10.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -devchat-0.2.10.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88 +devchat-0.2.10.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88 devchat-0.2.10.dist-info/direct_url.json,sha256=zd3JCXQPIQyScA8lV44s-V6X6GM2woo8TWVYFvT3e9Y,59 devchat-0.2.10.dist-info/entry_points.txt,sha256=Glu9CHUNBjYbZXTQc3YW2rZFr2S_3AvqKu50gj3aWT4,50 devchat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 devchat/__main__.py,sha256=RY7_u5N5S0Ye2YtBWeYkk9n8zia_z9oe3DnS1SHRxZA,110 -devchat/__pycache__/__init__.cpython-39.pyc,, -devchat/__pycache__/__main__.cpython-39.pyc,, -devchat/__pycache__/assistant.cpython-39.pyc,, -devchat/__pycache__/chat.cpython-39.pyc,, -devchat/__pycache__/config.cpython-39.pyc,, -devchat/__pycache__/message.cpython-39.pyc,, -devchat/__pycache__/prompt.cpython-39.pyc,, 
-devchat/__pycache__/store.cpython-39.pyc,, -devchat/__pycache__/utils.cpython-39.pyc,, -devchat/_cli/__init__.py,sha256=KwW3v4rWg4ym3gqp3Dqm6tDjrixD92PhlRiW7807DUo,157 -devchat/_cli/__pycache__/__init__.cpython-39.pyc,, -devchat/_cli/__pycache__/log.cpython-39.pyc,, -devchat/_cli/__pycache__/main.cpython-39.pyc,, -devchat/_cli/__pycache__/prompt.cpython-39.pyc,, -devchat/_cli/__pycache__/run.cpython-39.pyc,, -devchat/_cli/__pycache__/topic.cpython-39.pyc,, -devchat/_cli/__pycache__/utils.cpython-39.pyc,, +devchat/__pycache__/__init__.cpython-38.pyc,, +devchat/__pycache__/__main__.cpython-38.pyc,, +devchat/__pycache__/assistant.cpython-38.pyc,, +devchat/__pycache__/chat.cpython-38.pyc,, +devchat/__pycache__/config.cpython-38.pyc,, +devchat/__pycache__/message.cpython-38.pyc,, +devchat/__pycache__/prompt.cpython-38.pyc,, +devchat/__pycache__/store.cpython-38.pyc,, +devchat/__pycache__/utils.cpython-38.pyc,, +devchat/_cli/__init__.py,sha256=WErLgVRreiNRgCXePjktbvojVOt15GVnZb0AXdFYCL4,192 +devchat/_cli/__pycache__/__init__.cpython-38.pyc,, +devchat/_cli/__pycache__/errors.cpython-38.pyc,, +devchat/_cli/__pycache__/log.cpython-38.pyc,, +devchat/_cli/__pycache__/main.cpython-38.pyc,, +devchat/_cli/__pycache__/prompt.cpython-38.pyc,, +devchat/_cli/__pycache__/route.cpython-38.pyc,, +devchat/_cli/__pycache__/router.cpython-38.pyc,, +devchat/_cli/__pycache__/run.cpython-38.pyc,, +devchat/_cli/__pycache__/topic.cpython-38.pyc,, +devchat/_cli/__pycache__/utils.cpython-38.pyc,, +devchat/_cli/errors.py,sha256=akl1b5EzZhAlhQyfcFNOSTaLmJ3zG5PSTVrRGPaJ1bg,58 devchat/_cli/log.py,sha256=GAYVrNPprDQEB6G2Z1J97jjDU-EbYlJBHnz-Lz6nzvo,3106 -devchat/_cli/main.py,sha256=_uJ6KOiV19kATA0CYUtJZ1gGX9IGcX_8pjQyZ9J8wSU,659 -devchat/_cli/prompt.py,sha256=uvug9x7zclL0P1xbT_xjFsSCx2PjjLtmGUf0O8Sx1Ek,3923 -devchat/_cli/run.py,sha256=nCCqNV7IuTxp7XrroHShR772qKWDXGmx8vM388QjPW8,4849 +devchat/_cli/main.py,sha256=T4CF8CUwMRD-oFyJxbi5wuIROqeqGDkx-ALEjo1U7ZU,714 +devchat/_cli/prompt.py,sha256=iWLGIkkS8Ti7sfQdyJGTB7ZapwapuxqF_pJWiIVibWU,2831 +devchat/_cli/route.py,sha256=kL6GuuKehDogN0-iLtpHlMeDr77rP_y40b4XT4_I364,2414 +devchat/_cli/router.py,sha256=4fcVpUDroNbxQc8FeS8mjbeyv_O8ssWxES4R8glAn50,5322 +devchat/_cli/run.py,sha256=DPndvap9biexprnkL_dEqleIzE4Vf7mW23lU6bgZPsk,8063 devchat/_cli/topic.py,sha256=CLE8y2Vox_5igtoSfsnFgaCa7YtJE-rcDtoNhnnedyQ,1455 -devchat/_cli/utils.py,sha256=u43D4lqihdil1BEenaryzP-NUp5CQo4jTmtq640gTLY,5975 +devchat/_cli/utils.py,sha256=bwYKSc3D3ImhCCiozLG4FI9o7LJRPEMs3l91EQSGTzM,6377 devchat/anthropic/__init__.py,sha256=xaFR1uXxn0sVHBhCJdJKuWKVVgPnSLw3mlaCFFivD_8,97 -devchat/anthropic/__pycache__/__init__.cpython-39.pyc,, -devchat/anthropic/__pycache__/anthropic_chat.cpython-39.pyc,, +devchat/anthropic/__pycache__/__init__.cpython-38.pyc,, +devchat/anthropic/__pycache__/anthropic_chat.cpython-38.pyc,, devchat/anthropic/anthropic_chat.py,sha256=OujoXOQywPQf4gjLhdZBYTwKoRDs8hujktss3hN-BNk,423 -devchat/assistant.py,sha256=qOU8u0nrRbruTmH0FS7Ax2H8aOws5uLOnVC8v-WzFoU,6033 +devchat/assistant.py,sha256=6Px9aEqHzNOhIde63mM7xeg1skrY8Gc7wYb2ww-D9WY,6141 devchat/chat.py,sha256=TEO8OndmL4hpJ1D-QAFKO-JB_7w1kTeUC3VVwL9FSUQ,1676 +devchat/chatmark/.gitignore,sha256=8wf0Azg8LJGtO3zamZ8sHM-ARFcedTCPK1disjofnhY,4 +devchat/chatmark/README.md,sha256=akXLntx1ebzWaIqwt0hQ_8eVd79t-CQZ5hOynM20JLk,157 +devchat/chatmark/__init__.py,sha256=l1xRneWsKKAWs0R4VoynYytFahCRgyvR-tbrhKK3iiE,203 +devchat/chatmark/__pycache__/__init__.cpython-38.pyc,, +devchat/chatmark/__pycache__/form.cpython-38.pyc,, 
+devchat/chatmark/__pycache__/iobase.cpython-38.pyc,, +devchat/chatmark/__pycache__/step.cpython-38.pyc,, +devchat/chatmark/__pycache__/widgets.cpython-38.pyc,, +devchat/chatmark/chatmark_example/README.md,sha256=vtSGvEL1IOQPu56qP5s6ZazW-41iNkS_Ph0GBjjWATA,413 +devchat/chatmark/chatmark_example/__pycache__/main.cpython-38.pyc,, +devchat/chatmark/chatmark_example/main.py,sha256=JfAC7opkVIUdzrOfyVwb04FlX7whVFNyeWrf-_ZWC0A,3600 +devchat/chatmark/form.py,sha256=bbPQhhyMDbrrs2bX8UmVKAZ6n4kcYJEppDD3700ksbM,2586 +devchat/chatmark/iobase.py,sha256=CjTHjDAxHkLHiNsrp4aaTjdT6mQB5Dy4B1UsJWVcKS8,913 +devchat/chatmark/step.py,sha256=jATqxc1ZoeKlkEoO-0DMoyVzLYGNA58S8hL5NMn6W7A,574 +devchat/chatmark/widgets.py,sha256=5FEghZ-BZPHyjPUIKZ3L6BFhNHawa2JdPX6OzBs7Yfs,10159 devchat/config.py,sha256=3lvhi-YRbCOM2Ye28GJF14n10mEYczD3sllhz_ZwZS8,6348 -devchat/engine/__init__.py,sha256=sXaM_4kQtG-VV7NxMDj7a7v4rbNg7dJHEMF8BOz9NtI,262 -devchat/engine/__pycache__/__init__.cpython-39.pyc,, -devchat/engine/__pycache__/command_parser.cpython-39.pyc,, -devchat/engine/__pycache__/namespace.cpython-39.pyc,, -devchat/engine/__pycache__/recursive_prompter.cpython-39.pyc,, -devchat/engine/command_parser.py,sha256=3SI_s8vNn1zyr5HugOLmtaoYErzRqmmjyWFIgPLcFBs,1706 +devchat/engine/__init__.py,sha256=KSlnUY42kg9EyTaW2lrhdANWl1ORkg-5vYoLZfv9e8Q,373 +devchat/engine/__pycache__/__init__.cpython-38.pyc,, +devchat/engine/__pycache__/command_parser.cpython-38.pyc,, +devchat/engine/__pycache__/command_runner.cpython-38.pyc,, +devchat/engine/__pycache__/namespace.cpython-38.pyc,, +devchat/engine/__pycache__/recursive_prompter.cpython-38.pyc,, +devchat/engine/__pycache__/router.cpython-38.pyc,, +devchat/engine/__pycache__/util.cpython-38.pyc,, +devchat/engine/command_parser.py,sha256=GOVUFio-ArCXUwDu-jJDh3_PuUzDJwIkMUTiE3be0zI,2054 +devchat/engine/command_runner.py,sha256=F1TNPR76JOrB9IVRU303iBVxZtZpMgoFbG-GuLrm9fY,9705 devchat/engine/namespace.py,sha256=MghROybwfVYhfKz8efeG38awQR4eXUThwuVL9J07MGc,5175 devchat/engine/recursive_prompter.py,sha256=MkMWIjluJMK6uk5_KS7K9Aygoo1DlGUsxrYt_w8yMwU,642 +devchat/engine/router.py,sha256=y_9nPaurK43lTVmvzUUA8aQMG-0BP6_Mq71Xz3jhdB4,1735 +devchat/engine/util.py,sha256=oxpYiYNbtOgjqwbRdBtJaKSfjap9AmlT49iHivOW01A,5643 +devchat/ide/__init__.py,sha256=VkUWCJWneJbDxwmWTH9E6f9QDrHmOdjJLIErT8v6JCA,65 +devchat/ide/__pycache__/__init__.cpython-38.pyc,, +devchat/ide/__pycache__/idea_services.cpython-38.pyc,, +devchat/ide/__pycache__/rpc.cpython-38.pyc,, +devchat/ide/__pycache__/service.cpython-38.pyc,, +devchat/ide/__pycache__/types.cpython-38.pyc,, +devchat/ide/__pycache__/vscode_services.cpython-38.pyc,, +devchat/ide/idea_services.py,sha256=OB3xQVf4kCS_9Gn9-GsqLqFfS4l-QNPmmp6kgd3iuVY,394 +devchat/ide/rpc.py,sha256=FH-55ReSHWidN0Dcfg2MqbDA_qarMz8YdHtAa4kjQgI,2440 +devchat/ide/service.py,sha256=NoixX7r4-hQu91HD8GhcjLh2TKKUXuNReuk8mCdS_mU,2060 +devchat/ide/types.py,sha256=01qSNKRO_hyRzPfAHrhm_Jg4rPCxQK7SpwN4PC0PXGE,461 +devchat/ide/vscode_services.py,sha256=fpANjDELLyrgbGMmpKfO4gsjoN4t3uyfSOgere0QdbU,5263 +devchat/llm/__init__.py,sha256=IXhLbfNO-TV2ZIJwZOhjsL2Batb8WGq-gayrxp-z8a0,409 +devchat/llm/__pycache__/__init__.cpython-38.pyc,, +devchat/llm/__pycache__/chat.cpython-38.pyc,, +devchat/llm/__pycache__/openai.cpython-38.pyc,, +devchat/llm/__pycache__/pipeline.cpython-38.pyc,, +devchat/llm/__pycache__/text_confirm.cpython-38.pyc,, +devchat/llm/__pycache__/tools_call.cpython-38.pyc,, +devchat/llm/chat.py,sha256=KAC1rzhP-goMjccCFGehm8vT1-4ZengFrzv9gkLYbd4,3409 
+devchat/llm/openai.py,sha256=wViFBzaeSkh6sPmR0_rJSfHVYN0ecxAJ-nwKRy2--kk,5514 +devchat/llm/pipeline.py,sha256=hVtwEfKVZ1S90Qb9SLe3UBRJZbtLPptgCEk8JHaEpGI,2002 +devchat/llm/text_confirm.py,sha256=sdt7AUFDcsOZ0fLfS0vtjdS2_8xhkTF6aF8Sn05OlI0,1462 +devchat/llm/tools_call.py,sha256=Ks156bm_kkp6Sb3PP7Ci1cR4Gqf1pkye4oG5chd_rSg,8072 +devchat/memory/__init__.py,sha256=BD2vKfSmWsQrOJSKKXKBwcVcGQcZamglWZDstosn4cw,134 +devchat/memory/__pycache__/__init__.cpython-38.pyc,, +devchat/memory/__pycache__/base.cpython-38.pyc,, +devchat/memory/__pycache__/fixsize_memory.cpython-38.pyc,, +devchat/memory/base.py,sha256=mabEkWtez31gMtRamQkEMUGX6yEzb3P0uHFEgK1IBhI,598 +devchat/memory/fixsize_memory.py,sha256=iPBeylq8UayOepds6qrvVQf46pd8vMcrPO6brx_g-Po,1618 devchat/message.py,sha256=OdFQ8rv4ZrX-wOrLb4KRdqfvyAvCbaAMhDBXDHWuwRU,758 devchat/openai/__init__.py,sha256=9DcELA9I86vSQIySgABiZSb_QgE4qaT5s3n-ODASqiA,283 -devchat/openai/__pycache__/__init__.cpython-39.pyc,, -devchat/openai/__pycache__/openai_chat.cpython-39.pyc,, -devchat/openai/__pycache__/openai_message.cpython-39.pyc,, -devchat/openai/__pycache__/openai_prompt.cpython-39.pyc,, -devchat/openai/openai_chat.py,sha256=aME5qfzvZsnoUKJ344uaUJ27okTk9if46nF3T9DeMK0,3826 +devchat/openai/__pycache__/__init__.cpython-38.pyc,, +devchat/openai/__pycache__/openai_chat.cpython-38.pyc,, +devchat/openai/__pycache__/openai_message.cpython-38.pyc,, +devchat/openai/__pycache__/openai_prompt.cpython-38.pyc,, +devchat/openai/openai_chat.py,sha256=VQjKFHhunNsLQT4HXrUh_8L5jkrktVwjWwCB4fVLSyM,3828 devchat/openai/openai_message.py,sha256=xTmglsj5Iyvcytn3pUYhwkuiyJSx932N88fS4OCJ7Qk,3293 -devchat/openai/openai_prompt.py,sha256=M9NIBP5W9DwzFJyMO0L9DYM470383wQOZJwZv4KvQfs,10856 -devchat/prompt.py,sha256=WAHa6LmVU1xvBp6AGalQ1TzQuVwt1lsdBd70OazVoW0,9523 +devchat/openai/openai_prompt.py,sha256=rv-weE_vJ0GKL5FapYvJOuNga9-YRqgpF7-ld8TArZY,10771 +devchat/prompt.py,sha256=CRFvl6x5Fs2CmaAghY4Bo05LKr6DeuYJe5Ut6w-lh_Y,9411 devchat/store.py,sha256=PI2HvMyZmIV1XyyjIr5rPayagBQWJUWsEdpUCBZ7xLU,9879 -devchat/utils.py,sha256=_-FUAC-4ZKoF0q7eg6xWz6hrj0rKJKLHenK4S_uZvkE,7643 +devchat/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126 +devchat/utils.py,sha256=yhW9_Xr_01k-rJTc6JMGCxLawHvFpKZY1DeD3Uc2ZRo,7802 diff --git a/site-packages/devchat-0.2.10.dist-info/WHEEL b/site-packages/devchat-0.2.10.dist-info/WHEEL index 7c88152..d73ccaa 100644 --- a/site-packages/devchat-0.2.10.dist-info/WHEEL +++ b/site-packages/devchat-0.2.10.dist-info/WHEEL @@ -1,4 +1,4 @@ Wheel-Version: 1.0 -Generator: poetry-core 1.8.1 +Generator: poetry-core 1.9.0 Root-Is-Purelib: true Tag: py3-none-any diff --git a/site-packages/devchat/_cli/errors.py b/site-packages/devchat/_cli/errors.py new file mode 100644 index 0000000..73b8992 --- /dev/null +++ b/site-packages/devchat/_cli/errors.py @@ -0,0 +1,4 @@ + + +class MissContentInPromptException(Exception): + pass diff --git a/site-packages/devchat/_cli/prompt.py b/site-packages/devchat/_cli/prompt.py index 260e529..de62547 100644 --- a/site-packages/devchat/_cli/prompt.py +++ b/site-packages/devchat/_cli/prompt.py @@ -1,13 +1,6 @@ -import json import sys from typing import List, Optional import rich_click as click -from devchat.engine import run_command -from devchat.assistant import Assistant -from devchat.openai.openai_chat import OpenAIChat, OpenAIChatConfig -from devchat.store import Store -from devchat.utils import parse_files -from devchat._cli.utils import handle_errors, init_dir, get_model_config from 
devchat._cli.router import llm_prompt diff --git a/site-packages/devchat/_cli/route.py b/site-packages/devchat/_cli/route.py index 9e1e05a..c571e6b 100644 --- a/site-packages/devchat/_cli/route.py +++ b/site-packages/devchat/_cli/route.py @@ -66,4 +66,3 @@ def route(content: Optional[str], parent: Optional[str], reference: Optional[Lis auto ) sys.exit(0) - diff --git a/site-packages/devchat/_cli/router.py b/site-packages/devchat/_cli/router.py index 3210783..e632fa3 100644 --- a/site-packages/devchat/_cli/router.py +++ b/site-packages/devchat/_cli/router.py @@ -2,54 +2,81 @@ import json import sys from typing import List, Optional import rich_click as click -from devchat.engine import run_command +from devchat.engine import run_command, load_workflow_instruction from devchat.assistant import Assistant from devchat.openai.openai_chat import OpenAIChat, OpenAIChatConfig from devchat.store import Store from devchat.utils import parse_files from devchat._cli.utils import handle_errors, init_dir, get_model_config +from devchat._cli.errors import MissContentInPromptException +def _get_model_and_config( + model: Optional[str], + config_str: Optional[str]): + repo_chat_dir, user_chat_dir = init_dir() + model, config = get_model_config(repo_chat_dir, user_chat_dir, model) + + parameters_data = config.dict(exclude_unset=True) + if config_str: + config_data = json.loads(config_str) + parameters_data.update(config_data) + return model, parameters_data + +def _load_tool_functions(functions: Optional[str]): + try: + if functions: + with open(functions, 'r', encoding="utf-8") as f_file: + return json.load(f_file) + return None + except Exception: + return None + +def _load_instruction_contents(content: str, instruct: Optional[List[str]]): + instruct_contents = parse_files(instruct) + command_instructions = load_workflow_instruction(content) + if command_instructions is not None: + instruct_contents.extend(command_instructions) + + return instruct_contents + def before_prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]], instruct: Optional[List[str]], context: Optional[List[str]], model: Optional[str], config_str: Optional[str] = None, functions: Optional[str] = None, function_name: Optional[str] = None, not_store: Optional[bool] = False): - repo_chat_dir, user_chat_dir = init_dir() + repo_chat_dir, _1 = init_dir() if content is None: content = click.get_text_stream('stdin').read() if content == '': - return + raise MissContentInPromptException() - instruct_contents = parse_files(instruct) + instruct_contents = _load_instruction_contents(content, instruct) context_contents = parse_files(context) + tool_functions = _load_tool_functions(functions) - model, config = get_model_config(repo_chat_dir, user_chat_dir, model) + model, parameters_data = _get_model_and_config(model, config_str) + max_input_tokens = parameters_data.get("max_input_tokens", 4000) - parameters_data = config.dict(exclude_unset=True) - if config_str: - config_data = json.loads(config_str) - parameters_data.update(config_data) openai_config = OpenAIChatConfig(model=model, **parameters_data) - chat = OpenAIChat(openai_config) chat_store = Store(repo_chat_dir, chat) - assistant = Assistant(chat, chat_store, config.max_input_tokens, not not_store) - - functions_data = None - if functions is not None: - with open(functions, 'r', encoding="utf-8") as f_file: - functions_data = json.load(f_file) - assistant.make_prompt(content, instruct_contents, context_contents, functions_data, - parent=parent, 
references=reference, - function_name=function_name) - return openai_config, model, assistant, content, context_contents - + assistant = Assistant(chat, chat_store, max_input_tokens, not not_store) + assistant.make_prompt( + request = content, + instruct_contents = instruct_contents, + context_contents = context_contents, + functions = tool_functions, + parent=parent, + references=reference, + function_name=function_name + ) + return model, assistant, content def llm_prompt(content: Optional[str], parent: Optional[str], reference: Optional[List[str]], instruct: Optional[List[str]], context: Optional[List[str]], @@ -57,8 +84,9 @@ def llm_prompt(content: Optional[str], parent: Optional[str], reference: Optiona functions: Optional[str] = None, function_name: Optional[str] = None, not_store: Optional[bool] = False): with handle_errors(): - _1, _2, assistant, _3, _4 = before_prompt( - content, parent, reference, instruct, context, model, config_str, functions, function_name, not_store + _1, assistant, _3, = before_prompt( + content, parent, reference, instruct, context, + model, config_str, functions, function_name, not_store ) click.echo(assistant.prompt.formatted_header()) @@ -70,19 +98,17 @@ def llm_commmand(content: Optional[str], parent: Optional[str], reference: Optio instruct: Optional[List[str]], context: Optional[List[str]], model: Optional[str], config_str: Optional[str] = None): with handle_errors(): - openai_config, model, assistant, content, context_contents = before_prompt( + model, assistant, content = before_prompt( content, parent, reference, instruct, context, model, config_str, None, None, True ) click.echo(assistant.prompt.formatted_header()) command_result = run_command( - openai_config, - model, - assistant.prompt.messages, - content, - parent, - context_contents, - False) + model_name = model, + history_messages = assistant.prompt.messages, + input_text = content, + parent_hash = parent, + auto_fun = False) if command_result is not None: sys.exit(0) @@ -96,21 +122,19 @@ def llm_route(content: Optional[str], parent: Optional[str], reference: Optional model: Optional[str], config_str: Optional[str] = None, auto: Optional[bool] = False): with handle_errors(): - openai_config, model, assistant, content, context_contents = before_prompt( + model, assistant, content = before_prompt( content, parent, reference, instruct, context, model, config_str, None, None, True ) click.echo(assistant.prompt.formatted_header()) command_result = run_command( - openai_config, - model, - assistant.prompt.messages, - content, - parent, - context_contents, - auto) + model_name = model, + history_messages = assistant.prompt.messages, + input_text = content, + parent_hash = parent, + auto_fun = auto) if command_result is not None: sys.exit(command_result[0]) for response in assistant.iterate_response(): - click.echo(response, nl=False) \ No newline at end of file + click.echo(response, nl=False) diff --git a/site-packages/devchat/_cli/run.py b/site-packages/devchat/_cli/run.py index 739629a..0b07d8f 100644 --- a/site-packages/devchat/_cli/run.py +++ b/site-packages/devchat/_cli/run.py @@ -7,13 +7,9 @@ from typing import List, Optional import yaml import rich_click as click -try: - from git import Repo, GitCommandError -except Exception: - pass -from devchat._cli.utils import init_dir, handle_errors, valid_git_repo, clone_git_repo +from devchat._cli.utils import init_dir, handle_errors, clone_git_repo from devchat._cli.utils import download_and_extract_workflow -from devchat.engine import Namespace, 
CommandParser, RecursivePrompter +from devchat.engine import Namespace, CommandParser from devchat.utils import get_logger from devchat._cli.router import llm_commmand @@ -41,8 +37,7 @@ logger = get_logger(__name__) def run(command: str, list_flag: bool, recursive_flag: bool, update_sys_flag: bool, parent: Optional[str], reference: Optional[List[str]], instruct: Optional[List[str]], context: Optional[List[str]], - model: Optional[str], config_str: Optional[str] = None, - auto: Optional[bool] = False): + model: Optional[str], config_str: Optional[str] = None): """ Operate the workflow engine of DevChat. """ @@ -134,7 +129,7 @@ def read_hidden_workflows(): """ user_path = os.path.expanduser('~') config_path = os.path.join(user_path, '.chat', 'workflows', 'config.yml') - + if not os.path.exists(config_path): create_default_config_file(config_path) @@ -146,7 +141,7 @@ def read_hidden_workflows(): return hidden_workflows -def __onerror(func, path, exc_info): +def __onerror(func, path, _1): """ Error handler for shutil.rmtree. @@ -163,20 +158,17 @@ def __onerror(func, path, exc_info): os.chmod(path, stat.S_IWUSR) # Retry the function that failed func(path) - else: - # Re-raise the error if it's a different kind of error - raise - + def __make_files_writable(directory): """ Recursively make all files in the directory writable. """ - for root, dirs, files in os.walk(directory): + for root, _1, files in os.walk(directory): for name in files: filepath = os.path.join(root, name) if not os.access(filepath, os.W_OK): os.chmod(filepath, stat.S_IWUSR) - + def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[str], zip_urls: List[str]): """ Clone a Git repository to a specified location, or pull it if it already exists. @@ -205,12 +197,12 @@ def _clone_or_pull_git_repo(target_dir: str, repo_urls: List[str], zip_urls: Lis clone_git_repo(bak_dir, repo_urls) try: shutil.move(target_dir, new_dir) - except Exception as e: + except Exception: __make_files_writable(target_dir) shutil.move(target_dir, new_dir) try: shutil.move(bak_dir, target_dir) - except Exception as e: + except Exception: __make_files_writable(bak_dir) shutil.move(bak_dir, target_dir) else: diff --git a/site-packages/devchat/_cli/utils.py b/site-packages/devchat/_cli/utils.py index 3bd5ff0..74d36a9 100644 --- a/site-packages/devchat/_cli/utils.py +++ b/site-packages/devchat/_cli/utils.py @@ -14,6 +14,7 @@ except Exception: import rich_click as click from devchat.config import ConfigManager, OpenAIModelConfig from devchat.utils import find_root_dir, add_gitignore, setup_logger, get_logger +from devchat._cli.errors import MissContentInPromptException logger = get_logger(__name__) @@ -53,6 +54,9 @@ def handle_errors(): logger.exception(error) click.echo(f"{type(error).__name__}: {error.type}", err=True) sys.exit(1) + except MissContentInPromptException: + click.echo("Miss content in prompt command.", err=True) + sys.exit(1) except Exception as error: # import traceback # traceback.print_exc() diff --git a/site-packages/devchat/chatmark/.gitignore b/site-packages/devchat/chatmark/.gitignore new file mode 100644 index 0000000..c036379 --- /dev/null +++ b/site-packages/devchat/chatmark/.gitignore @@ -0,0 +1 @@ +tmp/ \ No newline at end of file diff --git a/site-packages/devchat/chatmark/README.md b/site-packages/devchat/chatmark/README.md new file mode 100644 index 0000000..94d95af --- /dev/null +++ b/site-packages/devchat/chatmark/README.md @@ -0,0 +1,5 @@ +# ChatMark + +ChatMark is a markup language for user interaction in chat message. 
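For example, the Python side of a checkbox interaction looks like this (a minimal sketch; the option labels are illustrative, and render() blocks until the DevChat front end replies):

```python
from devchat.chatmark import Checkbox

# Ask the user to pick files; render() sends the ChatMark block and waits.
checkbox = Checkbox(["devchat/prompt.py", "tests/test_cli_prompt.py"])
checkbox.render()
for idx in checkbox.selections or []:
    print(checkbox.options[idx])
```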
+ +This module provides python implementation for common widgets in ChatMark. diff --git a/site-packages/devchat/chatmark/__init__.py b/site-packages/devchat/chatmark/__init__.py new file mode 100644 index 0000000..d78551f --- /dev/null +++ b/site-packages/devchat/chatmark/__init__.py @@ -0,0 +1,12 @@ +from .form import Form +from .step import Step +from .widgets import Button, Checkbox, Radio, TextEditor + +__all__ = [ + "Checkbox", + "TextEditor", + "Radio", + "Button", + "Form", + "Step", +] diff --git a/site-packages/devchat/chatmark/chatmark_example/README.md b/site-packages/devchat/chatmark/chatmark_example/README.md new file mode 100644 index 0000000..8246f91 --- /dev/null +++ b/site-packages/devchat/chatmark/chatmark_example/README.md @@ -0,0 +1,16 @@ +# chatmark_exmaple + +This is an example of how to use the chatmark module. + +Usage: + +1. Copy the `chatmark_example` folder under `~/.chat/workflow/org` +2. Create `command.yml` under `~/.chat/workflow/org/chatmark_example` with the following content: +```yaml +description: chatmark examples +steps: + - run: $command_python $command_path/main.py + +``` +3. Use the command `/chatmark_example` in devchat vscode plugin. + diff --git a/site-packages/devchat/chatmark/chatmark_example/main.py b/site-packages/devchat/chatmark/chatmark_example/main.py new file mode 100644 index 0000000..f834c02 --- /dev/null +++ b/site-packages/devchat/chatmark/chatmark_example/main.py @@ -0,0 +1,159 @@ +import time + + +from devchat.chatmark import Button, Checkbox, Form, Radio, Step, TextEditor # pylint: disable=E402 + + +def main(): + print("\n\n---\n\n") + + # Step + print("\n\n# Step Example\n\n") + with Step("Something is running..."): + print("Will sleep for 5 seconds...", flush=True) + time.sleep(5) + print("Done", flush=True) + + print("\n\n# Step Example with exception\n\n") + try: + with Step("Something is running (will raise exception)..."): + print("Will sleep for 5 seconds...", flush=True) + time.sleep(5) + raise Exception("oops!") + + except Exception: + pass + + # Button + print("\n\n# Button Example\n\n") + button = Button( + [ + "Yes", + "Or", + "No", + ], + ) + button.render() + + idx = button.clicked + print("\n\nButton result\n\n") + print(f"\n\n{idx}: {button.buttons[idx]}\n\n") + + print("\n\n---\n\n") + + # Checkbox + print("\n\n# Checkbox Example\n\n") + checkbox = Checkbox( + [ + "A", + "B", + "C", + "D", + ], + [True, False, False, True], + ) + checkbox.render() + + print(f"\n\ncheckbox.selections: {checkbox.selections}\n\n") + for idx in checkbox.selections: + print(f"\n\n{idx}: {checkbox.options[idx]}\n\n") + + print("\n\n---\n\n") + + # TextEditor + print("\n\n# TextEditor Example\n\n") + text_editor = TextEditor( + "hello world\nnice to meet you", + ) + + text_editor.render() + + print(f"\n\ntext_editor.new_text:\n\n{text_editor.new_text}\n\n") + + print("\n\n---\n\n") + + # Radio + print("\n\n# Radio Example\n\n") + radio = Radio( + [ + "Sun", + "Moon", + "Star", + ], + ) + radio.render() + + print(f"\n\nradio.selection: {radio.selection}\n\n") + if radio.selection is not None: + print(f"\n\nradio.options[radio.selection]: {radio.options[radio.selection]}\n\n") + + print("\n\n---\n\n") + + # Form + print("\n\n# Form Example\n\n") + checkbox_1 = Checkbox( + [ + "Sprint", + "Summer", + "Autumn", + "Winter", + ] + ) + checkbox_2 = Checkbox( + [ + "金", + "木", + "水", + "火", + "土", + ], + ) + radio_1 = Radio( + [ + "Up", + "Down", + ], + ) + radio_2 = Radio( + [ + "Left", + "Center", + "Right", + ], + ) + text_editor_1 = 
TextEditor( + "hello world\nnice to meet you", + ) + text_editor_2 = TextEditor( + "hihihihihi", + ) + + form = Form( + [ + "Some string in a form", + checkbox_1, + "Another string in a form", + radio_1, + "the third string in a form", + checkbox_2, + "the fourth string in a form", + radio_2, + "the fifth string in a form", + text_editor_1, + "the last string in a form", + text_editor_2, + ], + ) + + form.render() + + print(f"\n\ncheckbox_1.selections: {checkbox_1.selections}\n\n") + print(f"\n\ncheckbox_2.selections: {checkbox_2.selections}\n\n") + print(f"\n\nradio_1.selection: {radio_1.selection}\n\n") + print(f"\n\nradio_2.selection: {radio_2.selection}\n\n") + print(f"\n\ntext_editor_1.new_text:\n\n{text_editor_1.new_text}\n\n") + print(f"\n\ntext_editor_2.new_text:\n\n{text_editor_2.new_text}\n\n") + + +if __name__ == "__main__": + main() diff --git a/site-packages/devchat/chatmark/form.py b/site-packages/devchat/chatmark/form.py new file mode 100644 index 0000000..f610358 --- /dev/null +++ b/site-packages/devchat/chatmark/form.py @@ -0,0 +1,97 @@ +# pylint: disable=C0103 +# pylint: disable=W0212 +from typing import Dict, List, Optional, Union + +from .iobase import pipe_interaction +from .widgets import Button, Widget + + +class Form: + """ + A container for different widgets + + Syntax: + """ + + def __init__( + self, + components: List[Union[Widget, str]], + title: Optional[str] = None, + submit_button_name: Optional[str] = None, + cancel_button_name: Optional[str] = None, + ): + """ + components: components in the form, can be widgets (except Button) or strings + title: title of the form + """ + assert ( + any(isinstance(c, Button) for c in components) is False + ), "Button is not allowed in Form" + + self._components = components + self._title = title + + self._rendered = False + self._submit = submit_button_name + self._cancel = cancel_button_name + + @property + def components(self) -> List[Union[Widget, str]]: + """ + Return the components + """ + + return self._components + + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for all components + """ + lines = [] + + if self._title: + lines.append(self._title) + + for c in self.components: + if isinstance(c, str): + lines.append(c) + elif isinstance(c, Widget): + lines.append(c._in_chatmark()) + else: + raise ValueError(f"Invalid component {c}") + + return "\n".join(lines) + + def _parse_response(self, response: Dict): + """ + Parse response from user input + """ + for c in self.components: + if isinstance(c, Widget): + c._parse_response(response) + + def render(self): + """ + Render to receive user input + """ + if self._rendered: + # already rendered once + # not sure if the constraint is necessary + # could be removed if re-rendering is needed + raise RuntimeError("Widget can only be rendered once") + + self._rendered = True + + chatmark_header = "```chatmark" + chatmark_header += f" submit={self._submit}" if self._submit else "" + chatmark_header += f" cancel={self._cancel}" if self._cancel else "" + + lines = [ + chatmark_header, + self._in_chatmark(), + "```", + ] + + chatmark = "\n".join(lines) + response = pipe_interaction(chatmark) + self._parse_response(response) diff --git a/site-packages/devchat/chatmark/iobase.py b/site-packages/devchat/chatmark/iobase.py new file mode 100644 index 0000000..571e940 --- /dev/null +++ b/site-packages/devchat/chatmark/iobase.py @@ -0,0 +1,43 @@ +import yaml + + +def _send_message(message): + out_data = f"""\n{message}\n""" + print(out_data, flush=True) + + +def 
_parse_chatmark_response(response): + # response looks like: + """ + ``` some_name + some key name 1: value1 + some key name 2: value2 + ``` + """ + # parse key values + lines = response.strip().split("\n") + if len(lines) <= 2: + return {} + + data = yaml.safe_load("\n".join(lines[1:-1])) + return data + + +def pipe_interaction(message: str): + _send_message(message) + + lines = [] + while True: + try: + line = input() + if line.strip().startswith("```yaml"): + lines = [] + elif line.strip() == "```": + lines.append(line) + break + lines.append(line) + except EOFError: + pass + + response = "\n".join(lines) + return _parse_chatmark_response(response) diff --git a/site-packages/devchat/chatmark/step.py b/site-packages/devchat/chatmark/step.py new file mode 100644 index 0000000..9e0a8c4 --- /dev/null +++ b/site-packages/devchat/chatmark/step.py @@ -0,0 +1,28 @@ +from contextlib import AbstractContextManager + + +class Step(AbstractContextManager): + """ + Show a running step in the TUI. + + ChatMark syntax: + + ```Step + # Something is running... + some details... + ``` + + Usage: + with Step("Something is running..."): + print("some details...") + """ + + def __init__(self, title: str): + self.title = title + + def __enter__(self): + print(f"\n```Step\n# {self.title}", flush=True) + + def __exit__(self, exc_type, exc_val, exc_tb): + # close the step + print("\n```", flush=True) diff --git a/site-packages/devchat/chatmark/widgets.py b/site-packages/devchat/chatmark/widgets.py new file mode 100644 index 0000000..ac04f10 --- /dev/null +++ b/site-packages/devchat/chatmark/widgets.py @@ -0,0 +1,395 @@ +from abc import ABC, abstractmethod +from typing import Dict, List, Optional, Tuple +from uuid import uuid4 + +from .iobase import pipe_interaction + + +class Widget(ABC): + """ + Abstract base class for widgets + """ + + def __init__(self, submit: Optional[str] = None, cancel: Optional[str] = None): + self._rendered = False + # Prefix for IDs/keys in the widget + self._id_prefix = self.gen_id_prefix() + self._submit = submit + self._cancel = cancel + + @abstractmethod + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for the widget + """ + + @abstractmethod + def _parse_response(self, response: Dict) -> None: + """ + Parse ChatMark response from user input + """ + + def render(self) -> None: + """ + Render the widget to receive user input + """ + if self._rendered: + # already rendered once + # not sure if the constraint is necessary + # could be removed if re-rendering is needed + raise RuntimeError("Widget can only be rendered once") + + self._rendered = True + + chatmark_header = "```chatmark" + chatmark_header += f" submit={self._submit}" if self._submit else "" + chatmark_header += f" cancel={self._cancel}" if self._cancel else "" + + lines = [ + chatmark_header, + self._in_chatmark(), + "```", + ] + + chatmark = "\n".join(lines) + response = pipe_interaction(chatmark) + self._parse_response(response) + + @staticmethod + def gen_id_prefix() -> str: + return uuid4().hex + + @staticmethod + def gen_id(id_prefix: str, index: int) -> str: + return f"{id_prefix}_{index}" + + @staticmethod + def parse_id(a_id: str) -> Tuple[Optional[str], Optional[int]]: + try: + id_prefix, index = a_id.split("_") + return id_prefix, int(index) + except Exception: + return None, None + + +class Checkbox(Widget): + """ + ChatMark syntax: + ```chatmark + Which files would you like to commit? I've suggested a few.
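The fenced YAML reply format shown in these widget docstrings is exactly what the iobase parser above decodes; a minimal sketch of that round trip (assumed import path within the package):

```python
from devchat.chatmark.iobase import _parse_chatmark_response

reply = "```yaml\nfile1: checked\nfile3: checked\n```"
print(_parse_chatmark_response(reply))  # {'file1': 'checked', 'file3': 'checked'}
```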
+ > [x](file1) devchat/engine/prompter.py + > [x](file2) devchat/prompt.py + > [](file3) tests/test_cli_prompt.py + ``` + + Response: + ```yaml + file1: checked + file3: checked + ``` + """ + + def __init__( + self, + options: List[str], + check_states: Optional[List[bool]] = None, + title: Optional[str] = None, + submit_button_name: str = "Submit", + cancel_button_name: str = "Cancel", + ): + """ + options: options to be selected + check_states: initial check states of options, default to all False + title: title of the widget + """ + super().__init__(submit_button_name, cancel_button_name) + + if check_states is not None: + assert len(options) == len(check_states) + else: + check_states = [False for _ in options] + + self._options = options + self._states = check_states + self._title = title + + self._selections: Optional[List[int]] = None + + @property + def selections(self) -> Optional[List[int]]: + """ + Get the indices of selected options + """ + return self._selections + + @property + def options(self) -> List[str]: + """ + Get the options + """ + return self._options + + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for checkbox options + Use the index of option to generate id/key + """ + lines = [] + + if self._title: + lines.append(self._title) + + for idx, (option, state) in enumerate(zip(self._options, self._states)): + mark = "[x]" if state else "[]" + key = self.gen_id(self._id_prefix, idx) + lines.append(f"> {mark}({key}) {option}") + + text = "\n".join(lines) + return text + + def _parse_response(self, response: Dict): + selections = [] + for key, value in response.items(): + prefix, index = self.parse_id(key) + # check if the prefix is the same as the widget's + if prefix != self._id_prefix: + continue + + if value == "checked": + selections.append(index) + + self._selections = selections + + +class TextEditor(Widget): + """ + ChatMark syntax: + ```chatmark + I've drafted a commit message for you as below. Feel free to modify it. + + > | (ID) + > fix: prevent racing of requests + > + > Introduce a request id and a reference to latest request. Dismiss + > incoming responses other than from latest request. + > + > Reviewed-by: Z + > Refs: #123 + ``` + + Response: + ```yaml + ID: | + fix: prevent racing of requests + + Introduce a request ID and a reference to latest request. Dismiss + incoming responses other than from latest request. + + Reviewed-by: Z + Refs: #123 + ``` + """ + + def __init__( + self, + text: str, + title: Optional[str] = None, + submit_button_name: str = "Submit", + cancel_button_name: str = "Cancel", + ): + super().__init__(submit_button_name, cancel_button_name) + + self._title = title + self._text = text + + self._editor_key = self.gen_id(self._id_prefix, 0) + self._new_text: Optional[str] = None + + @property + def new_text(self): + return self._new_text + + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for text editor + Use _editor_key as id + """ + lines = self._text.split("\n") + new_lines = [] + + if self._title: + new_lines.append(self._title) + + new_lines.append(f"> | ({self._editor_key})") + new_lines.extend([f"> {line}" for line in lines]) + + text = "\n".join(new_lines) + return text + + def _parse_response(self, response: Dict): + self._new_text = response.get(self._editor_key, None) + + +class Radio(Widget): + """ + ChatMark syntax: + ```chatmark + How would you like to make the change? + > - (insert) Insert the new code. + > - (new) Put the code in a new file. + > - (replace) Replace the current code. 
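A TextEditor is driven the same way as the other widgets (a minimal sketch; the draft text is illustrative):

```python
from devchat.chatmark import TextEditor

# Pre-fill a draft and let the user edit it; read the result from new_text.
editor = TextEditor(
    "fix: prevent racing of requests",
    title="I've drafted a commit message for you as below. Feel free to modify it.",
)
editor.render()
print(editor.new_text)
```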
+ ``` + + Response: + ```yaml + replace: checked + ``` + """ + + def __init__( + self, + options: List[str], + default_selected: Optional[int] = None, + title: Optional[str] = None, + submit_button_name: str = "Submit", + cancel_button_name: str = "Cancel", + ) -> None: + """ + options: options to be selected + default_selected: index of the option to be selected by default, default to None + title: title of the widget + """ + if default_selected is not None: + assert 0 <= default_selected < len(options) + + super().__init__(submit_button_name, cancel_button_name) + + self._options = options + self._title = title + + self._selection: Optional[int] = default_selected + + @property + def options(self) -> List[str]: + """ + Return the options + """ + return self._options + + @property + def selection(self) -> Optional[int]: + """ + Return the index of the selected option + """ + return self._selection + + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for options + Use the index of option to generate id/key + """ + lines = [] + + if self._title: + lines.append(self._title) + + for idx, option in enumerate(self._options): + key = self.gen_id(self._id_prefix, idx) + if self._selection is not None and self._selection == idx: + lines.append(f"> x ({key}) {option}") + else: + lines.append(f"> - ({key}) {option}") + + text = "\n".join(lines) + return text + + def _parse_response(self, response: Dict): + selected = None + for key, value in response.items(): + prefix, idx = self.parse_id(key) + # check if the prefix is the same as the widget's + if prefix != self._id_prefix: + continue + + if value == "checked": + selected = idx + break + + self._selection = selected + + +class Button(Widget): + """ + ChatMark syntax: + ```chatmark + Would you like to pay $0.02 for this LLM query? + > (Confirm) Yes, go ahead! + > (Cancel) No, let's skip this.
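Reading a Radio selection back follows the same render-then-inspect pattern (a minimal sketch; option labels are illustrative):

```python
from devchat.chatmark import Radio

radio = Radio(["Insert the new code.", "Put the code in a new file.", "Replace the current code."])
radio.render()
if radio.selection is not None:
    print(radio.options[radio.selection])
```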
+ ``` + + ```yaml + Confirm: clicked + ``` + + # NOTE: almost the same as Radio essentially + """ + + def __init__( + self, + buttons: List[str], + title: Optional[str] = None, + ) -> None: + """ + buttons: button names to show + title: title of the widget + """ + super().__init__() + + self._buttons = buttons + self._title = title + + self._clicked: Optional[int] = None + + @property + def clicked(self) -> Optional[int]: + """ + Return the index of the clicked button + """ + return self._clicked + + @property + def buttons(self) -> List[str]: + """ + Return the buttons + """ + return self._buttons + + def _in_chatmark(self) -> str: + """ + Generate ChatMark syntax for options + Use the index of button to generate id/key + """ + lines = [] + + if self._title: + lines.append(self._title) + + for idx, button in enumerate(self._buttons): + key = self.gen_id(self._id_prefix, idx) + lines.append(f"> ({key}) {button}") + + text = "\n".join(lines) + return text + + def _parse_response(self, response: Dict[str, str]): + clicked = None + for key, value in response.items(): + prefix, idx = self.parse_id(key) + # check if the prefix is the same as the widget's + if prefix != self._id_prefix: + continue + + if value == "clicked": + clicked = idx + break + self._clicked = clicked diff --git a/site-packages/devchat/engine/__init__.py b/site-packages/devchat/engine/__init__.py index 5fb7a41..af78a48 100644 --- a/site-packages/devchat/engine/__init__.py +++ b/site-packages/devchat/engine/__init__.py @@ -1,7 +1,7 @@ from .command_parser import parse_command, Command, CommandParser from .namespace import Namespace from .recursive_prompter import RecursivePrompter -from .router import run_command +from .router import run_command, load_workflow_instruction __all__ = [ 'parse_command', @@ -9,5 +9,6 @@ __all__ = [ 'CommandParser', 'Namespace', 'RecursivePrompter', - 'run_command' + 'run_command', + 'load_workflow_instruction' ] diff --git a/site-packages/devchat/engine/command_runner.py b/site-packages/devchat/engine/command_runner.py index bf6415d..6222919 100644 --- a/site-packages/devchat/engine/command_runner.py +++ b/site-packages/devchat/engine/command_runner.py @@ -9,138 +9,73 @@ import subprocess from typing import List, Dict import shlex -import openai -from devchat.openai.openai_chat import OpenAIChatConfig - from devchat.utils import get_logger from .command_parser import Command +from .util import ToolUtil logger = get_logger(__name__) +DEVCHAT_COMMAND_MISS_ERROR_MESSAGE = ( + 'devchat-commands environment is not installed yet. ' + 'Please install it before using the current command.' + 'The devchat-command environment is automatically ' + 'installed after the plugin starts,' + ' and details can be viewed in the output window.' 
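With the widgets in place, the diff turns to the workflow engine, whose package now also exports load_workflow_instruction (defined in router.py later in this diff). A minimal sketch of how a caller resolves a slash command's prompt text (the command string is illustrative):

```python
from devchat.engine import load_workflow_instruction

# Returns the workflow's recursive prompt text for a slash command,
# or None when the input is not a command or no workflows are installed.
instruction = load_workflow_instruction("/commit improve the wording")
```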
+) + +def pipe_reader(pipe, out_data, out_flag): + while pipe: + data = pipe.read(1) + if data == '': + break + out_data['out'] += data + print(data, end='', file=out_flag, flush=True) + + # Equivalent of CommandRun in Python\which executes subprocesses class CommandRunner: def __init__(self, model_name: str): self.process = None self._model_name = model_name - def _call_function_by_llm(self, - openai_config: OpenAIChatConfig, - command_name: str, - command: Command, - history_messages: List[Dict]): - """ - command needs multi parameters, so we need parse each - parameter by LLM from input_text - """ - properties = {} - required = [] - for key, value in command.parameters.items(): - properties[key] = {} - for key1, value1 in value.dict().items(): - if key1 not in ['type', 'description', 'enum'] or value1 is None: - continue - properties[key][key1] = value1 - required.append(key) - - command_name = command_name.replace('.', '---') - tools = [ - { - "type": "function", - "function": { - "name": command_name, - "description": command.description, - "parameters": { - "type": "object", - "properties": properties, - "required": required, - }, - } - } - ] - - client = openai.OpenAI( - api_key=os.environ.get("OPENAI_API_KEY", None), - base_url=os.environ.get("OPENAI_API_BASE", None) - ) - - config_params = openai_config.dict(exclude_unset=True) - config_params.pop('stream', None) - config_params.pop('user', None) - config_params.pop('request_timeout', None) - config_params.pop('model', None) - - connection_error = '' - for _1 in range(3): - try: - response = client.chat.completions.create( - messages=history_messages, - model="gpt-3.5-turbo-16k", - stream=False, - **config_params, - tools=tools, - tool_choice={"type": "function", "function": {"name": command_name}} - ) - - respose_message = response.dict()["choices"][0]["message"] - if not respose_message['tool_calls']: - return None - tool_call = respose_message['tool_calls'][0]['function'] - if tool_call['name'] != command_name: - return None - parameters = json.loads(tool_call['arguments']) - return parameters - except (ConnectionError, openai.APIConnectionError) as err: - connection_error = err - continue - except Exception as err: - print("Exception:", err, file=sys.stderr, flush=True) - logger.exception("Call command by LLM error: %s", err) - return None - print("Connect Error:", connection_error, file=sys.stderr, flush=True) - return None - - def run_command(self, - openai_config: OpenAIChatConfig, command_name: str, command: Command, history_messages: List[Dict], input_text: str, - parent_hash: str, - context_contents: List[str]): + parent_hash: str): """ if command has parameters, then generate command parameters from input by LLM if command.input is "required", and input is null, then return error """ + input_text = input_text.strip()\ + .replace(f'/{command_name}', '')\ + .replace('\"', '\\"')\ + .replace('\'', '\\\'')\ + .replace('\n', '\\n') + + arguments = {} if command.parameters and len(command.parameters) > 0: if not self._model_name.startswith("gpt-"): return None - arguments = self._call_function_by_llm(openai_config, command_name, command, history_messages) + arguments = self._call_function_by_llm(command_name, command, history_messages) if not arguments: print("No valid parameters generated by LLM", file=sys.stderr, flush=True) return (-1, "") - return self.run_command_with_parameters( - command_name, - command, - { - "input": input_text.strip().replace(f'/{command_name}', '').replace('\"', '\\"').replace('\'', 
'\\\'').replace('\n', '\\n'), - **arguments - }, - parent_hash, - history_messages) return self.run_command_with_parameters( - command_name, - command, - { - "input": input_text.strip().replace(f'/{command_name}', '').replace('\"', '\\"').replace('\'', '\\\'').replace('\n', '\\n') + command_name=command_name, + command=command, + parameters={ + "input": input_text, + **arguments }, - parent_hash, - history_messages) - + parent_hash=parent_hash, + history_messages=history_messages + ) def run_command_with_parameters(self, command_name: str, @@ -152,107 +87,174 @@ class CommandRunner: replace $xxx in command.steps[0].run with parameters[xxx] then run command.steps[0].run """ - def pipe_reader(pipe, out_data, out_flag): - while pipe: - data = pipe.read(1) - if data == '': - break - out_data['out'] += data - print(data, end='', file=out_flag, flush=True) - try: - # add environment variables to parameters - if self._model_name: - os.environ['LLM_MODEL'] = self._model_name - if parent_hash: - os.environ['PARENT_HASH'] = parent_hash - if history_messages: - os.environ['CONTEXT_CONTENTS'] = json.dumps(history_messages) - for env_var in os.environ: - parameters[env_var] = os.environ[env_var] - - # how to get command_python path? - root_command_name = command_name.split('.')[0] - command_runtime = os.path.expanduser(f'~/.chat/workflows/usr/{root_command_name}/runtime.json') - if os.path.exists(command_runtime): - with open(command_runtime, 'r', encoding='utf8') as f: - command_runtime_json = json.loads(f.read()) - if 'command_python' in command_runtime_json: - parameters['command_python'] = command_runtime_json['command_python'].replace('\\', '/') - elif os.environ.get('command_python', None): - parameters['command_python'] = os.environ['command_python'].replace('\\', '/') - parameters["devchat_python"] = sys.executable.replace('\\', '/') - - command_run = command.steps[0]["run"] - - # if $devchat_python in command_run - # then set environ PYTHONPATH to DEVCHAT_PYTHONPATH - # if command_python in command_run - # then unset environ PYTHONPATH env = os.environ.copy() - if 'DEVCHAT_PYTHONPATH' not in env: - env['DEVCHAT_PYTHONPATH'] = os.environ.get('PYTHONPATH', '') - if command_run.find('$devchat_python ') == -1: - del env['PYTHONPATH'] - if command_run.find('$command_python ') != -1 and parameters.get('command_python', '') == '': - error_msg = ('devchat-commands environment is not installed yet. ' - 'Please install it before using the current command.' 
- 'The devchat-command environment is automatically installed after the plugin starts,' - ' and details can be viewed in the output window.') - print(error_msg, file=sys.stderr, flush=True) + env.update(parameters) + env.update( + self.__load_command_runtime(command) + ) + env.update( + self.__load_chat_data(self._model_name, parent_hash, history_messages) + ) + self.__update_devchat_python_path(env, command.steps[0]["run"]) + + command_run = command.steps[0]["run"] + for parameter in env: + command_run = command_run.replace('$' + parameter, str(env[parameter])) + + if self.__check_command_python_error(command_run, env): + return (-1, "") + if self.__check_input_miss_error(command, command_name, env): + return (-1, "") + if self.__check_parameters_miss_error(command, command_run): return (-1, "") - # Replace parameters in command run - for parameter in parameters: - command_run = command_run.replace('$' + parameter, str(parameters[parameter])) - # Check whether there is parameter not specified - has_parameter = (command_run.find('$') != -1) - is_input_required = (command.input == "required") - is_input_invalid = is_input_required and parameters["input"] == "" - if has_parameter or is_input_invalid: - command_dir = os.path.dirname(command.path) - readme_file = os.path.join(command_dir, 'README.md') - if os.path.exists(readme_file): - with open(readme_file, 'r', encoding='utf8') as f: - readme = f.read() - print(readme, flush=True) - return (0, readme) - else: - if has_parameter: - print("Missing argument. the command being parsed is:", command_run, file=sys.stderr, flush=True) - else: - print(f"Missing input which is required. You can use it as '/{command_name} some related description'", file=sys.stderr, flush=True) - return (-1, "") - - # result = subprocess.run(command_run, shell=True, env=env) - # return result - # command_run = command_run.replace('\\', '/') - with subprocess.Popen( - shlex.split(command_run), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env, - text=True - ) as process: - - stdout_data = {'out': ''} - stderr_data = {'out': ''} - - stdout_thread = threading.Thread( - target=pipe_reader, - args=(process.stdout, stdout_data, sys.stdout)) - stderr_thread = threading.Thread( - target=pipe_reader, - args=(process.stderr, stderr_data, sys.stderr)) - - stdout_thread.start() - stderr_thread.start() - - stdout_thread.join() - stderr_thread.join() - exit_code = process.wait() - return (exit_code, stdout_data["out"]) - return (-1, "") + return self.__run_command_with_thread_output(command_run, env) except Exception as err: print("Exception:", type(err), err, file=sys.stderr, flush=True) + logger.exception("Run command error: %s", err) return (-1, "") + + def __run_command_with_thread_output(self, command_str: str, env: Dict[str, str]): + """ + run command string + """ + def handle_output(process): + stdout_data, stderr_data = {'out': ''}, {'out': ''} + stdout_thread = threading.Thread( + target=pipe_reader, + args=(process.stdout, stdout_data, sys.stdout)) + stderr_thread = threading.Thread( + target=pipe_reader, + args=(process.stderr, stderr_data, sys.stderr)) + stdout_thread.start() + stderr_thread.start() + stdout_thread.join() + stderr_thread.join() + return (process.wait(), stdout_data["out"]) + + for key in env: + if isinstance(env[key], (List, Dict)): + env[key] = json.dumps(env[key]) + with subprocess.Popen( + shlex.split(command_str), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + text=True + ) as process: + return handle_output(process) + 
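The placeholder substitution performed by run_command_with_parameters above is plain string replacement over the merged environment; a minimal sketch of that step in isolation (hypothetical values):

```python
# Mirror of the substitution loop above: every $name in the step's run
# string is replaced with the matching environment value.
env = {"input": "hello", "command_python": "/usr/bin/python3"}
command_run = "$command_python main.py --text $input"
for parameter in env:
    command_run = command_run.replace("$" + parameter, str(env[parameter]))
print(command_run)  # /usr/bin/python3 main.py --text hello
```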
+ def __check_command_python_error(self, command_run: str, parameters: Dict[str, str]): + need_command_python = command_run.find('$command_python ') != -1 + has_command_python = parameters.get('command_python', None) + + if need_command_python and not has_command_python: + print(DEVCHAT_COMMAND_MISS_ERROR_MESSAGE, file=sys.stderr, flush=True) + return True + return False + + def __get_readme(self, command: Command): + try: + command_dir = os.path.dirname(command.path) + readme_file = os.path.join(command_dir, 'README.md') + if os.path.exists(readme_file): + with open(readme_file, 'r', encoding='utf8') as file: + readme = file.read() + return readme + return None + except Exception: + return None + + def __check_input_miss_error( + self, command: Command, command_name: str, parameters: Dict[str, str] + ): + is_input_required = command.input == "required" + if not (is_input_required and parameters["input"] == ""): + return False + + input_miss_error = ( + f"{command_name} workflow is missing input. Example usage: " + f"'/{command_name} user input'\n" + ) + readme_content = self.__get_readme(command) + if readme_content: + print(readme_content, file=sys.stderr, flush=True) + else: + print(input_miss_error, file=sys.stderr, flush=True) + return True + + def __check_parameters_miss_error(self, command: Command, command_run: str): + # visit parameters in command + parameter_names = command.parameters.keys() if command.parameters else [] + if len(parameter_names) == 0: + return False + + missed_parameters = [] + for parameter_name in parameter_names: + if command_run.find('$' + parameter_name) != -1: + missed_parameters.append(parameter_name) + + if len(missed_parameters) == 0: + return False + + readme_content = self.__get_readme(command) + if readme_content: + print(readme_content, file=sys.stderr, flush=True) + else: + print("Missing parameters:", missed_parameters, file=sys.stderr, flush=True) + return True + + def __load_command_runtime(self, command: Command): + command_path = os.path.dirname(command.path) + runtime_config = {} + + # visit each path in command_path, for example: /usr/x1/x2/x3 + # then load visit: /usr, /usr/x1, /usr/x1/x2, /usr/x1/x2/x3 + paths = command_path.split('/') + for index in range(1, len(paths)+1): + try: + path = '/'.join(paths[:index]) + runtime_file = os.path.join(path, 'runtime.json') + if os.path.exists(runtime_file): + with open(runtime_file, 'r', encoding='utf8') as file: + command_runtime_config = json.loads(file.read()) + runtime_config.update(command_runtime_config) + except Exception: + pass + + # for windows + if runtime_config.get('command_python', None): + runtime_config['command_python'] = \ + runtime_config['command_python'].replace('\\', '/') + return runtime_config + + def __load_chat_data(self, model_name: str, parent_hash: str, history_messages: List[Dict]): + return { + "LLM_MODEL": model_name if model_name else "", + "PARENT_HASH": parent_hash if parent_hash else "", + "CONTEXT_CONTENTS": history_messages if history_messages else [], + } + + def __update_devchat_python_path(self, env: Dict[str, str], command_run: str): + python_path = os.environ.get('PYTHONPATH', '') + env['DEVCHAT_PYTHONPATH'] = os.environ.get('DEVCHAT_PYTHONPATH', python_path) + if command_run.find('$devchat_python ') == -1: + del env['PYTHONPATH'] + env["devchat_python"] = sys.executable.replace('\\', '/') + + def _call_function_by_llm(self, + command_name: str, + command: Command, + history_messages: List[Dict]): + """ + command needs multi parameters, so we need parse 
each + parameter by LLM from input_text + """ + tools = [ToolUtil.make_function(command, command_name)] + + function_call = ToolUtil.select_function_by_llm(history_messages, tools) + if not function_call: + return None + + return function_call["arguments"] diff --git a/site-packages/devchat/engine/router.py b/site-packages/devchat/engine/router.py index 0c8b9a9..1caa835 100644 --- a/site-packages/devchat/engine/router.py +++ b/site-packages/devchat/engine/router.py @@ -1,231 +1,18 @@ import os -import json -from typing import List, Iterable -import openai -from devchat._cli.utils import init_dir -from devchat.engine.recursive_prompter import RecursivePrompter -from devchat.openai.openai_chat import OpenAIChatConfig -from .namespace import Namespace -from .command_parser import CommandParser, Command +from typing import List from .command_runner import CommandRunner +from .util import CommandUtil +from .namespace import Namespace +from.recursive_prompter import RecursivePrompter - -def _load_command(command: str): - _, user_chat_dir = init_dir() - workflows_dir = os.path.join(user_chat_dir, 'workflows') - if not os.path.exists(workflows_dir): +def load_workflow_instruction(user_input: str): + user_input = user_input.strip() + if len(user_input) == 0: return None - if not os.path.isdir(workflows_dir): + if user_input[:1] != '/': return None - namespace = Namespace(workflows_dir) - commander = CommandParser(namespace) - - cmd = commander.parse(command) - if not cmd: - return None - return cmd - - -def _load_commands() -> List[Command]: - _, user_chat_dir = init_dir() - workflows_dir = os.path.join(user_chat_dir, 'workflows') - if not os.path.exists(workflows_dir): - return [] - if not os.path.isdir(workflows_dir): - return [] - - namespace = Namespace(workflows_dir) - commander = CommandParser(namespace) - command_names = namespace.list_names("", True) - - commands = [] - for name in command_names: - cmd = commander.parse(name) - if not cmd: - continue - commands.append((name, cmd)) - - return commands - - -def _create_tool(command_name:str, command: Command) -> dict: - properties = {} - required = [] - if command.parameters: - for key, value in command.parameters.items(): - properties[key] = {} - for key1, value1 in value.dict().items(): - if key1 not in ['type', 'description', 'enum'] or value1 is None: - continue - properties[key][key1] = value1 - required.append(key) - elif command.steps[0]['run'].find('$input') > 0: - properties['input'] = { - "type": "string", - "description": "input text" - } - required.append('input') - - command_name = command_name.replace('.', '---') - return { - "type": "function", - "function": { - "name": command_name, - "description": command.description, - "parameters": { - "type": "object", - "properties": properties, - "required": required, - }, - } - } - - -def _create_tools() -> List[dict]: - commands = _load_commands() - return [_create_tool(command[0], command[1]) for command in commands if command[1].steps] - - -def _call_gpt( - openai_config: OpenAIChatConfig, - messages: List[dict], # messages passed to GPT - model_name: str, # GPT model name - use_function_calling: bool) -> dict: # whether to use function calling - client = openai.OpenAI( - api_key=os.environ.get("OPENAI_API_KEY", None), - base_url=os.environ.get("OPENAI_API_BASE", None) - ) - - tools_functions = _create_tools() - tools = {} if not use_function_calling or not tools_functions else {"tools": tools_functions} - - config_params = openai_config.dict(exclude_unset=True) - 
config_params.pop('stream', None) - config_params.pop('user', None) - config_params.pop('request_timeout', None) - config_params.pop('model', None) - - for try_times in range(3): - try: - response: Iterable = client.chat.completions.create( - messages=messages, - model=model_name, - stream=True, - **config_params, - **tools - ) - - response_result = {'content': None, 'function_name': None, 'parameters': ""} - for chunk in response: # pylint: disable=E1133 - chunk = chunk.dict() - delta = chunk["choices"][0]["delta"] - if 'tool_calls' in delta and delta['tool_calls']: - tool_call = delta['tool_calls'][0]['function'] - if tool_call.get('name', None): - response_result["function_name"] = tool_call["name"].replace('---', '.') - if tool_call.get("arguments", None): - response_result["parameters"] += tool_call["arguments"] - if delta.get('content', None): - if response_result["content"]: - response_result["content"] += delta["content"] - else: - response_result["content"] = delta["content"] - print(delta["content"], end='', flush=True) - if response_result["function_name"]: - print("``` command_run") - function_call = { - 'name': response_result["function_name"], - 'arguments': response_result["parameters"]} - print(json.dumps(function_call, indent=4)) - print("```", flush=True) - return response_result - except (ConnectionError, openai.APIConnectionError) as err: - if try_times == 2: - print("Connect Exception:", err) - print(err.strerror) - return {'content': None, 'function_name': None, 'parameters': ""} - continue - except Exception as err: - print("Exception Error:", err) - return {'content': None, 'function_name': None, 'parameters': ""} - return {'content': None, 'function_name': None, 'parameters': ""} - - -def _create_messages(): - return [] - - -def _call_function(function_name: str, parameters: str, model_name: str): - """ - call function by function_name and parameters - """ - parameters = json.loads(parameters) - command_obj = _load_command(function_name) - runner = CommandRunner(model_name) - return runner.run_command_with_parameters(function_name, command_obj, parameters, "", []) - - -def _auto_function_calling( - openai_config: OpenAIChatConfig, - history_messages: List[dict], - model_name:str): - """ - 通过function calling方式来回答当前问题。 - function最多被调用4次,必须进行最终答复。 - """ - function_call_times = 0 - - response = _call_gpt(openai_config, history_messages, model_name, True) - while True: - if response['function_name']: - # run function - function_call_times += 1 - print("do function calling", end='\n\n', flush=True) - function_result = _call_function( - response['function_name'], - response['parameters'], - model_name) - history_messages.append({ - 'role': 'function', - 'content': f'exit code: {function_result[0]} stdout: {function_result[1]}', - 'name': response['function_name']}) - print("after functon call.", end='\n\n', flush=True) - - # send function result to gpt - if function_call_times < 5: - response = _call_gpt(openai_config, history_messages, model_name, True) - else: - response = _call_gpt(openai_config, history_messages, model_name, False) - else: - return response - - -def _auto_route(openai_config: OpenAIChatConfig, history_messages, model_name:str): - """ - select which command to run - """ - response = _call_gpt(openai_config, history_messages, model_name, True) - if response['function_name']: - return _call_function( - response['function_name'], - response['parameters'], - model_name) - if response['content']: - return (0, response['content']) - return (-1, "") - - -def 
_run_prompt_command( - openai_config: OpenAIChatConfig, - command_name: str, - history_messages, - model_name:str): - """ - run prompt command - """ - _, user_chat_dir = init_dir() - workflows_dir = os.path.join(user_chat_dir, 'workflows') + workflows_dir = os.path.join(os.path.abspath('~/.chat'), 'workflows') if not os.path.exists(workflows_dir): return None if not os.path.isdir(workflows_dir): @@ -233,25 +20,18 @@ def _run_prompt_command( namespace = Namespace(workflows_dir) prompter = RecursivePrompter(namespace) + + command_name = user_input.split()[0][1:] command_prompts = prompter.run(command_name) - history_messages.append({ - 'role': 'system', - 'content': command_prompts}) - - response = _call_gpt(openai_config, history_messages, model_name, False) - if response['content']: - return (0, response['content']) - return (-1, "") + return command_prompts def run_command( - openai_config: OpenAIChatConfig, model_name: str, history_messages: List[dict], input_text: str, parent_hash: str, - context_contents: List[str], auto_fun: bool): """ load command config, and then run Command @@ -263,24 +43,22 @@ def run_command( if not (auto_fun and model_name.startswith('gpt-')): return None - # response = _auto_function_calling(history_messages, model_name) - # return response['content'] - return _auto_route(openai_config, history_messages, model_name) + # TODO + # use auto select workflow to run command + return None + commands = input_text.split() command = commands[0][1:] - command_obj = _load_command(command) - if not command_obj: + command_obj = CommandUtil.load_command(command) + if not command_obj or not command_obj.steps: return None - if not command_obj.steps: - return _run_prompt_command(openai_config, command, history_messages, model_name) runner = CommandRunner(model_name) return runner.run_command( - openai_config, - command, - command_obj, - history_messages, - input_text, - parent_hash, - context_contents) + command_name=command, + command=command_obj, + history_messages=history_messages, + input_text=input_text, + parent_hash=parent_hash + ) diff --git a/site-packages/devchat/engine/util.py b/site-packages/devchat/engine/util.py new file mode 100644 index 0000000..935cb0f --- /dev/null +++ b/site-packages/devchat/engine/util.py @@ -0,0 +1,168 @@ +import os +import sys +import json +from typing import List, Dict + +import openai + +from devchat._cli.utils import init_dir +from devchat.utils import get_logger + +from .namespace import Namespace +from .command_parser import CommandParser, Command + + +logger = get_logger(__name__) + + +DEFAULT_MODEL = "gpt-3.5-turbo" + +class CommandUtil: + @staticmethod + def __command_parser(): + _, user_chat_dir = init_dir() + workflows_dir = os.path.join(user_chat_dir, 'workflows') + if not os.path.exists(workflows_dir) or not os.path.isdir(workflows_dir): + return None + + namespace = Namespace(workflows_dir) + commander = CommandParser(namespace) + return commander + + @staticmethod + def load_command(command: str): + commander = CommandUtil.__command_parser() + if not commander: + return None + return commander.parse(command) + + @staticmethod + def load_commands() -> List[Command]: + commander = CommandUtil.__command_parser() + if not commander: + return [] + + command_names = commander.namespace.list_names("", True) + commands = [ (name, commander.parse(name)) for name in command_names ] + return [ cmd for cmd in commands if cmd[1] ] + + +class ToolUtil: + @staticmethod + def __make_function_parameters(command: Command): + properties = {} + 
required = [] + + if command.parameters: + for key, value in command.parameters.items(): + properties[key] = {} + for key1, value1 in value.dict().items(): + if key1 not in ['type', 'description', 'enum'] or value1 is None: + continue + properties[key][key1] = value1 + required.append(key) + elif command.steps[0]['run'].find('$input') > 0: + properties['input'] = { + "type": "string", + "description": "input text" + } + required.append('input') + + return properties, required + + @staticmethod + def make_function(command: Command, command_name: str): + properties, required = ToolUtil.__make_function_parameters(command) + command_name = command_name.replace('.', '---') + + return { + "type": "function", + "function": { + "name": command_name, + "description": command.description, + "parameters": { + "type": "object", + "properties": properties, + "required": required, + }, + } + } + + @staticmethod + def select_function_by_llm( + history_messages: List[Dict], tools: List[Dict], model: str = DEFAULT_MODEL + ): + client = openai.OpenAI( + api_key=os.environ.get("OPENAI_API_KEY", None), + base_url=os.environ.get("OPENAI_API_BASE", None) + ) + + try: + response = client.chat.completions.create( + messages=history_messages, + model=model, + stream=False, + tools=tools + ) + + respose_message = response.dict()["choices"][0]["message"] + if not respose_message['tool_calls']: + return None + tool_call = respose_message['tool_calls'][0]['function'] + if tool_call['name'] != tools[0]["function"]["name"]: + error_msg = ( + "The LLM returned an invalid function name. " + f"Expected: {tools[0]['function']['name']}, " + f"Actual: {tool_call['name']}" + ) + print(error_msg, file=sys.stderr, flush=True) + return None + return { + "name": tool_call['name'].replace('---', '.'), + "arguments": json.loads(tool_call['arguments']) + } + except (ConnectionError, openai.APIConnectionError) as err: + print("ConnectionError:", err, file=sys.stderr, flush=True) + return None + except openai.APIError as err: + print("openai APIError:", err.type, file=sys.stderr, flush=True) + logger.exception("Call command by LLM error: %s", err) + return None + except Exception as err: + print("Exception:", err, file=sys.stderr, flush=True) + logger.exception("Call command by LLM error: %s", err) + return None + + @staticmethod + def _create_tool(command_name:str, command: Command) -> dict: + properties = {} + required = [] + if command.parameters: + for key, value in command.parameters.items(): + properties[key] = {} + for key1, value1 in value.dict().items(): + if key1 not in ['type', 'description', 'enum'] or value1 is None: + continue + properties[key][key1] = value1 + required.append(key) + elif command.steps[0]['run'].find('$input') > 0: + properties['input'] = { + "type": "string", + "description": "input text" + } + required.append('input') + + command_name = command_name.replace('.', '---') + return { + "type": "function", + "function": { + "name": command_name, + "description": command.description, + "parameters": { + "type": "object", + "properties": properties, + "required": required, + }, + } + } + \ No newline at end of file diff --git a/site-packages/devchat/ide/__init__.py b/site-packages/devchat/ide/__init__.py new file mode 100644 index 0000000..dbfd10a --- /dev/null +++ b/site-packages/devchat/ide/__init__.py @@ -0,0 +1,5 @@ +from .service import IDEService + +__all__ = [ + "IDEService", +] diff --git a/site-packages/devchat/ide/idea_services.py b/site-packages/devchat/ide/idea_services.py new file mode 100644 
index 0000000..e021131 --- /dev/null +++ b/site-packages/devchat/ide/idea_services.py @@ -0,0 +1,14 @@ +from .types import LocationWithText +from .rpc import rpc_method + +class IdeaIDEService: + def __init__(self): + self._result = None + + @rpc_method + def get_visible_range(self) -> LocationWithText: + return LocationWithText.parse_obj(self._result) + + @rpc_method + def get_selected_range(self) -> LocationWithText: + return LocationWithText.parse_obj(self._result) diff --git a/site-packages/devchat/ide/rpc.py b/site-packages/devchat/ide/rpc.py new file mode 100644 index 0000000..480e5df --- /dev/null +++ b/site-packages/devchat/ide/rpc.py @@ -0,0 +1,80 @@ +# pylint: disable=C0103 +# pylint: disable=W3101 +# pylint: disable=W0719 +# pylint: disable=R1710 +import os +from functools import wraps + +import requests + +BASE_SERVER_URL = os.environ.get("DEVCHAT_IDE_SERVICE_URL", "http://localhost:3000") + + +def rpc_call(f): + @wraps(f) + def wrapper(*args, **kwargs): + if os.environ.get("DEVCHAT_IDE_SERVICE_URL", "") == "": + # maybe in a test, user don't want to mock services functions + return + + try: + function_name = f.__name__ + url = f"{BASE_SERVER_URL}/{function_name}" + + data = dict(zip(f.__code__.co_varnames, args)) + data.update(kwargs) + headers = {"Content-Type": "application/json"} + + response = requests.post(url, json=data, headers=headers) + + if response.status_code != 200: + raise Exception(f"Server error: {response.status_code}") + + response_data = response.json() + if "error" in response_data: + raise Exception(f"Server returned an error: {response_data['error']}") + return response_data.get("result", None) + except ConnectionError as err: + # TODO + raise err + + return wrapper + + +def rpc_method(f): + """ + Decorator for Service methods + """ + + @wraps(f) + def wrapper(self, *args, **kwargs): + if os.environ.get("DEVCHAT_IDE_SERVICE_URL", "") == "": + # maybe in a test, user don't want to mock services functions + return + + try: + function_name = f.__name__ + url = f"{BASE_SERVER_URL}/{function_name}" + + data = dict(zip(f.__code__.co_varnames[1:], args)) # Exclude "self" + data.update(kwargs) + headers = {"Content-Type": "application/json"} + + response = requests.post(url, json=data, headers=headers) + + if response.status_code != 200: + raise Exception(f"Server error: {response.status_code}") + + response_data = response.json() + if "error" in response_data: + raise Exception(f"Server returned an error: {response_data['error']}") + + # Store the result in the _result attribute of the instance + self._result = response_data.get("result", None) + return f(self, *args, **kwargs) + + except ConnectionError as err: + # TODO + raise err + + return wrapper diff --git a/site-packages/devchat/ide/service.py b/site-packages/devchat/ide/service.py new file mode 100644 index 0000000..1bac612 --- /dev/null +++ b/site-packages/devchat/ide/service.py @@ -0,0 +1,80 @@ +# disable pylint +# pylint: disable=W0613 +# pylint: disable=E1133 +# pylint: disable=R1710 +# pylint: disable=W0212 +# pylint: disable=W0719 +# pylint: disable=W3101 +# pylint: disable=C0103 + +from typing import List + +from .rpc import rpc_method +from .types import Location, SymbolNode +from .vscode_services import selected_range, visible_range +from .idea_services import IdeaIDEService + + +class IDEService: + """ + Client for IDE service + + Usage: + client = IDEService() + res = client.ide_language() + res = client.ide_logging("info", "some message") + """ + + def __init__(self): + self._result = None + 
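Each @rpc_method call is a JSON POST to the IDE service; a minimal sketch of the equivalent raw request for a zero-argument method such as ide_language() (a direct requests call shown only for illustration, assuming the default service URL):

```python
import requests

# What rpc_method does under the hood: POST /<function_name> with a JSON
# body, then expose response_data["result"] to the decorated method.
response = requests.post(
    "http://localhost:3000/ide_language",  # default DEVCHAT_IDE_SERVICE_URL
    json={},
    headers={"Content-Type": "application/json"},
)
print(response.json().get("result"))
```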
+ @rpc_method + def get_lsp_brige_port(self) -> str: + return self._result + + @rpc_method + def install_python_env(self, command_name: str, requirements_file: str) -> str: + return self._result + + @rpc_method + def update_slash_commands(self) -> bool: + return self._result + + @rpc_method + def ide_language(self) -> str: + return self._result + + @rpc_method + def ide_logging(self, level: str, message: str) -> bool: + """ + level: "info" | "warn" | "error" | "debug" + """ + return self._result + + @rpc_method + def get_document_symbols(self, abspath: str) -> List[SymbolNode]: + return [SymbolNode.parse_obj(node) for node in self._result] + + @rpc_method + def find_type_def_locations(self, abspath: str, line: int, character: int) -> List[Location]: + return [Location.parse_obj(loc) for loc in self._result] + + @rpc_method + def ide_name(self) -> str: + return self._result + + @rpc_method + def diff_apply(self, filepath, content) -> bool: + return self._result + + def get_visible_range(self): + if self.ide_name() == "vscode": + return visible_range() + else: + return IdeaIDEService().get_visible_range() + + def get_selected_range(self): + if self.ide_name() == "vscode": + return selected_range() + else: + return IdeaIDEService().get_selected_range() diff --git a/site-packages/devchat/ide/types.py b/site-packages/devchat/ide/types.py new file mode 100644 index 0000000..01fb463 --- /dev/null +++ b/site-packages/devchat/ide/types.py @@ -0,0 +1,31 @@ +from typing import List + +from pydantic import BaseModel + + +class Position(BaseModel): + line: int # 0-based + character: int # 0-based + + +class Range(BaseModel): + start: Position + end: Position + + +class Location(BaseModel): + abspath: str + range: Range + + +class SymbolNode(BaseModel): + name: str + kind: str + range: Range + children: List["SymbolNode"] + + +class LocationWithText(BaseModel): + abspath: str + range: Range + text: str diff --git a/site-packages/devchat/ide/vscode_services.py b/site-packages/devchat/ide/vscode_services.py new file mode 100644 index 0000000..b8021e5 --- /dev/null +++ b/site-packages/devchat/ide/vscode_services.py @@ -0,0 +1,177 @@ +import os + +from .rpc import rpc_call +from .types import LocationWithText + + +@rpc_call +def run_code(code: str): # pylint: disable=unused-argument + pass + + +@rpc_call +def diff_apply(filepath, content): # pylint: disable=unused-argument + pass + + +@rpc_call +def get_symbol_defines_in_selected_code(): + pass + + +def find_symbol(command, abspath, line, col): + code = ( + f"const position = new vscode.Position({line}, {col});" + f"const absPath = vscode.Uri.file('{abspath}');" + f"return await vscode.commands.executeCommand('{command}', absPath, position);" + ) + result = run_code(code=code) + return result + + +def find_definition(abspath: str, line: int, col: int): + return find_symbol("vscode.executeDefinitionProvider", abspath, line, col) + + +def find_type_definition(abspath: str, line: int, col: int): + return find_symbol("vscode.executeTypeDefinitionProvider", abspath, line, col) + + +def find_declaration(abspath: str, line: int, col: int): + return find_symbol("vscode.executeDeclarationProvider", abspath, line, col) + + +def find_implementation(abspath: str, line: int, col: int): + return find_symbol("vscode.executeImplementationProvider", abspath, line, col) + + +def find_reference(abspath: str, line: int, col: int): + return find_symbol("vscode.executeReferenceProvider", abspath, line, col) + + +def document_symbols(abspath: str): + code = ( + f"const fileUri = 
vscode.Uri.file('{abspath}');" + "return await vscode.commands.executeCommand(" + "'vscode.executeDocumentSymbolProvider', fileUri);" + ) + symbols = run_code(code=code) + return symbols + + +def workspace_symbols(query: str): + code = ( + "return await vscode.commands.executeCommand('vscode.executeWorkspaceSymbolProvider'," + f" '{query}');" + ) + return run_code(code=code) + + +def active_text_editor(): + code = "return vscode.window.activeTextEditor;" + return run_code(code=code) + + +def open_folder(folder: str): + folder = folder.replace("\\", "/") + code = ( + f"const folderUri = vscode.Uri.file('{folder}');" + "vscode.commands.executeCommand(`vscode.openFolder`, folderUri);" + ) + run_code(code=code) + + +def visible_lines(): + active_document = active_text_editor() + fail_result = { + "filePath": "", + "visibleText": "", + "visibleRange": [-1, -1], + } + + if not active_document: + return fail_result + if not os.path.exists(active_document["document"]["uri"]["fsPath"]): + return fail_result + + file_path = active_document["document"]["uri"]["fsPath"] + start_line = active_document["visibleRanges"][0][0]["line"] + end_line = active_document["visibleRanges"][0][1]["line"] + + # read file lines from start_line to end_line + with open(file_path, "r", encoding="utf-8") as file: + _lines = file.readlines() + _visible_lines = _lines[start_line : end_line + 1] + + # continue with the rest of the function + return { + "filePath": file_path, + "visibleText": "".join(_visible_lines), + "visibleRange": [start_line, end_line], + } + +def visible_range() -> LocationWithText: + visible_range_text = visible_lines() + return LocationWithText( + text=visible_range_text["visibleText"], + abspath=visible_range_text["filePath"], + range={ + "start": { + "line": visible_range_text["visibleRange"][0], + "character": 0, + }, + "end": { + "line": visible_range_text["visibleRange"][1], + "character": 0, + }, + } + ) + + +def selected_lines(): + active_document = active_text_editor() + fail_result = { + "filePath": "", + "selectedText": "", + "selectedRange": [-1, -1, -1, -1], + } + + if not active_document: + return fail_result + if not os.path.exists(active_document["document"]["uri"]["fsPath"]): + return fail_result + + file_path = active_document["document"]["uri"]["fsPath"] + start_line = active_document["selection"]["start"]["line"] + start_col = active_document["selection"]["start"]["character"] + end_line = active_document["selection"]["end"]["line"] + end_col = active_document["selection"]["end"]["character"] + + # read file lines from start_line to end_line + with open(file_path, "r", encoding="utf-8") as file: + _lines = file.readlines() + _selected_lines = _lines[start_line : end_line + 1] + + # continue with the rest of the function + return { + "filePath": file_path, + "selectedText": "".join(_selected_lines), + "selectedRange": [start_line, start_col, end_line, end_col], + } + +def selected_range() -> LocationWithText: + selected_range_text = selected_lines() + return LocationWithText( + text=selected_range_text["selectedText"], + abspath=selected_range_text["filePath"], + range={ + "start": { + "line": selected_range_text["selectedRange"][0], + "character": selected_range_text["selectedRange"][1], + }, + "end": { + "line": selected_range_text["selectedRange"][2], + "character": selected_range_text["selectedRange"][3], + }, + } + ) \ No newline at end of file diff --git a/site-packages/devchat/llm/__init__.py b/site-packages/devchat/llm/__init__.py new file mode 100644 index 0000000..22e3683 
--- /dev/null +++ b/site-packages/devchat/llm/__init__.py @@ -0,0 +1,15 @@ +from .chat import chat, chat_json +from .openai import chat_completion_no_stream_return_json, chat_completion_stream +from .text_confirm import llm_edit_confirm +from .tools_call import chat_tools, llm_func, llm_param + +__all__ = [ + "chat_completion_stream", + "chat_completion_no_stream_return_json", + "chat_json", + "chat", + "llm_edit_confirm", + "llm_func", + "llm_param", + "chat_tools", +] diff --git a/site-packages/devchat/llm/chat.py b/site-packages/devchat/llm/chat.py new file mode 100644 index 0000000..fc3e80a --- /dev/null +++ b/site-packages/devchat/llm/chat.py @@ -0,0 +1,110 @@ +import json +import os +import sys +from functools import wraps + +import openai + +from devchat.memory import ChatMemory + +from .openai import ( + chat_completion_no_stream_return_json, + chat_completion_stream, + chat_completion_stream_commit, + chunks_content, + retry_timeout, + stream_out_chunk, + to_dict_content_and_call, +) +from .pipeline import exception_handle, pipeline, retry + +chat_completion_stream_out = exception_handle( + retry( + pipeline( + chat_completion_stream_commit, + retry_timeout, + stream_out_chunk, + chunks_content, + to_dict_content_and_call, + ), + times=3, + ), + lambda err: { + "content": None, + "function_name": None, + "parameters": "", + "error": err.type if isinstance(err, openai.APIError) else err, + }, +) + + +def chat( + prompt, + memory: ChatMemory = None, + stream_out: bool = False, + model: str = os.environ.get("LLM_MODEL", "gpt-3.5-turbo-1106"), + **llm_config, +): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): # pylint: disable=unused-argument + nonlocal prompt, memory, model, llm_config + prompt = prompt.format(**kwargs) + messages = memory.contexts() if memory else [] + if not any(item["content"] == prompt for item in messages) and prompt: + messages.append({"role": "user", "content": prompt}) + if "__user_request__" in kwargs: + messages.append(kwargs["__user_request__"]) + del kwargs["__user_request__"] + + llm_config["model"] = model + if not stream_out: + response = chat_completion_stream(messages, llm_config=llm_config) + else: + response = chat_completion_stream_out(messages, llm_config=llm_config) + if not response.get("content", None): + print(f"call {func.__name__} failed:", response["error"], file=sys.stderr) + return None + + if memory: + memory.append( + {"role": "user", "content": prompt}, + {"role": "assistant", "content": response["content"]}, + ) + return response["content"] + + return wrapper + + return decorator + + +def chat_json( + prompt, + memory: ChatMemory = None, + model: str = os.environ.get("LLM_MODEL", "gpt-3.5-turbo-1106"), + **llm_config, +): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): # pylint: disable=unused-argument + nonlocal prompt, memory, model, llm_config + prompt = prompt.format(**kwargs) + messages = memory.contexts() if memory else [] + if not any(item["content"] == prompt for item in messages): + messages.append({"role": "user", "content": prompt}) + + llm_config["model"] = model + response = chat_completion_no_stream_return_json(messages, llm_config=llm_config) + if not response: + print(f"call {func.__name__} failed.", file=sys.stderr) + + if memory: + memory.append( + {"role": "user", "content": prompt}, + {"role": "assistant", "content": json.dumps(response)}, + ) + return response + + return wrapper + + return decorator diff --git a/site-packages/devchat/llm/openai.py 
b/site-packages/devchat/llm/openai.py new file mode 100644 index 0000000..d6664be --- /dev/null +++ b/site-packages/devchat/llm/openai.py @@ -0,0 +1,195 @@
+# flake8: noqa: E402
+import json
+import os
+import re
+from typing import Dict, List
+
+import openai
+
+from .pipeline import (
+    RetryException,
+    exception_handle,
+    exception_output_handle,
+    parallel,
+    pipeline,
+    retry,
+)
+
+
+def _try_remove_markdown_block_flag(content):
+    """
+    If content is a markdown code block, strip its leading ```xxx and trailing ``` fences.
+    """
+    # Regex pattern matching the opening and closing fences of a markdown block
+    pattern = r"^\s*```\s*(\w+)\s*\n(.*?)\n\s*```\s*$"
+
+    # Match with the re module
+    match = re.search(pattern, content, re.DOTALL | re.MULTILINE)
+
+    if match:
+        # On a match, extract and return the inner content of the markdown block
+        _ = match.group(1)  # language
+        markdown_content = match.group(2)
+        return markdown_content.strip()
+    # Otherwise, return the original content unchanged
+    return content
+
+
+def chat_completion_stream_commit(
+    messages: List[Dict],  # [{"role": "user", "content": "hello"}]
+    llm_config: Dict,  # {"model": "...", ...}
+):
+    client = openai.OpenAI(
+        api_key=os.environ.get("OPENAI_API_KEY", None),
+        base_url=os.environ.get("OPENAI_API_BASE", None),
+    )
+
+    llm_config["stream"] = True
+    llm_config["timeout"] = 60
+    return client.chat.completions.create(messages=messages, **llm_config)
+
+
+def chat_completion_stream_raw(**kwargs):
+    client = openai.OpenAI(
+        api_key=os.environ.get("OPENAI_API_KEY", None),
+        base_url=os.environ.get("OPENAI_API_BASE", None),
+    )
+
+    kwargs["stream"] = True
+    kwargs["timeout"] = 60
+    return client.chat.completions.create(**kwargs)
+
+
+def stream_out_chunk(chunks):
+    for chunk in chunks:
+        chunk_dict = chunk.dict()
+        delta = chunk_dict["choices"][0]["delta"]
+        if delta.get("content", None):
+            print(delta["content"], end="", flush=True)
+        yield chunk
+
+
+def retry_timeout(chunks):
+    try:
+        for chunk in chunks:
+            yield chunk
+    except (openai.APIConnectionError, openai.APITimeoutError) as err:
+        raise RetryException(err) from err
+
+
+def chunk_list(chunks):
+    return [chunk for chunk in chunks]  # pylint: disable=R1721
+
+
+def chunks_content(chunks):
+    content = None
+    for chunk in chunks:
+        chunk_dict = chunk.dict()
+        delta = chunk_dict["choices"][0]["delta"]
+        if delta.get("content", None):
+            if content is None:
+                content = ""
+            content += delta["content"]
+    return content
+
+
+def chunks_call(chunks):
+    tool_calls = []
+
+    for chunk in chunks:
+        chunk = chunk.dict()
+        delta = chunk["choices"][0]["delta"]
+        if "tool_calls" in delta and delta["tool_calls"]:
+            tool_call = delta["tool_calls"][0]["function"]
+            if delta["tool_calls"][0].get("index", None) is not None:
+                index = delta["tool_calls"][0]["index"]
+                if index >= len(tool_calls):
+                    tool_calls.append({"name": None, "arguments": ""})
+            if tool_call.get("name", None):
+                tool_calls[-1]["name"] = tool_call["name"]
+            if tool_call.get("arguments", None):
+                tool_calls[-1]["arguments"] += tool_call["arguments"]
+    return tool_calls
+
+
+def content_to_json(content):
+    try:
+        # json will format as 
``` in 1106 model + response_content = _try_remove_markdown_block_flag(content) + response_obj = json.loads(response_content) + return response_obj + except json.JSONDecodeError as err: + raise RetryException(err) from err + except Exception as err: + raise err + + +def to_dict_content_and_call(content, tool_calls=None): + if tool_calls is None: + tool_calls = [] + return { + "content": content, + "function_name": tool_calls[0]["name"] if tool_calls else None, + "parameters": tool_calls[0]["arguments"] if tool_calls else "", + "tool_calls": tool_calls, + } + + +chat_completion_content = retry( + pipeline(chat_completion_stream_commit, retry_timeout, chunks_content), times=3 +) + +chat_completion_stream_content = retry( + pipeline(chat_completion_stream_commit, retry_timeout, stream_out_chunk, chunks_content), + times=3, +) + +chat_completion_call = retry( + pipeline(chat_completion_stream_commit, retry_timeout, chunks_call), times=3 +) + +chat_completion_no_stream_return_json = exception_handle( + retry( + pipeline(chat_completion_stream_commit, retry_timeout, chunks_content, content_to_json), + times=3, + ), + exception_output_handle(lambda err: None), +) + +chat_completion_stream = exception_handle( + retry( + pipeline( + chat_completion_stream_commit, + retry_timeout, + chunks_content, + to_dict_content_and_call, + ), + times=3, + ), + lambda err: { + "content": None, + "function_name": None, + "parameters": "", + "error": err.type if isinstance(err, openai.APIError) else err, + }, +) + +chat_call_completion_stream = exception_handle( + retry( + pipeline( + chat_completion_stream_commit, + retry_timeout, + chunk_list, + parallel(chunks_content, chunks_call), + to_dict_content_and_call, + ), + times=3, + ), + lambda err: { + "content": None, + "function_name": None, + "parameters": "", + "tool_calls": [], + "error": err.type if isinstance(err, openai.APIError) else err, + }, +) diff --git a/site-packages/devchat/llm/pipeline.py b/site-packages/devchat/llm/pipeline.py new file mode 100644 index 0000000..2a4c867 --- /dev/null +++ b/site-packages/devchat/llm/pipeline.py @@ -0,0 +1,84 @@ +import sys +from typing import Dict + +import openai + + +class RetryException(Exception): + def __init__(self, err): + self.error = err + + +def retry(func, times): + def wrapper(*args, **kwargs): + for index in range(times): + try: + return func(*args, **kwargs) + except RetryException as err: + if index + 1 == times: + raise err.error + continue + except Exception as err: + raise err + raise err.error + + return wrapper + + +def exception_err(func): + def wrapper(*args, **kwargs): + try: + result = func(*args, **kwargs) + return True, result + except Exception as err: + return False, err + + return wrapper + + +def exception_output_handle(func): + def wrapper(err): + if isinstance(err, openai.APIError): + print(err.type, file=sys.stderr, flush=True) + else: + print(err, file=sys.stderr, flush=True) + return func(err) + + return wrapper + + +def exception_handle(func, handler): + def wrapper(*args, **kwargs): + try: + result = func(*args, **kwargs) + return result + except Exception as err: + return handler(err) + + return wrapper + + +def pipeline(*funcs): + def wrapper(*args, **kwargs): + for index, func in enumerate(funcs): + if index > 0: + # pylint: disable=E1101 + if isinstance(args, Dict) and args.get("__type__", None) == "parallel": + args = func(*args["value"]) # pylint: disable=E1126 + else: + args = func(args) + else: + args = func(*args, **kwargs) + return args + + return wrapper + + +def 
parallel(*funcs): + def wrapper(args): + results = {"__type__": "parallel", "value": []} + for func in funcs: + results["value"].append(func(args)) + return results + + return wrapper diff --git a/site-packages/devchat/llm/text_confirm.py b/site-packages/devchat/llm/text_confirm.py new file mode 100644 index 0000000..7d877b5 --- /dev/null +++ b/site-packages/devchat/llm/text_confirm.py @@ -0,0 +1,52 @@ +# flake8: noqa: E402 +from functools import wraps + +from devchat.chatmark import Checkbox, Form, TextEditor + + +class MissEditConfirmFieldException(Exception): + pass + + +def edit_confirm(response): + need_regenerate = Checkbox(["Need Regenerate"]) + edit_text = TextEditor(response) + feedback_text = TextEditor("") + confirmation_form = Form( + [ + "Edit AI Response:", + edit_text, + "Need Regenerate?", + need_regenerate, + "Feedback if Regenerate:", + feedback_text, + ] + ) + confirmation_form.render() + if len(need_regenerate.selections) > 0: + return True, feedback_text.new_text + return False, edit_text.new_text + + +def llm_edit_confirm(edit_confirm_fun=edit_confirm): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + nonlocal edit_confirm_fun + if not edit_confirm_fun: + raise MissEditConfirmFieldException() + + while True: + response = func(*args, **kwargs) + if not response: + return response + + do_regenerate, new_response = edit_confirm_fun(response) + if do_regenerate: + kwargs["__user_request__"] = {"role": "user", "content": new_response} + else: + return new_response if new_response else response + + return wrapper + + return decorator diff --git a/site-packages/devchat/llm/tools_call.py b/site-packages/devchat/llm/tools_call.py new file mode 100644 index 0000000..01a189e --- /dev/null +++ b/site-packages/devchat/llm/tools_call.py @@ -0,0 +1,218 @@ +import json +import os +import sys +from functools import wraps + +from devchat.memory import ChatMemory +from devchat.chatmark import Form, Radio, TextEditor +from devchat.ide import IDEService + +from .openai import chat_call_completion_stream + + +class MissToolsFieldException(Exception): + pass + + +def openai_tool_schema(name, description, parameters, required): + return { + "type": "function", + "function": { + "name": name, + "description": description, + "parameters": {"type": "object", "properties": parameters, "required": required}, + }, + } + + +def openai_function_schema(name, description, properties, required): + return { + "name": name, + "description": description, + "parameters": {"type": "object", "properties": properties, "required": required}, + } + + +def llm_func(name, description, schema_fun=openai_tool_schema): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + if not hasattr(func, "llm_metadata"): + func.llm_metadata = {"properties": {}, "required": []} + + wrapper.function_name = name + wrapper.json_schema = lambda: schema_fun( + name, + description, + func.llm_metadata.get("properties", {}), + func.llm_metadata.get("required", []), + ) + return wrapper + + return decorator + + +def llm_param(name, description, dtype, **kwargs): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + if hasattr(func, "llm_metadata"): + wrapper.llm_metadata = func.llm_metadata + else: + wrapper.llm_metadata = {"properties": {}, "required": []} + + wrapper.llm_metadata["properties"][name] = { + "type": dtype, + "description": description, + **kwargs, # Add any additional keyword arguments + } + 
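+        # [editor's note, not upstream code] every parameter declared via @llm_param
+        # is also appended to the JSON schema's "required" list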
wrapper.llm_metadata["required"].append(name) + + return wrapper + + return decorator + + +def call_confirm(response): + """ + Prompt the user to confirm if a function call should be allowed. + + This function is responsible for asking the user to confirm whether the AI's + intention to call a function is permissible. It prints out the response content + and the details of the function calls that the AI intends to make. The user is + then presented with a choice to either allow or deny the function call. + + Parameters: + response (dict): A dictionary containing the 'content' and 'all_calls' keys. + 'content' is a string representing the AI's response, and + 'all_calls' is a list of dictionaries, each representing a + function call with 'function_name' and 'parameters' keys. + + Returns: + tuple: A tuple containing a boolean and a string. The boolean indicates whether + the function call is allowed (True) or not (False). The string contains + additional input from the user if the function call is not allowed. + """ + + def display_response_and_calls(response): + if response["content"]: + print(f"AI Response: {response['content']}", end="\n\n", flush=True) + print("Function Call Requests:", end="\n\n", flush=True) + for call_request in response["all_calls"]: + print( + f"Function: {call_request['function_name']}, " + f"Parameters: {call_request['parameters']}", + end="\n\n", + flush=True, + ) + + def prompt_user_confirmation(): + function_call_radio = Radio(["Allow function call", "Block function call"]) + user_feedback_input = TextEditor("") + confirmation_form = Form( + [ + "Permission to proceed with function call?", + function_call_radio, + "Provide feedback if blocked:", + user_feedback_input, + ] + ) + confirmation_form.render() + user_allowed_call = function_call_radio.selection == 0 + user_feedback = user_feedback_input.new_text + return user_allowed_call, user_feedback + + display_response_and_calls(response) + return prompt_user_confirmation() + + +def chat_tools( + prompt, + memory: ChatMemory = None, + model: str = os.environ.get("LLM_MODEL", "gpt-3.5-turbo-1106"), + tools=None, + call_confirm_fun=call_confirm, + **llm_config, +): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): # pylint: disable=unused-argument + nonlocal prompt, memory, model, tools, call_confirm_fun, llm_config + prompt = prompt.format(**kwargs) + if not tools: + raise MissToolsFieldException() + + messages = memory.contexts() if memory else [] + if not any(item["content"] == prompt for item in messages): + messages.append({"role": "user", "content": prompt}) + + tool_schemas = [fun.json_schema() for fun in tools] if tools else [] + + llm_config["model"] = model + llm_config["tools"] = tool_schemas + + user_request = {"role": "user", "content": prompt} + while True: + response = chat_call_completion_stream(messages, llm_config=llm_config) + if not response.get("content", None) and not response.get("function_name", None): + print(f"call {func.__name__} failed:", response["error"], file=sys.stderr) + return response + + response_content = ( + f"{response.get('content', '') or ''}\n\n" + f"call function {response.get('function_name', '')} with arguments:" + f"{response.get('parameters', '')}" + ) + if memory: + memory.append(user_request, {"role": "assistant", "content": response_content}) + messages.append({"role": "assistant", "content": response_content}) + + if not response.get("function_name", None): + return response + if not response.get("all_calls", None): + response["all_calls"] = [ 
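+                    # [editor's note, not upstream code] some responses report a single
+                    # function call without an "all_calls" list; one entry is synthesized
+                    # here so the dispatch loop below treats single and multiple calls alike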
+                    {
+                        "function_name": response["function_name"],
+                        "parameters": response["parameters"],
+                    }
+                ]
+
+            do_call = True
+            if call_confirm_fun:
+                do_call, fix_prompt = call_confirm_fun(response)
+
+            if do_call:
+                # call function
+                functions = {tool.function_name: tool for tool in tools}
+                for call in response["all_calls"]:
+                    IDEService().ide_logging(
+                        "info",
+                        f"try to call function tool: {call['function_name']} "
+                        f"with {call['parameters']}",
+                    )
+                    tool = functions[call["function_name"]]
+                    result = tool(**json.loads(call["parameters"]))
+                    messages.append(
+                        {
+                            "role": "function",
+                            "content": f"function has called, this is the result: {result}",
+                            "name": call["function_name"],
+                        }
+                    )
+                    user_request = {
+                        "role": "function",
+                        "content": f"function has called, this is the result: {result}",
+                        "name": call["function_name"],
+                    }
+            else:
+                # update prompt
+                messages.append({"role": "user", "content": fix_prompt})
+                user_request = {"role": "user", "content": fix_prompt}
+
+        return wrapper
+
+    return decorator
diff --git a/site-packages/devchat/memory/__init__.py b/site-packages/devchat/memory/__init__.py new file mode 100644 index 0000000..4a5905c --- /dev/null +++ b/site-packages/devchat/memory/__init__.py @@ -0,0 +1,8 @@
+
+from .base import ChatMemory
+from .fixsize_memory import FixSizeChatMemory
+
+__all__ = [
+    "ChatMemory",
+    "FixSizeChatMemory",
+]
diff --git a/site-packages/devchat/memory/base.py b/site-packages/devchat/memory/base.py new file mode 100644 index 0000000..4c73687 --- /dev/null +++ b/site-packages/devchat/memory/base.py @@ -0,0 +1,28 @@
+class ChatMemory:
+    """
+    ChatMemory is the base class for all chat memory classes.
+    """
+
+    def __init__(self):
+        pass
+
+    def append(self, request, response):
+        """
+        Append a request and response to the memory.
+        """
+        # must be implemented in subclasses
+
+    def append_request(self, request):
+        """
+        Append a request to the memory.
+        """
+
+    def append_response(self, response):
+        """
+        Append a response to the memory.
+        """
+
+    def contexts(self):
+        """
+        Return the contexts of the memory.
+        """
diff --git a/site-packages/devchat/memory/fixsize_memory.py b/site-packages/devchat/memory/fixsize_memory.py new file mode 100644 index 0000000..ab51413 --- /dev/null +++ b/site-packages/devchat/memory/fixsize_memory.py @@ -0,0 +1,54 @@
+from .base import ChatMemory
+
+
+class FixSizeChatMemory(ChatMemory):
+    """
+    FixSizeChatMemory is a memory class that stores up
+    to a fixed number of requests and responses.
+    """
+
+    def __init__(self, max_size: int = 5, messages=None, system_prompt=None):
+        """
+        Initialize the memory.
+        """
+        super().__init__()
+        self._max_size = max_size
+        # store last max_size messages
+        if messages is None:
+            messages = []
+
+        self._messages = messages[-max_size * 2 :]
+        self._system_prompt = system_prompt
+
+    def append(self, request, response):
+        """
+        Append a request and response to the memory.
+        """
+        self._messages.append(request)
+        self._messages.append(response)
+        if len(self._messages) > self._max_size * 2:
+            self._messages = self._messages[-self._max_size * 2 :]
+
+    def append_request(self, request):
+        """
+        Append a request to the memory.
+        """
+        self._messages.append(request)
+
+    def append_response(self, response):
+        """
+        Append a response to the memory.
+        """
+        self._messages.append(response)
+        if len(self._messages) > self._max_size * 2:
+            self._messages = self._messages[-self._max_size * 2 :]
+
+    def contexts(self):
+        """
+        Return the contexts of the memory.
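+        [Editor's note] Returns a copy of the stored messages; when a system
+        prompt is configured, it is prepended as a "system" role message.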
+ """ + messages = self._messages.copy() + # insert system prompt at the beginning + if self._system_prompt: + messages = [{"role": "system", "content": self._system_prompt}] + messages + return messages diff --git a/site-packages/distro-1.9.0.dist-info/INSTALLER b/site-packages/distro-1.9.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/site-packages/distro-1.9.0.dist-info/LICENSE b/site-packages/distro-1.9.0.dist-info/LICENSE new file mode 100644 index 0000000..e06d208 --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/site-packages/distro-1.9.0.dist-info/METADATA b/site-packages/distro-1.9.0.dist-info/METADATA new file mode 100644 index 0000000..9312e8e --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/METADATA @@ -0,0 +1,184 @@ +Metadata-Version: 2.1 +Name: distro +Version: 1.9.0 +Summary: Distro - an OS platform information API +Home-page: https://github.com/python-distro/distro +Author: Nir Cohen +Author-email: nir36g@gmail.com +License: Apache License, Version 2.0 +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: BSD :: NetBSD +Classifier: Operating System :: POSIX :: BSD :: OpenBSD +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Operating System +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE + +Distro - an OS platform information API +======================================= + +[![CI Status](https://github.com/python-distro/distro/workflows/CI/badge.svg)](https://github.com/python-distro/distro/actions/workflows/ci.yaml) +[![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro) +[![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg) +[![Code Coverage](https://codecov.io/github/python-distro/distro/coverage.svg?branch=master)](https://codecov.io/github/python-distro/distro?branch=master) +[![Is 
Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro) +[![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/) + +`distro` provides information about the +OS distribution it runs on, such as a reliable machine-readable ID, or +version information. + +It is the recommended replacement for Python's original +[`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution) +function (removed in Python 3.8). It also provides much more functionality +which isn't necessarily Python bound, like a command-line interface. + +Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/python-distro/distro/issues/177) is also planned. + +For Python 2.6 support, see https://github.com/python-distro/distro/tree/python2.6-support + +## Installation + +Installation of the latest released version from PyPI: + +```shell +pip install distro +``` + +Installation of the latest development version: + +```shell +pip install https://github.com/python-distro/distro/archive/master.tar.gz +``` + +To use as a standalone script, download `distro.py` directly: + +```shell +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +python distro.py +``` + +``distro`` is safe to vendor within projects that do not wish to add +dependencies. + +```shell +cd myproject +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +``` + +## Usage + +```bash +$ distro +Name: Antergos Linux +Version: 2015.10 (ISO-Rolling) +Codename: ISO-Rolling + +$ distro -j +{ + "codename": "ISO-Rolling", + "id": "antergos", + "like": "arch", + "version": "16.9", + "version_parts": { + "build_number": "", + "major": "16", + "minor": "9" + } +} + + +$ python +>>> import distro +>>> distro.name(pretty=True) +'CentOS Linux 8' +>>> distro.id() +'centos' +>>> distro.version(best=True) +'8.4.2105' +``` + + +## Documentation + +On top of the aforementioned API, several more functions are available. For a complete description of the +API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/). + +## Background + +An alternative implementation became necessary because Python 3.5 deprecated +this function, and Python 3.8 removed it altogether. Its predecessor function +[`platform.dist`](https://docs.python.org/3.7/library/platform.html#platform.dist) +was already deprecated since Python 2.6 and removed in Python 3.8. Still, there +are many cases in which access to that information is needed. See [Python issue +1322](https://bugs.python.org/issue1322) for more information. + +The `distro` package implements a robust and inclusive way of retrieving the +information about a distribution based on new standards and old methods, +namely from these data sources (from high to low precedence): + +* The os-release file `/etc/os-release` if present, with a fall-back on `/usr/lib/os-release` if needed. +* The output of the `lsb_release` command, if available. +* The distro release file (`/etc/*(-|_)(release|version)`), if present. +* The `uname` command for BSD based distrubtions. + + +## Python and Distribution Support + +`distro` is supported and tested on Python 3.6+ and PyPy and on any +distribution that provides one or more of the data sources covered. 
+ +This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/python-distro/distro/tree/master/tests/resources/distros). + + +## Testing + +```shell +git clone git@github.com:python-distro/distro.git +cd distro +pip install tox +tox +``` + + +## Contributions + +Pull requests are always welcome to deal with specific distributions or just +for general merriment. + +See [CONTRIBUTIONS](https://github.com/python-distro/distro/blob/master/CONTRIBUTING.md) for contribution info. + +Reference implementations for supporting additional distributions and file +formats can be found here: + +* https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172 +* https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb +* https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py +* https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc + +## Package manager distributions + +* https://src.fedoraproject.org/rpms/python-distro +* https://www.archlinux.org/packages/community/any/python-distro/ +* https://launchpad.net/ubuntu/+source/python-distro +* https://packages.debian.org/stable/python3-distro +* https://packages.gentoo.org/packages/dev-python/distro +* https://pkgs.org/download/python3-distro +* https://slackbuilds.org/repository/14.2/python/python-distro/ diff --git a/site-packages/distro-1.9.0.dist-info/RECORD b/site-packages/distro-1.9.0.dist-info/RECORD new file mode 100644 index 0000000..b621a3b --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/RECORD @@ -0,0 +1,15 @@ +../../../bin/distro,sha256=yB1pUj8W8WIUvaFBxMzRmAQFyddeRkkNyQTJwI3Wzm0,253 +distro-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distro-1.9.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 +distro-1.9.0.dist-info/METADATA,sha256=MWMqst5VkRMQkbM5e9zfeXcYV52Fp1GG8Gg53QwJ6B0,6791 +distro-1.9.0.dist-info/RECORD,, +distro-1.9.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +distro-1.9.0.dist-info/entry_points.txt,sha256=3ObjqQMbh1xeQQwsWtgbfDNDMDD-EbggR1Oj_z8s9hc,46 +distro-1.9.0.dist-info/top_level.txt,sha256=ikde_V_XEdSBqaGd5tEriN_wzYHLgTX_zVtlsGLHvwQ,7 +distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 +distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 +distro/__pycache__/__init__.cpython-38.pyc,, +distro/__pycache__/__main__.cpython-38.pyc,, +distro/__pycache__/distro.cpython-38.pyc,, +distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430 +distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/site-packages/distro-1.9.0.dist-info/WHEEL b/site-packages/distro-1.9.0.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/site-packages/distro-1.9.0.dist-info/entry_points.txt b/site-packages/distro-1.9.0.dist-info/entry_points.txt new file mode 100644 index 0000000..08d29c5 --- /dev/null +++ b/site-packages/distro-1.9.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +distro = distro.distro:main diff --git a/site-packages/distro-1.9.0.dist-info/top_level.txt b/site-packages/distro-1.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..0e09331 --- /dev/null +++ 
b/site-packages/distro-1.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +distro diff --git a/site-packages/distro/distro.py b/site-packages/distro/distro.py index 89e1868..78ccdfa 100644 --- a/site-packages/distro/distro.py +++ b/site-packages/distro/distro.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2015,2016,2017 Nir Cohen +# Copyright 2015-2021 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -55,7 +55,7 @@ except ImportError: # Python 3.7 TypedDict = dict -__version__ = "1.8.0" +__version__ = "1.9.0" class VersionDict(TypedDict): @@ -125,6 +125,7 @@ _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") # Base file names to be looked up for if _UNIXCONFDIR is not readable. _DISTRO_RELEASE_BASENAMES = [ "SuSE-release", + "altlinux-release", "arch-release", "base-release", "centos-release", @@ -151,6 +152,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = ( "system-release", "plesk-release", "iredmail-release", + "board-release", + "ec2_version", ) @@ -243,6 +246,7 @@ def id() -> str: "rocky" Rocky Linux "aix" AIX "guix" Guix System + "altlinux" ALT Linux ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -991,10 +995,10 @@ class LinuxDistribution: For details, see :func:`distro.info`. """ - return dict( + return InfoDict( id=self.id(), version=self.version(pretty, best), - version_parts=dict( + version_parts=VersionDict( major=self.major_version(best), minor=self.minor_version(best), build_number=self.build_number(best), diff --git a/site-packages/exceptiongroup-1.2.0.dist-info/INSTALLER b/site-packages/exceptiongroup-1.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/site-packages/exceptiongroup-1.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/site-packages/exceptiongroup-1.2.0.dist-info/LICENSE b/site-packages/exceptiongroup-1.2.0.dist-info/LICENSE new file mode 100644 index 0000000..50d4fa5 --- /dev/null +++ b/site-packages/exceptiongroup-1.2.0.dist-info/LICENSE @@ -0,0 +1,73 @@ +The MIT License (MIT) + +Copyright (c) 2022 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +This project contains code copied from the Python standard library. +The following is the required license notice for those parts. + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/site-packages/exceptiongroup-1.2.0.dist-info/METADATA b/site-packages/exceptiongroup-1.2.0.dist-info/METADATA new file mode 100644 index 0000000..e202ce2 --- /dev/null +++ b/site-packages/exceptiongroup-1.2.0.dist-info/METADATA @@ -0,0 +1,155 @@ +Metadata-Version: 2.1 +Name: exceptiongroup +Version: 1.2.0 +Summary: Backport of PEP 654 (exception groups) +Author-email: Alex Grönholm +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Typing :: Typed +Requires-Dist: pytest >= 6 ; extra == "test" +Project-URL: Changelog, https://github.com/agronholm/exceptiongroup/blob/main/CHANGES.rst +Project-URL: Issue Tracker, https://github.com/agronholm/exceptiongroup/issues +Project-URL: Source code, https://github.com/agronholm/exceptiongroup +Provides-Extra: test + +.. 
image:: https://github.com/agronholm/exceptiongroup/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/exceptiongroup/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/exceptiongroup/badge.svg?branch=main + :target: https://coveralls.io/github/agronholm/exceptiongroup?branch=main + :alt: Code Coverage + +This is a backport of the ``BaseExceptionGroup`` and ``ExceptionGroup`` classes from +Python 3.11. + +It contains the following: + +* The ``exceptiongroup.BaseExceptionGroup`` and ``exceptiongroup.ExceptionGroup`` + classes +* A utility function (``exceptiongroup.catch()``) for catching exceptions possibly + nested in an exception group +* Patches to the ``TracebackException`` class that properly formats exception groups + (installed on import) +* An exception hook that handles formatting of exception groups through + ``TracebackException`` (installed on import) +* Special versions of some of the functions from the ``traceback`` module, modified to + correctly handle exception groups even when monkey patching is disabled, or blocked by + another custom exception hook: + + * ``traceback.format_exception()`` + * ``traceback.format_exception_only()`` + * ``traceback.print_exception()`` + * ``traceback.print_exc()`` +* A backported version of ``contextlib.suppress()`` from Python 3.12.1 which also + handles suppressing exceptions inside exception groups + +If this package is imported on Python 3.11 or later, the built-in implementations of the +exception group classes are used instead, ``TracebackException`` is not monkey patched +and the exception hook won't be installed. + +See the `standard library documentation`_ for more information on exception groups. + +.. _standard library documentation: https://docs.python.org/3/library/exceptions.html + +Catching exceptions +=================== + +Due to the lack of the ``except*`` syntax introduced by `PEP 654`_ in earlier Python +versions, you need to use ``exceptiongroup.catch()`` to catch exceptions that are +potentially nested inside an exception group. This function returns a context manager +that calls the given handler for any exceptions matching the sole argument. + +The argument to ``catch()`` must be a dict (or any ``Mapping``) where each key is either +an exception class or an iterable of exception classes. Each value must be a callable +that takes a single positional argument. The handler will be called at most once, with +an exception group as an argument which will contain all the exceptions that are any +of the given types, or their subclasses. The exception group may contain nested groups +containing more matching exceptions. + +Thus, the following Python 3.11+ code: + +.. code-block:: python3 + + try: + ... + except* (ValueError, KeyError) as excgroup: + for exc in excgroup.exceptions: + print('Caught exception:', type(exc)) + except* RuntimeError: + print('Caught runtime error') + +would be written with this backport like this: + +.. code-block:: python3 + + from exceptiongroup import ExceptionGroup, catch + + def value_key_err_handler(excgroup: ExceptionGroup) -> None: + for exc in excgroup.exceptions: + print('Caught exception:', type(exc)) + + def runtime_err_handler(exc: ExceptionGroup) -> None: + print('Caught runtime error') + + with catch({ + (ValueError, KeyError): value_key_err_handler, + RuntimeError: runtime_err_handler + }): + ... 
+ +**NOTE**: Just like with ``except*``, you cannot handle ``BaseExceptionGroup`` or +``ExceptionGroup`` with ``catch()``. + +Suppressing exceptions +====================== + +This library contains a backport of the ``contextlib.suppress()`` context manager from +Python 3.12.1. It allows you to selectively ignore certain exceptions, even when they're +inside exception groups:: + + from exceptiongroup import suppress + + with suppress(RuntimeError): + raise ExceptionGroup("", [RuntimeError("boo")]) + +Notes on monkey patching +======================== + +To make exception groups render properly when an unhandled exception group is being +printed out, this package does two things when it is imported on any Python version +earlier than 3.11: + +#. The ``traceback.TracebackException`` class is monkey patched to store extra + information about exception groups (in ``__init__()``) and properly format them (in + ``format()``) +#. An exception hook is installed at ``sys.excepthook``, provided that no other hook is + already present. This hook causes the exception to be formatted using + ``traceback.TracebackException`` rather than the built-in rendered. + +If ``sys.exceptionhook`` is found to be set to something else than the default when +``exceptiongroup`` is imported, no monkeypatching is done at all. + +To prevent the exception hook and patches from being installed, set the environment +variable ``EXCEPTIONGROUP_NO_PATCH`` to ``1``. + +Formatting exception groups +--------------------------- + +Normally, the monkey patching applied by this library on import will cause exception +groups to be printed properly in tracebacks. But in cases when the monkey patching is +blocked by a third party exception hook, or monkey patching is explicitly disabled, +you can still manually format exceptions using the special versions of the ``traceback`` +functions, like ``format_exception()``, listed at the top of this page. They work just +like their counterparts in the ``traceback`` module, except that they use a separately +patched subclass of ``TracebackException`` to perform the rendering. + +Particularly in cases where a library installs its own exception hook, it is recommended +to use these special versions to do the actual formatting of exceptions/tracebacks. + +.. 
_PEP 654: https://www.python.org/dev/peps/pep-0654/ + diff --git a/site-packages/exceptiongroup-1.2.0.dist-info/RECORD b/site-packages/exceptiongroup-1.2.0.dist-info/RECORD new file mode 100644 index 0000000..812ef47 --- /dev/null +++ b/site-packages/exceptiongroup-1.2.0.dist-info/RECORD @@ -0,0 +1,18 @@ +exceptiongroup-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +exceptiongroup-1.2.0.dist-info/LICENSE,sha256=blBw12UDHgrUA6HL-Qrm0ZoCKPgC4yC3rP9GCqcu1Hw,3704 +exceptiongroup-1.2.0.dist-info/METADATA,sha256=HZmM9fFOVM8mE2DYVKBJYGaC9zgTtqItGx171R8Xui4,6603 +exceptiongroup-1.2.0.dist-info/RECORD,, +exceptiongroup-1.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +exceptiongroup/__init__.py,sha256=7DHS0hDk-RIs3IQc3SbZVB0-1MhiSCJ9XgvEyEloL7M,1049 +exceptiongroup/__pycache__/__init__.cpython-38.pyc,, +exceptiongroup/__pycache__/_catch.cpython-38.pyc,, +exceptiongroup/__pycache__/_exceptions.cpython-38.pyc,, +exceptiongroup/__pycache__/_formatting.cpython-38.pyc,, +exceptiongroup/__pycache__/_suppress.cpython-38.pyc,, +exceptiongroup/__pycache__/_version.cpython-38.pyc,, +exceptiongroup/_catch.py,sha256=kmx01LkEZI5-pmxh9ATEO4vzqAtkn7njV-5Z-F0Kp-I,4669 +exceptiongroup/_exceptions.py,sha256=f-WOXHw-lVWAAZjGY8c-KFxofCvDcEl4XeMDeqcgMyg,11010 +exceptiongroup/_formatting.py,sha256=mumIgRk61thv7wP6WzCpVNsGPhYZGWffJeGzQngFL0s,21005 +exceptiongroup/_suppress.py,sha256=8ymKx5AFcO15F47NfnRI-G98mn4ZBXuyt4qiLd5zD3Q,1338 +exceptiongroup/_version.py,sha256=zMnMemknXglcJs59xkicNzeEJTVgYd1omSfLWj76yWw,411 +exceptiongroup/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/site-packages/exceptiongroup-1.2.0.dist-info/WHEEL b/site-packages/exceptiongroup-1.2.0.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/site-packages/exceptiongroup-1.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/site-packages/exceptiongroup/__init__.py b/site-packages/exceptiongroup/__init__.py index 0e7e02b..d8e36b2 100644 --- a/site-packages/exceptiongroup/__init__.py +++ b/site-packages/exceptiongroup/__init__.py @@ -6,6 +6,7 @@ __all__ = [ "format_exception_only", "print_exception", "print_exc", + "suppress", ] import os @@ -38,3 +39,8 @@ else: BaseExceptionGroup = BaseExceptionGroup ExceptionGroup = ExceptionGroup + +if sys.version_info < (3, 12, 1): + from ._suppress import suppress +else: + from contextlib import suppress diff --git a/site-packages/exceptiongroup/_catch.py b/site-packages/exceptiongroup/_catch.py index b76f51f..4a39b41 100644 --- a/site-packages/exceptiongroup/_catch.py +++ b/site-packages/exceptiongroup/_catch.py @@ -64,7 +64,10 @@ class _Catcher: except BaseExceptionGroup: result = handler(matched) except BaseExceptionGroup as new_exc: - new_exceptions.extend(new_exc.exceptions) + if new_exc is matched: + new_exceptions.append(new_exc) + else: + new_exceptions.extend(new_exc.exceptions) except BaseException as new_exc: new_exceptions.append(new_exc) else: diff --git a/site-packages/exceptiongroup/_exceptions.py b/site-packages/exceptiongroup/_exceptions.py index 339735e..82a129c 100644 --- a/site-packages/exceptiongroup/_exceptions.py +++ b/site-packages/exceptiongroup/_exceptions.py @@ -27,7 +27,7 @@ def check_direct_subclass( def get_condition_filter( condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] 
- | Callable[[_BaseExceptionT_co], bool] + | Callable[[_BaseExceptionT_co], bool], ) -> Callable[[_BaseExceptionT_co], bool]: if isclass(condition) and issubclass( cast(Type[BaseException], condition), BaseException diff --git a/site-packages/exceptiongroup/_formatting.py b/site-packages/exceptiongroup/_formatting.py index c0402ba..e3835e4 100644 --- a/site-packages/exceptiongroup/_formatting.py +++ b/site-packages/exceptiongroup/_formatting.py @@ -359,6 +359,46 @@ if sys.excepthook is sys.__excepthook__: ) sys.excepthook = exceptiongroup_excepthook +# Ubuntu's system Python has a sitecustomize.py file that imports +# apport_python_hook and replaces sys.excepthook. +# +# The custom hook captures the error for crash reporting, and then calls +# sys.__excepthook__ to actually print the error. +# +# We don't mind it capturing the error for crash reporting, but we want to +# take over printing the error. So we monkeypatch the apport_python_hook +# module so that instead of calling sys.__excepthook__, it calls our custom +# hook. +# +# More details: https://github.com/python-trio/trio/issues/1065 +if getattr(sys.excepthook, "__name__", None) in ( + "apport_excepthook", + # on ubuntu 22.10 the hook was renamed to partial_apport_excepthook + "partial_apport_excepthook", +): + # patch traceback like above + traceback.TracebackException.__init__ = ( # type: ignore[assignment] + PatchedTracebackException.__init__ + ) + traceback.TracebackException.format = ( # type: ignore[assignment] + PatchedTracebackException.format + ) + traceback.TracebackException.format_exception_only = ( # type: ignore[assignment] + PatchedTracebackException.format_exception_only + ) + + from types import ModuleType + + import apport_python_hook + + assert sys.excepthook is apport_python_hook.apport_excepthook + + # monkeypatch the sys module that apport has imported + fake_sys = ModuleType("exceptiongroup_fake_sys") + fake_sys.__dict__.update(sys.__dict__) + fake_sys.__excepthook__ = exceptiongroup_excepthook + apport_python_hook.sys = fake_sys + @singledispatch def format_exception_only(__exc: BaseException) -> List[str]: diff --git a/site-packages/exceptiongroup/_suppress.py b/site-packages/exceptiongroup/_suppress.py new file mode 100644 index 0000000..6741563 --- /dev/null +++ b/site-packages/exceptiongroup/_suppress.py @@ -0,0 +1,40 @@ +import sys +from contextlib import AbstractContextManager + +if sys.version_info < (3, 11): + from ._exceptions import BaseExceptionGroup + + +class suppress(AbstractContextManager): + """Backport of :class:`contextlib.suppress` from Python 3.12.1.""" + + def __init__(self, *exceptions): + self._exceptions = exceptions + + def __enter__(self): + pass + + def __exit__(self, exctype, excinst, exctb): + # Unlike isinstance and issubclass, CPython exception handling + # currently only looks at the concrete type hierarchy (ignoring + # the instance and subclass checking hooks). While Guido considers + # that a bug rather than a feature, it's a fairly hard one to fix + # due to various internal implementation details. suppress provides + # the simpler issubclass based semantics, rather than trying to + # exactly reproduce the limitations of the CPython interpreter. 
+ # + # See http://bugs.python.org/issue12029 for more details + if exctype is None: + return + + if issubclass(exctype, self._exceptions): + return True + + if issubclass(exctype, BaseExceptionGroup): + match, rest = excinst.split(self._exceptions) + if rest is None: + return True + + raise rest + + return False diff --git a/site-packages/exceptiongroup/_version.py b/site-packages/exceptiongroup/_version.py index 760ce26..ba0e352 100644 --- a/site-packages/exceptiongroup/_version.py +++ b/site-packages/exceptiongroup/_version.py @@ -1,4 +1,16 @@ # file generated by setuptools_scm # don't change, don't track in version control -__version__ = version = '1.1.3' -__version_tuple__ = version_tuple = (1, 1, 3) +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '1.2.0' +__version_tuple__ = version_tuple = (1, 2, 0) diff --git a/site-packages/git/__init__.py b/site-packages/git/__init__.py index f4bf678..f9417a9 100644 --- a/site-packages/git/__init__.py +++ b/site-packages/git/__init__.py @@ -1,69 +1,43 @@ -# __init__.py # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors # -# This module is part of GitPython and is released under -# the BSD License: https://opensource.org/license/bsd-3-clause/ -# flake8: noqa -# @PydevCodeAnalysisIgnore -from git.exc import * # @NoMove @IgnorePep8 -import inspect -import os -import sys -import os.path as osp +# This module is part of GitPython and is released under the +# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/ -from typing import Optional +# @PydevCodeAnalysisIgnore + +__version__ = '3.1.41' + +from typing import List, Optional, Sequence, Tuple, Union, TYPE_CHECKING + +from gitdb.util import to_hex_sha +from git.exc import * # noqa: F403 # @NoMove @IgnorePep8 from git.types import PathLike -__version__ = '3.1.40' - - -# { Initialization -def _init_externals() -> None: - """Initialize external projects by putting them into the path""" - if __version__ == '3.1.40' and "PYOXIDIZER" not in os.environ: - sys.path.insert(1, osp.join(osp.dirname(__file__), "ext", "gitdb")) - - try: - import gitdb - except ImportError as e: - raise ImportError("'gitdb' could not be found in your PYTHONPATH") from e - # END verify import - - -# } END initialization - - -################# -_init_externals() -################# - -# { Imports - try: + from git.compat import safe_decode # @NoMove @IgnorePep8 from git.config import GitConfigParser # @NoMove @IgnorePep8 - from git.objects import * # @NoMove @IgnorePep8 - from git.refs import * # @NoMove @IgnorePep8 - from git.diff import * # @NoMove @IgnorePep8 - from git.db import * # @NoMove @IgnorePep8 + from git.objects import * # noqa: F403 # @NoMove @IgnorePep8 + from git.refs import * # noqa: F403 # @NoMove @IgnorePep8 + from git.diff import * # noqa: F403 # @NoMove @IgnorePep8 + from git.db import * # noqa: F403 # @NoMove @IgnorePep8 from git.cmd import Git # @NoMove @IgnorePep8 from git.repo import Repo # @NoMove @IgnorePep8 - from git.remote import * # @NoMove @IgnorePep8 - from git.index import * # @NoMove @IgnorePep8 + from git.remote import * # noqa: F403 # @NoMove @IgnorePep8 + from git.index import * # noqa: F403 # @NoMove @IgnorePep8 from git.util import ( # @NoMove @IgnorePep8 LockFile, BlockingLockFile, Stats, Actor, + remove_password_if_present, 
rmtree, ) -except GitError as _exc: +except GitError as _exc: # noqa: F405 raise ImportError("%s: %s" % (_exc.__class__.__name__, _exc)) from _exc -# } END imports - # __all__ must be statically defined by py.typed support # __all__ = [name for name, obj in locals().items() if not (name.startswith("_") or inspect.ismodule(obj))] -__all__ = [ +__all__ = [ # noqa: F405 "Actor", "AmbiguousObjectName", "BadName", @@ -152,7 +126,7 @@ def refresh(path: Optional[PathLike] = None) -> None: if not Git.refresh(path=path): return - if not FetchInfo.refresh(): + if not FetchInfo.refresh(): # noqa: F405 return # type: ignore [unreachable] GIT_OK = True diff --git a/site-packages/git/cmd.py b/site-packages/git/cmd.py index 7c448e3..4413182 100644 --- a/site-packages/git/cmd.py +++ b/site-packages/git/cmd.py @@ -1,33 +1,36 @@ -# cmd.py # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors # -# This module is part of GitPython and is released under -# the BSD License: https://opensource.org/license/bsd-3-clause/ +# This module is part of GitPython and is released under the +# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/ + from __future__ import annotations + import re import contextlib import io import logging import os import signal -from subprocess import call, Popen, PIPE, DEVNULL +from subprocess import Popen, PIPE, DEVNULL import subprocess import threading from textwrap import dedent -from git.compat import ( - defenc, - force_bytes, - safe_decode, - is_posix, - is_win, +from git.compat import defenc, force_bytes, safe_decode +from git.exc import ( + CommandError, + GitCommandError, + GitCommandNotFound, + UnsafeOptionError, + UnsafeProtocolError, ) -from git.exc import CommandError -from git.util import is_cygwin_git, cygpath, expand_path, remove_password_if_present, patch_env - -from .exc import GitCommandError, GitCommandNotFound, UnsafeOptionError, UnsafeProtocolError -from .util import ( +from git.util import ( LazyMixin, + cygpath, + expand_path, + is_cygwin_git, + patch_env, + remove_password_if_present, stream_copy, ) @@ -43,6 +46,7 @@ from typing import ( Iterator, List, Mapping, + Optional, Sequence, TYPE_CHECKING, TextIO, @@ -99,24 +103,24 @@ def handle_process_output( Callable[[bytes, "Repo", "DiffIndex"], None], ], stderr_handler: Union[None, Callable[[AnyStr], None], Callable[[List[AnyStr]], None]], - finalizer: Union[None, Callable[[Union[subprocess.Popen, "Git.AutoInterrupt"]], None]] = None, + finalizer: Union[None, Callable[[Union[Popen, "Git.AutoInterrupt"]], None]] = None, decode_streams: bool = True, kill_after_timeout: Union[None, float] = None, ) -> None: - """Registers for notifications to learn that process output is ready to read, and dispatches lines to - the respective line handlers. + """Register for notifications to learn that process output is ready to read, and + dispatch lines to the respective line handlers. + This function returns once the finalizer returns. - :return: result of finalizer - :param process: subprocess.Popen instance + :param process: :class:`subprocess.Popen` instance :param stdout_handler: f(stdout_line_string), or None :param stderr_handler: f(stderr_line_string), or None :param finalizer: f(proc) - wait for proc to finish :param decode_streams: - Assume stdout/stderr streams are binary and decode them before pushing \ + Assume stdout/stderr streams are binary and decode them before pushing their contents to handlers. 
- Set it to False if `universal_newline == True` (then streams are in text-mode) - or if decoding must happen later (i.e. for Diffs). + Set it to False if ``universal_newlines == True`` (then streams are in + text mode) or if decoding must happen later (i.e. for Diffs). :param kill_after_timeout: float or None, Default = None To specify a timeout in seconds for the git command, after which the process @@ -177,14 +181,13 @@ def handle_process_output( t.start() threads.append(t) - ## FIXME: Why Join?? Will block if `stdin` needs feeding... - # + # FIXME: Why join? Will block if stdin needs feeding... for t in threads: t.join(timeout=kill_after_timeout) if t.is_alive(): if isinstance(process, Git.AutoInterrupt): process._terminate() - else: # Don't want to deal with the other case + else: # Don't want to deal with the other case. raise RuntimeError( "Thread join() timed out in cmd.handle_process_output()." f" kill_after_timeout={kill_after_timeout} seconds" @@ -194,17 +197,77 @@ def handle_process_output( "error: process killed because it timed out." f" kill_after_timeout={kill_after_timeout} seconds" ) if not decode_streams and isinstance(p_stderr, BinaryIO): - # Assume stderr_handler needs binary input + # Assume stderr_handler needs binary input. error_str = cast(str, error_str) error_str = error_str.encode() # We ignore typing on the next line because mypy does not like - # the way we inferred that stderr takes str or bytes + # the way we inferred that stderr takes str or bytes. stderr_handler(error_str) # type: ignore if finalizer: - return finalizer(process) + finalizer(process) + + +def _safer_popen_windows( + command: Union[str, Sequence[Any]], + *, + shell: bool = False, + env: Optional[Mapping[str, str]] = None, + **kwargs: Any, +) -> Popen: + """Call :class:`subprocess.Popen` on Windows but don't include a CWD in the search. + + This avoids an untrusted search path condition where a file like ``git.exe`` in a + malicious repository would be run when GitPython operates on the repository. The + process using GitPython may have an untrusted repository's working tree as its + current working directory. Some operations may temporarily change to that directory + before running a subprocess. In addition, while by default GitPython does not run + external commands with a shell, it can be made to do so, in which case the CWD of + the subprocess, which GitPython usually sets to a repository working tree, can + itself be searched automatically by the shell. This wrapper covers all those cases. + + :note: This currently works by setting the ``NoDefaultCurrentDirectoryInExePath`` + environment variable during subprocess creation. It also takes care of passing + Windows-specific process creation flags, but that is unrelated to path search. + + :note: The current implementation contains a race condition on :attr:`os.environ`. + GitPython isn't thread-safe, but a program using it on one thread should ideally + be able to mutate :attr:`os.environ` on another, without unpredictable results. + See comments in https://github.com/gitpython-developers/GitPython/pull/1650. + """ + # CREATE_NEW_PROCESS_GROUP is needed for some ways of killing it afterwards. 
See: + # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal + # https://docs.python.org/3/library/subprocess.html#subprocess.CREATE_NEW_PROCESS_GROUP + creationflags = subprocess.CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP + + # When using a shell, the shell is the direct subprocess, so the variable must be + # set in its environment, to affect its search behavior. (The "1" can be any value.) + if shell: + safer_env = {} if env is None else dict(env) + safer_env["NoDefaultCurrentDirectoryInExePath"] = "1" else: - return None + safer_env = env + + # When not using a shell, the current process does the search in a CreateProcessW + # API call, so the variable must be set in our environment. With a shell, this is + # unnecessary, in versions where https://github.com/python/cpython/issues/101283 is + # patched. If not, in the rare case the ComSpec environment variable is unset, the + # shell is searched for unsafely. Setting NoDefaultCurrentDirectoryInExePath in all + # cases, as here, is simpler and protects against that. (The "1" can be any value.) + with patch_env("NoDefaultCurrentDirectoryInExePath", "1"): + return Popen( + command, + shell=shell, + env=safer_env, + creationflags=creationflags, + **kwargs, + ) + + +if os.name == "nt": + safer_popen = _safer_popen_windows +else: + safer_popen = Popen def dashify(string: str) -> str: @@ -225,20 +288,8 @@ def dict_to_slots_and__excluded_are_none(self: object, d: Mapping[str, Any], exc ## -- End Utilities -- @} -# value of Windows process creation flag taken from MSDN -CREATE_NO_WINDOW = 0x08000000 - -## CREATE_NEW_PROCESS_GROUP is needed to allow killing it afterwards, -# see https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal -PROC_CREATIONFLAGS = ( - CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP if is_win else 0 # type: ignore[attr-defined] -) # mypy error if not windows - - class Git(LazyMixin): - - """ - The Git class manages communication with the Git binary. + """The Git class manages communication with the Git binary. It provides a convenient interface to calling the Git binary, such as in:: @@ -264,7 +315,7 @@ class Git(LazyMixin): _excluded_ = ("cat_file_all", "cat_file_header", "_version_info") - re_unsafe_protocol = re.compile("(.+)::.+") + re_unsafe_protocol = re.compile(r"(.+)::.+") def __getstate__(self) -> Dict[str, Any]: return slots_to_dict(self, exclude=self._excluded_) @@ -274,44 +325,58 @@ class Git(LazyMixin): # CONFIGURATION - git_exec_name = "git" # default that should work on linux and windows + git_exec_name = "git" + """Default git command that should work on Linux, Windows, and other systems.""" - # Enables debugging of GitPython's git commands GIT_PYTHON_TRACE = os.environ.get("GIT_PYTHON_TRACE", False) + """Enables debugging of GitPython's git commands.""" - # If True, a shell will be used when executing git commands. - # This should only be desirable on Windows, see https://github.com/gitpython-developers/GitPython/pull/126 - # and check `git/test_repo.py:TestRepo.test_untracked_files()` TC for an example where it is required. - # Override this value using `Git.USE_SHELL = True` USE_SHELL = False + """Deprecated. If set to True, a shell will be used when executing git commands. + + Prior to GitPython 2.0.8, this had a narrow purpose in suppressing console windows + in graphical Windows applications. 
In 2.0.8 and higher, it provides no benefit, as + GitPython solves that problem more robustly and safely by using the + ``CREATE_NO_WINDOW`` process creation flag on Windows. + + Code that uses ``USE_SHELL = True`` or that passes ``shell=True`` to any GitPython + functions should be updated to use the default value of ``False`` instead. ``True`` + is unsafe unless the effect of shell expansions is fully considered and accounted + for, which is not possible under most circumstances. + + See: + - :meth:`Git.execute` (on the ``shell`` parameter). + - https://github.com/gitpython-developers/GitPython/commit/0d9390866f9ce42870d3116094cd49e0019a970a + - https://learn.microsoft.com/en-us/windows/win32/procthread/process-creation-flags + """ - # Provide the full path to the git executable. Otherwise it assumes git is in the path _git_exec_env_var = "GIT_PYTHON_GIT_EXECUTABLE" _refresh_env_var = "GIT_PYTHON_REFRESH" + GIT_PYTHON_GIT_EXECUTABLE = None - # note that the git executable is actually found during the refresh step in - # the top level __init__ + """Provide the full path to the git executable. Otherwise it assumes git is in the path. + + Note that the git executable is actually found during the refresh step in + the top level ``__init__``. + """ @classmethod def refresh(cls, path: Union[None, PathLike] = None) -> bool: """This gets called by the refresh function (see the top level __init__).""" - # discern which path to refresh with + # Discern which path to refresh with. if path is not None: new_git = os.path.expanduser(path) new_git = os.path.abspath(new_git) else: new_git = os.environ.get(cls._git_exec_env_var, cls.git_exec_name) - # keep track of the old and new git executable path + # Keep track of the old and new git executable path. old_git = cls.GIT_PYTHON_GIT_EXECUTABLE cls.GIT_PYTHON_GIT_EXECUTABLE = new_git - # test if the new git executable path is valid - - # - a GitCommandNotFound error is spawned by ourselves - # - a PermissionError is spawned if the git executable provided - # cannot be executed for whatever reason - + # Test if the new git executable path is valid. A GitCommandNotFound error is + # spawned by us. A PermissionError is spawned if the git executable cannot be + # executed for whatever reason. has_git = False try: cls().version() @@ -319,7 +384,7 @@ class Git(LazyMixin): except (GitCommandNotFound, PermissionError): pass - # warn or raise exception if test failed + # Warn or raise exception if test failed. if not has_git: err = ( dedent( @@ -334,18 +399,18 @@ class Git(LazyMixin): % cls._git_exec_env_var ) - # revert to whatever the old_git was + # Revert to whatever the old_git was. cls.GIT_PYTHON_GIT_EXECUTABLE = old_git if old_git is None: - # on the first refresh (when GIT_PYTHON_GIT_EXECUTABLE is - # None) we only are quiet, warn, or error depending on the - # GIT_PYTHON_REFRESH value + # On the first refresh (when GIT_PYTHON_GIT_EXECUTABLE is None) we only + # are quiet, warn, or error depending on the GIT_PYTHON_REFRESH value. 
- # determine what the user wants to happen during the initial - # refresh we expect GIT_PYTHON_REFRESH to either be unset or - # be one of the following values: - # 0|q|quiet|s|silence + # Determine what the user wants to happen during the initial refresh we + # expect GIT_PYTHON_REFRESH to either be unset or be one of the + # following values: + # + # 0|q|quiet|s|silence|n|none # 1|w|warn|warning # 2|r|raise|e|error @@ -410,14 +475,13 @@ class Git(LazyMixin): ) raise ImportError(err) - # we get here if this was the init refresh and the refresh mode - # was not error, go ahead and set the GIT_PYTHON_GIT_EXECUTABLE - # such that we discern the difference between a first import - # and a second import + # We get here if this was the init refresh and the refresh mode was not + # error. Go ahead and set the GIT_PYTHON_GIT_EXECUTABLE such that we + # discern the difference between a first import and a second import. cls.GIT_PYTHON_GIT_EXECUTABLE = cls.git_exec_name else: - # after the first refresh (when GIT_PYTHON_GIT_EXECUTABLE - # is no longer None) we raise an exception + # After the first refresh (when GIT_PYTHON_GIT_EXECUTABLE is no longer + # None) we raise an exception. raise GitCommandNotFound("git", err) return has_git @@ -438,18 +502,18 @@ class Git(LazyMixin): @classmethod def polish_url(cls, url: str, is_cygwin: Union[None, bool] = None) -> PathLike: + """Remove any backslashes from urls to be written in config files. + + Windows might create config files containing paths with backslashes, + but git stops liking them as it will escape the backslashes. Hence we + undo the escaping just to be sure. + """ if is_cygwin is None: is_cygwin = cls.is_cygwin() if is_cygwin: url = cygpath(url) else: - """Remove any backslashes from urls to be written in config files. - - Windows might create config files containing paths with backslashes, - but git stops liking them as it will escape the backslashes. - Hence we undo the escaping just to be sure. - """ url = os.path.expandvars(url) if url.startswith("~"): url = os.path.expanduser(url) @@ -458,12 +522,11 @@ class Git(LazyMixin): @classmethod def check_unsafe_protocols(cls, url: str) -> None: - """ - Check for unsafe protocols. + """Check for unsafe protocols. Apart from the usual protocols (http, git, ssh), - Git allows "remote helpers" that have the form ``::
``,
-        one of these helpers (``ext::``) can be used to invoke any arbitrary command.
+        Git allows "remote helpers" that have the form ``<transport>::<address>
``. + One of these helpers (``ext::``) can be used to invoke any arbitrary command. See: @@ -479,8 +542,7 @@ class Git(LazyMixin): @classmethod def check_unsafe_options(cls, options: List[str], unsafe_options: List[str]) -> None: - """ - Check for unsafe options. + """Check for unsafe options. Some options that are passed to `git ` can be used to execute arbitrary commands, this are blocked by default. @@ -495,18 +557,22 @@ class Git(LazyMixin): f"{unsafe_option} is not allowed, use `allow_unsafe_options=True` to allow it." ) - class AutoInterrupt(object): - """Kill/Interrupt the stored process instance once this instance goes out of scope. It is - used to prevent processes piling up in case iterators stop reading. - Besides all attributes are wired through to the contained process object. + class AutoInterrupt: + """Process wrapper that terminates the wrapped process on finalization. - The wait method was overridden to perform automatic status code checking - and possibly raise.""" + This kills/interrupts the stored process instance once this instance goes out of + scope. It is used to prevent processes piling up in case iterators stop reading. + + All attributes are wired through to the contained process object. + + The wait method is overridden to perform automatic status code checking and + possibly raise. + """ __slots__ = ("proc", "args", "status") # If this is non-zero it will override any status code during - # _terminate, used to prevent race conditions in testing + # _terminate, used to prevent race conditions in testing. _status_code_if_terminate: int = 0 def __init__(self, proc: Union[None, subprocess.Popen], args: Any) -> None: @@ -527,36 +593,26 @@ class Git(LazyMixin): proc.stdout.close() if proc.stderr: proc.stderr.close() - # did the process finish already so we have a return code ? + # Did the process finish already so we have a return code? try: if proc.poll() is not None: self.status = self._status_code_if_terminate or proc.poll() - return None + return except OSError as ex: log.info("Ignored error after process had died: %r", ex) - # can be that nothing really exists anymore ... + # It can be that nothing really exists anymore... if os is None or getattr(os, "kill", None) is None: - return None + return - # try to kill it + # Try to kill it. try: proc.terminate() - status = proc.wait() # ensure process goes away + status = proc.wait() # Ensure the process goes away. self.status = self._status_code_if_terminate or status except OSError as ex: log.info("Ignored error after process had died: %r", ex) - except AttributeError: - # try windows - # for some reason, providing None for stdout/stderr still prints something. This is why - # we simply use the shell and redirect to nul. Its slower than CreateProcess, question - # is whether we really want to see all these messages. Its annoying no matter what. - if is_win: - call( - ("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(proc.pid)), - shell=True, - ) # END exception handling def __del__(self) -> None: @@ -571,7 +627,8 @@ class Git(LazyMixin): :param stderr: Previously read value of stderr, in case stderr is already closed. :warn: May deadlock if output or error pipes are used and not handled separately. - :raise GitCommandError: if the return status is not 0""" + :raise GitCommandError: If the return status is not 0. 
+ """ if stderr is None: stderr_b = b"" stderr_b = force_bytes(data=stderr, encoding="utf-8") @@ -579,7 +636,7 @@ class Git(LazyMixin): if self.proc is not None: status = self.proc.wait() p_stderr = self.proc.stderr - else: # Assume the underlying proc was killed earlier or never existed + else: # Assume the underlying proc was killed earlier or never existed. status = self.status p_stderr = None @@ -602,22 +659,25 @@ class Git(LazyMixin): # END auto interrupt - class CatFileContentStream(object): + class CatFileContentStream: """Object representing a sized read-only stream returning the contents of an object. - It behaves like a stream, but counts the data read and simulates an empty + + This behaves like a stream, but counts the data read and simulates an empty stream once our sized content region is empty. - If not all data is read to the end of the object's lifetime, we read the - rest to assure the underlying stream continues to work.""" + + If not all data are read to the end of the object's lifetime, we read the + rest to ensure the underlying stream continues to work. + """ __slots__: Tuple[str, ...] = ("_stream", "_nbr", "_size") def __init__(self, size: int, stream: IO[bytes]) -> None: self._stream = stream self._size = size - self._nbr = 0 # num bytes read + self._nbr = 0 # Number of bytes read. - # special case: if the object is empty, has null bytes, get the + # Special case: If the object is empty, has null bytes, get the # final newline right away. if size == 0: stream.read(1) @@ -628,16 +688,16 @@ class Git(LazyMixin): if bytes_left == 0: return b"" if size > -1: - # assure we don't try to read past our limit + # Ensure we don't try to read past our limit. size = min(bytes_left, size) else: - # they try to read all, make sure its not more than what remains + # They try to read all, make sure it's not more than what remains. size = bytes_left # END check early depletion data = self._stream.read(size) self._nbr += len(data) - # check for depletion, read our final byte to make the stream usable by others + # Check for depletion, read our final byte to make the stream usable by others. if self._size - self._nbr == 0: self._stream.read(1) # final newline # END finish reading @@ -647,7 +707,7 @@ class Git(LazyMixin): if self._nbr == self._size: return b"" - # clamp size to lowest allowed value + # Clamp size to lowest allowed value. bytes_left = self._size - self._nbr if size > -1: size = min(bytes_left, size) @@ -658,7 +718,7 @@ class Git(LazyMixin): data = self._stream.readline(size) self._nbr += len(data) - # handle final byte + # Handle final byte. if self._size - self._nbr == 0: self._stream.read(1) # END finish reading @@ -669,7 +729,7 @@ class Git(LazyMixin): if self._nbr == self._size: return [] - # leave all additional logic to our readline method, we just check the size + # Leave all additional logic to our readline method, we just check the size. out = [] nbr = 0 while True: @@ -701,8 +761,8 @@ class Git(LazyMixin): def __del__(self) -> None: bytes_left = self._size - self._nbr if bytes_left: - # read and discard - seeking is impossible within a stream - # includes terminating newline + # Read and discard - seeking is impossible within a stream. + # This includes any terminating newline. self._stream.read(bytes_left + 1) # END handle incomplete read @@ -711,10 +771,11 @@ class Git(LazyMixin): :param working_dir: Git directory we should work in. If None, we always work in the current - directory as returned by os.getcwd(). + directory as returned by :func:`os.getcwd`. 
It is meant to be the working tree directory if available, or the - .git directory in case of bare repositories.""" - super(Git, self).__init__() + ``.git`` directory in case of bare repositories. + """ + super().__init__() self._working_dir = expand_path(working_dir) self._git_options: Union[List[str], Tuple[str, ...]] = () self._persistent_git_options: List[str] = [] @@ -722,7 +783,7 @@ class Git(LazyMixin): # Extra environment variables to pass to git commands self._environment: Dict[str, str] = {} - # cached command slots + # Cached command slots self.cat_file_header: Union[None, TBD] = None self.cat_file_all: Union[None, TBD] = None @@ -730,28 +791,30 @@ class Git(LazyMixin): """A convenience method as it allows to call the command as if it was an object. - :return: Callable object that will execute call _call_process with your arguments.""" + :return: + Callable object that will execute call :meth:`_call_process` with + your arguments. + """ if name[0] == "_": return LazyMixin.__getattr__(self, name) return lambda *args, **kwargs: self._call_process(name, *args, **kwargs) def set_persistent_git_options(self, **kwargs: Any) -> None: - """Specify command line options to the git executable - for subsequent subcommand calls. + """Specify command line options to the git executable for subsequent + subcommand calls. :param kwargs: - is a dict of keyword arguments. - These arguments are passed as in _call_process - but will be passed to the git command rather than - the subcommand. + A dict of keyword arguments. + These arguments are passed as in :meth:`_call_process`, but will be + passed to the git command rather than the subcommand. """ self._persistent_git_options = self.transform_kwargs(split_single_char_options=True, **kwargs) def _set_cache_(self, attr: str) -> None: if attr == "_version_info": - # We only use the first 4 numbers, as everything else could be strings in fact (on windows) - process_version = self._call_process("version") # should be as default *args and **kwargs used + # We only use the first 4 numbers, as everything else could be strings in fact (on Windows). + process_version = self._call_process("version") # Should be as default *args and **kwargs used. version_numbers = process_version.split(" ")[2] self._version_info = cast( @@ -759,7 +822,7 @@ class Git(LazyMixin): tuple(int(n) for n in version_numbers.split(".")[:4] if n.isdigit()), ) else: - super(Git, self)._set_cache_(attr) + super()._set_cache_(attr) # END handle version info @property @@ -772,7 +835,9 @@ class Git(LazyMixin): """ :return: tuple(int, int, int, int) tuple with integers representing the major, minor and additional version numbers as parsed from git version. - This value is generated on demand and is cached.""" + + This value is generated on demand and is cached. + """ return self._version_info @overload @@ -839,7 +904,7 @@ class Git(LazyMixin): strip_newline_in_stdout: bool = True, **subprocess_kwargs: Any, ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], AutoInterrupt]: - """Handles executing the command and consumes and returns the returned + R"""Handle executing the command, and consume and return the returned information (stdout). :param command: @@ -848,7 +913,7 @@ class Git(LazyMixin): program to execute is the first item in the args sequence or string. :param istream: - Standard input filehandle passed to `subprocess.Popen`. + Standard input filehandle passed to :class:`subprocess.Popen`. :param with_extended_output: Whether to return a (status, stdout, stderr) tuple. 
@@ -858,17 +923,17 @@ class Git(LazyMixin): :param as_process: Whether to return the created process instance directly from which - streams can be read on demand. This will render with_extended_output and - with_exceptions ineffective - the caller will have to deal with the details. - It is important to note that the process will be placed into an AutoInterrupt - wrapper that will interrupt the process once it goes out of scope. If you - use the command in iterators, you should pass the whole process instance - instead of a single stream. + streams can be read on demand. This will render `with_extended_output` + and `with_exceptions` ineffective - the caller will have to deal with + the details. It is important to note that the process will be placed + into an :class:`AutoInterrupt` wrapper that will interrupt the process + once it goes out of scope. If you use the command in iterators, you + should pass the whole process instance instead of a single stream. :param output_stream: If set to a file-like object, data produced by the git command will be output to the given stream directly. - This feature only has any effect if as_process is False. Processes will + This feature only has any effect if `as_process` is False. Processes will always be created with a pipe due to issues with subprocess. This merely is a workaround as data will be copied from the output pipe to the given output stream directly. @@ -881,13 +946,21 @@ class Git(LazyMixin): :param kill_after_timeout: Specifies a timeout in seconds for the git command, after which the process - should be killed. This will have no effect if as_process is set to True. It is - set to None by default and will let the process run until the timeout is - explicitly specified. This feature is not supported on Windows. It's also worth - noting that kill_after_timeout uses SIGKILL, which can have negative side - effects on a repository. For example, stale locks in case of ``git gc`` could - render the repository incapable of accepting changes until the lock is manually - removed. + should be killed. This will have no effect if `as_process` is set to True. + It is set to None by default and will let the process run until the timeout + is explicitly specified. Uses of this feature should be carefully + considered, due to the following limitations: + + 1. This feature is not supported at all on Windows. + 2. Effectiveness may vary by operating system. ``ps --ppid`` is used to + enumerate child processes, which is available on most GNU/Linux systems + but not most others. + 3. Deeper descendants do not receive signals, though they may sometimes + terminate as a consequence of their parent processes being killed. + 4. `kill_after_timeout` uses ``SIGKILL``, which can have negative side + effects on a repository. For example, stale locks in case of ``git gc`` + could render the repository incapable of accepting changes until the lock + is manually removed. :param with_stdout: If True, default True, we open stdout on the created process. @@ -898,10 +971,18 @@ class Git(LazyMixin): :param shell: Whether to invoke commands through a shell (see `Popen(..., shell=True)`). - It overrides :attr:`USE_SHELL` if it is not `None`. + If this is not `None`, it overrides :attr:`USE_SHELL`. + + Passing ``shell=True`` to this or any other GitPython function should be + avoided, as it is unsafe under most circumstances. 
This is because it is + typically not feasible to fully consider and account for the effect of shell + expansions, especially when passing ``shell=True`` to other methods that + forward it to :meth:`Git.execute`. Passing ``shell=True`` is also no longer + needed (nor useful) to work around any known operating system specific + issues. :param env: - A dictionary of environment variables to be passed to `subprocess.Popen`. + A dictionary of environment variables to be passed to :class:`subprocess.Popen`. :param max_chunk_size: Maximum number of bytes in one chunk of data passed to the output_stream in @@ -909,11 +990,11 @@ class Git(LazyMixin): the default value is used. :param strip_newline_in_stdout: - Whether to strip the trailing ``\\n`` of the command stdout. + Whether to strip the trailing ``\n`` of the command stdout. :param subprocess_kwargs: - Keyword arguments to be passed to `subprocess.Popen`. Please note that - some of the valid kwargs are already set by this method; the ones you + Keyword arguments to be passed to :class:`subprocess.Popen`. Please note + that some of the valid kwargs are already set by this method; the ones you specify may not be the same ones. :return: @@ -931,8 +1012,9 @@ class Git(LazyMixin): :note: If you add additional keyword arguments to the signature of this method, - you must update the execute_kwargs tuple housed in this module.""" - # Remove password for the command if present + you must update the execute_kwargs tuple housed in this module. + """ + # Remove password for the command if present. redacted_command = remove_password_if_present(command) if self.GIT_PYTHON_TRACE and (self.GIT_PYTHON_TRACE != "full" or as_process): log.info(" ".join(redacted_command)) @@ -945,12 +1027,12 @@ class Git(LazyMixin): except FileNotFoundError: cwd = None - # Start the process + # Start the process. inline_env = env env = os.environ.copy() - # Attempt to force all output to plain ascii english, which is what some parsing code - # may expect. - # According to stackoverflow (http://goo.gl/l74GC8), we are setting LANGUAGE as well + # Attempt to force all output to plain ASCII English, which is what some parsing + # code may expect. + # According to https://askubuntu.com/a/311796, we are setting LANGUAGE as well # just to be sure. env["LANGUAGE"] = "C" env["LC_ALL"] = "C" @@ -958,19 +1040,16 @@ class Git(LazyMixin): if inline_env is not None: env.update(inline_env) - if is_win: + if os.name == "nt": cmd_not_found_exception = OSError if kill_after_timeout is not None: raise GitCommandError( redacted_command, '"kill_after_timeout" feature is not supported on Windows.', ) - # Only search PATH, not CWD. This must be in the *caller* environment. The "1" can be any value. 
- maybe_patch_caller_env = patch_env("NoDefaultCurrentDirectoryInExePath", "1") else: - cmd_not_found_exception = FileNotFoundError # NOQA # exists, flake8 unknown @UndefinedVariable - maybe_patch_caller_env = contextlib.nullcontext() - # end handle + cmd_not_found_exception = FileNotFoundError + # END handle stdout_sink = PIPE if with_stdout else getattr(subprocess, "DEVNULL", None) or open(os.devnull, "wb") if shell is None: @@ -984,25 +1063,22 @@ class Git(LazyMixin): universal_newlines, ) try: - with maybe_patch_caller_env: - proc = Popen( - command, - env=env, - cwd=cwd, - bufsize=-1, - stdin=istream or DEVNULL, - stderr=PIPE, - stdout=stdout_sink, - shell=shell, - close_fds=is_posix, # unsupported on windows - universal_newlines=universal_newlines, - creationflags=PROC_CREATIONFLAGS, - **subprocess_kwargs, - ) + proc = safer_popen( + command, + env=env, + cwd=cwd, + bufsize=-1, + stdin=(istream or DEVNULL), + stderr=PIPE, + stdout=stdout_sink, + shell=shell, + universal_newlines=universal_newlines, + **subprocess_kwargs, + ) except cmd_not_found_exception as err: raise GitCommandNotFound(redacted_command, err) from err else: - # replace with a typeguard for Popen[bytes]? + # Replace with a typeguard for Popen[bytes]? proc.stdout = cast(BinaryIO, proc.stdout) proc.stderr = cast(BinaryIO, proc.stderr) @@ -1011,11 +1087,9 @@ class Git(LazyMixin): def kill_process(pid: int) -> None: """Callback to kill a process.""" - p = Popen( - ["ps", "--ppid", str(pid)], - stdout=PIPE, - creationflags=PROC_CREATIONFLAGS, - ) + if os.name == "nt": + raise AssertionError("Bug: This callback would be ineffective and unsafe on Windows, stopping.") + p = Popen(["ps", "--ppid", str(pid)], stdout=PIPE) child_pids = [] if p.stdout is not None: for line in p.stdout: @@ -1024,28 +1098,26 @@ class Git(LazyMixin): if local_pid.isdigit(): child_pids.append(int(local_pid)) try: - # Windows does not have SIGKILL, so use SIGTERM instead - sig = getattr(signal, "SIGKILL", signal.SIGTERM) - os.kill(pid, sig) + os.kill(pid, signal.SIGKILL) for child_pid in child_pids: try: - os.kill(child_pid, sig) + os.kill(child_pid, signal.SIGKILL) except OSError: pass - kill_check.set() # tell the main routine that the process was killed + kill_check.set() # Tell the main routine that the process was killed. except OSError: - # It is possible that the process gets completed in the duration after timeout - # happens and before we try to kill the process. + # It is possible that the process gets completed in the duration after + # timeout happens and before we try to kill the process. pass return - # end + # END kill_process if kill_after_timeout is not None: kill_check = threading.Event() watchdog = threading.Timer(kill_after_timeout, kill_process, args=(proc.pid,)) - # Wait for the process to return + # Wait for the process to return. status = 0 stdout_value: Union[str, bytes] = b"" stderr_value: Union[str, bytes] = b"" @@ -1064,7 +1136,7 @@ class Git(LazyMixin): ) if not universal_newlines: stderr_value = stderr_value.encode(defenc) - # strip trailing "\n" + # Strip trailing "\n". if stdout_value.endswith(newline) and strip_newline_in_stdout: # type: ignore stdout_value = stdout_value[:-1] if stderr_value.endswith(newline): # type: ignore @@ -1076,7 +1148,7 @@ class Git(LazyMixin): stream_copy(proc.stdout, output_stream, max_chunk_size) stdout_value = proc.stdout.read() stderr_value = proc.stderr.read() - # strip trailing "\n" + # Strip trailing "\n". 
if stderr_value.endswith(newline): # type: ignore stderr_value = stderr_value[:-1] status = proc.wait() @@ -1091,7 +1163,7 @@ class Git(LazyMixin): def as_text(stdout_value: Union[bytes, str]) -> str: return not output_stream and safe_decode(stdout_value) or "" - # end + # END as_text if stderr_value: log.info( @@ -1110,10 +1182,10 @@ class Git(LazyMixin): if with_exceptions and status != 0: raise GitCommandError(redacted_command, status, stderr_value, stdout_value) - if isinstance(stdout_value, bytes) and stdout_as_string: # could also be output_stream + if isinstance(stdout_value, bytes) and stdout_as_string: # Could also be output_stream. stdout_value = safe_decode(stdout_value) - # Allow access to the command's status code + # Allow access to the command's status code. if with_extended_output: return (status, stdout_value, safe_decode(stderr_value)) else: @@ -1123,26 +1195,26 @@ class Git(LazyMixin): return self._environment def update_environment(self, **kwargs: Any) -> Dict[str, Union[str, None]]: - """ - Set environment variables for future git invocations. Return all changed - values in a format that can be passed back into this function to revert - the changes: + """Set environment variables for future git invocations. Return all changed + values in a format that can be passed back into this function to revert the + changes. ``Examples``:: old_env = self.update_environment(PWD='/tmp') self.update_environment(**old_env) - :param kwargs: environment variables to use for git processes - :return: dict that maps environment variables to their old values + :param kwargs: Environment variables to use for git processes + + :return: Dict that maps environment variables to their old values """ old_env = {} for key, value in kwargs.items(): - # set value if it is None + # Set value if it is None. if value is not None: old_env[key] = self._environment.get(key) self._environment[key] = value - # remove key from environment if its value is None + # Remove key from environment if its value is None. elif key in self._environment: old_env[key] = self._environment[key] del self._environment[key] @@ -1150,16 +1222,15 @@ class Git(LazyMixin): @contextlib.contextmanager def custom_environment(self, **kwargs: Any) -> Iterator[None]: - """ - A context manager around the above ``update_environment`` method to restore the - environment back to its previous state after operation. + """A context manager around the above :meth:`update_environment` method to + restore the environment back to its previous state after operation. ``Examples``:: with self.custom_environment(GIT_SSH='/bin/ssh_wrapper'): repo.remotes.origin.fetch() - :param kwargs: see update_environment + :param kwargs: See :meth:`update_environment` """ old_env = self.update_environment(**kwargs) try: @@ -1184,7 +1255,7 @@ class Git(LazyMixin): return [] def transform_kwargs(self, split_single_char_options: bool = True, **kwargs: Any) -> List[str]: - """Transforms Python style kwargs into git command line options.""" + """Transform Python style kwargs into git command line options.""" args = [] for k, v in kwargs.items(): if isinstance(v, (list, tuple)): @@ -1206,23 +1277,22 @@ class Git(LazyMixin): return outlist def __call__(self, **kwargs: Any) -> "Git": - """Specify command line options to the git executable - for a subcommand call. + """Specify command line options to the git executable for a subcommand call. :param kwargs: - is a dict of keyword arguments. 
- these arguments are passed as in _call_process - but will be passed to the git command rather than - the subcommand. + A dict of keyword arguments. + These arguments are passed as in :meth:`_call_process`, but will be + passed to the git command rather than the subcommand. ``Examples``:: - git(work_tree='/tmp').difftool()""" + git(work_tree='/tmp').difftool() + """ self._git_options = self.transform_kwargs(split_single_char_options=True, **kwargs) return self @overload def _call_process(self, method: str, *args: None, **kwargs: None) -> str: - ... # if no args given, execute called with all defaults + ... # If no args were given, execute the call with all defaults. @overload def _call_process( @@ -1248,20 +1318,20 @@ class Git(LazyMixin): the result as a string. :param method: - is the command. Contained "_" characters will be converted to dashes, - such as in 'ls_files' to call 'ls-files'. + The command. Contained ``_`` characters will be converted to dashes, + such as in ``ls_files`` to call ``ls-files``. :param args: - is the list of arguments. If None is included, it will be pruned. + The list of arguments. If None is included, it will be pruned. This allows your commands to call git more conveniently as None is realized as non-existent. :param kwargs: - It contains key-values for the following: - - the :meth:`execute()` kwds, as listed in :var:`execute_kwargs`; - - "command options" to be converted by :meth:`transform_kwargs()`; - - the `'insert_kwargs_after'` key which its value must match one of ``*args`` - and any cmd-options will be appended after the matched arg. + Contains key-values for the following: + - The :meth:`execute()` kwds, as listed in :var:`execute_kwargs`. + - "Command options" to be converted by :meth:`transform_kwargs`. + - The `'insert_kwargs_after'` key which its value must match one of ``*args``. + It also contains any command options, to be appended after the matched arg. Examples:: @@ -1271,17 +1341,18 @@ class Git(LazyMixin): git rev-list max-count 10 --header master - :return: Same as ``execute`` - if no args given used execute default (esp. as_process = False, stdout_as_string = True) - and return str""" - # Handle optional arguments prior to calling transform_kwargs - # otherwise these'll end up in args, which is bad. + :return: Same as :meth:`execute`. + If no args are given, used :meth:`execute`'s default (especially + ``as_process = False``, ``stdout_as_string = True``) and return str. + """ + # Handle optional arguments prior to calling transform_kwargs. + # Otherwise these'll end up in args, which is bad. exec_kwargs = {k: v for k, v in kwargs.items() if k in execute_kwargs} opts_kwargs = {k: v for k, v in kwargs.items() if k not in execute_kwargs} insert_after_this_arg = opts_kwargs.pop("insert_kwargs_after", None) - # Prepare the argument list + # Prepare the argument list. opt_args = self.transform_kwargs(**opts_kwargs) ext_args = self._unpack_args([a for a in args if a is not None]) @@ -1296,17 +1367,16 @@ class Git(LazyMixin): "Couldn't find argument '%s' in args %s to insert cmd options after" % (insert_after_this_arg, str(ext_args)) ) from err - # end handle error + # END handle error args_list = ext_args[: index + 1] + opt_args + ext_args[index + 1 :] - # end handle opts_kwargs + # END handle opts_kwargs call = [self.GIT_PYTHON_GIT_EXECUTABLE] - # add persistent git options + # Add persistent git options. 
call.extend(self._persistent_git_options) - # add the git options, then reset to empty - # to avoid side_effects + # Add the git options, then reset to empty to avoid side effects. call.extend(self._git_options) self._git_options = () @@ -1322,12 +1392,18 @@ class Git(LazyMixin): :return: (hex_sha, type_string, size_as_int) - :raise ValueError: if the header contains indication for an error due to - incorrect input sha""" + :raise ValueError: If the header contains indication for an error due to + incorrect input sha + """ tokens = header_line.split() if len(tokens) != 3: if not tokens: - raise ValueError("SHA could not be resolved, git returned: %r" % (header_line.strip())) + err_msg = ( + f"SHA is empty, possible dubious ownership in the repository " + f"""at {self._working_dir}.\n If this is unintended run:\n\n """ + f""" "git config --global --add safe.directory {self._working_dir}" """ + ) + raise ValueError(err_msg) else: raise ValueError("SHA %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip())) # END handle actual return value @@ -1338,12 +1414,12 @@ class Git(LazyMixin): return (tokens[0], tokens[1], int(tokens[2])) def _prepare_ref(self, ref: AnyStr) -> bytes: - # required for command to separate refs on stdin, as bytes + # Required for command to separate refs on stdin, as bytes. if isinstance(ref, bytes): - # Assume 40 bytes hexsha - bin-to-ascii for some reason returns bytes, not text + # Assume 40 bytes hexsha - bin-to-ascii for some reason returns bytes, not text. refstr: str = ref.decode("ascii") elif not isinstance(ref, str): - refstr = str(ref) # could be ref-object + refstr = str(ref) # Could be ref-object. else: refstr = ref @@ -1379,7 +1455,8 @@ class Git(LazyMixin): :note: The method will only suffer from the costs of command invocation once and reuses the command in subsequent calls. - :return: (hexsha, type_string, size_as_int)""" + :return: (hexsha, type_string, size_as_int) + """ cmd = self._get_persistent_cmd("cat_file_header", "cat_file", batch_check=True) return self.__get_object_header(cmd, ref) @@ -1387,7 +1464,8 @@ class Git(LazyMixin): """As get_object_header, but returns object data as well. :return: (hexsha, type_string, size_as_int, data_string) - :note: not threadsafe""" + :note: Not threadsafe. + """ hexsha, typename, size, stream = self.stream_object_data(ref) data = stream.read(size) del stream @@ -1397,7 +1475,8 @@ class Git(LazyMixin): """As get_object_header, but returns the data as a stream. :return: (hexsha, type_string, size_as_int, stream) - :note: This method is not threadsafe, you need one independent Command instance per thread to be safe!""" + :note: This method is not threadsafe, you need one independent Command instance per thread to be safe! + """ cmd = self._get_persistent_cmd("cat_file_all", "cat_file", batch=True) hexsha, typename, size = self.__get_object_header(cmd, ref) cmd_stdout = cmd.stdout if cmd.stdout is not None else io.BytesIO() @@ -1408,7 +1487,8 @@ class Git(LazyMixin): Currently persistent commands will be interrupted. 
- :return: self""" + :return: self + """ for cmd in (self.cat_file_all, self.cat_file_header): if cmd: cmd.__del__() diff --git a/site-packages/git/compat.py b/site-packages/git/compat.py index 624f261..920e44b 100644 --- a/site-packages/git/compat.py +++ b/site-packages/git/compat.py @@ -1,24 +1,19 @@ -# -*- coding: utf-8 -*- -# config.py # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors # -# This module is part of GitPython and is released under -# the BSD License: https://opensource.org/license/bsd-3-clause/ -"""utilities to help provide compatibility with python 3""" -# flake8: noqa +# This module is part of GitPython and is released under the +# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/ + +"""Utilities to help provide compatibility with Python 3.""" import locale import os import sys -from gitdb.utils.encoding import ( - force_bytes, # @UnusedImport - force_text, # @UnusedImport -) +from gitdb.utils.encoding import force_bytes, force_text # noqa: F401 # @UnusedImport # typing -------------------------------------------------------------------- -from typing import ( +from typing import ( # noqa: F401 Any, AnyStr, Dict, @@ -33,10 +28,41 @@ from typing import ( # --------------------------------------------------------------------------- -is_win: bool = os.name == "nt" +is_win = os.name == "nt" +"""Deprecated alias for ``os.name == "nt"`` to check for native Windows. + +This is deprecated because it is clearer to write out :attr:`os.name` or +:attr:`sys.platform` checks explicitly, especially in cases where it matters which is +used. + +:note: ``is_win`` is ``False`` on Cygwin, but is often wrongly assumed ``True``. To + detect Cygwin, use ``sys.platform == "cygwin"``. +""" + is_posix = os.name == "posix" -is_darwin = os.name == "darwin" +"""Deprecated alias for ``os.name == "posix"`` to check for Unix-like ("POSIX") systems. + +This is deprecated because it clearer to write out :attr:`os.name` or +:attr:`sys.platform` checks explicitly, especially in cases where it matters which is +used. + +:note: For POSIX systems, more detailed information is available in + :attr:`sys.platform`, while :attr:`os.name` is always ``"posix"`` on such systems, + including macOS (Darwin). +""" + +is_darwin = sys.platform == "darwin" +"""Deprecated alias for ``sys.platform == "darwin"`` to check for macOS (Darwin). + +This is deprecated because it clearer to write out :attr:`os.name` or +:attr:`sys.platform` checks explicitly. + +:note: For macOS (Darwin), ``os.name == "posix"`` as in other Unix-like systems, while + ``sys.platform == "darwin"`. 
+""" + defenc = sys.getfilesystemencoding() +"""The encoding used to convert between Unicode and bytes filenames.""" @overload @@ -50,7 +76,7 @@ def safe_decode(s: AnyStr) -> str: def safe_decode(s: Union[AnyStr, None]) -> Optional[str]: - """Safely decodes a binary string to unicode""" + """Safely decode a binary string to Unicode.""" if isinstance(s, str): return s elif isinstance(s, bytes): @@ -72,7 +98,7 @@ def safe_encode(s: AnyStr) -> bytes: def safe_encode(s: Optional[AnyStr]) -> Optional[bytes]: - """Safely encodes a binary string to unicode""" + """Safely encode a binary string to Unicode.""" if isinstance(s, str): return s.encode(defenc) elif isinstance(s, bytes): @@ -94,7 +120,7 @@ def win_encode(s: AnyStr) -> bytes: def win_encode(s: Optional[AnyStr]) -> Optional[bytes]: - """Encode unicodes for process arguments on Windows.""" + """Encode Unicode strings for process arguments on Windows.""" if isinstance(s, str): return s.encode(locale.getpreferredencoding(False)) elif isinstance(s, bytes): diff --git a/site-packages/git/config.py b/site-packages/git/config.py index 76b1491..2730dda 100644 --- a/site-packages/git/config.py +++ b/site-packages/git/config.py @@ -1,32 +1,24 @@ -# config.py # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors # -# This module is part of GitPython and is released under -# the BSD License: https://opensource.org/license/bsd-3-clause/ -"""Module containing module parser implementation able to properly read and write -configuration files""" +# This module is part of GitPython and is released under the +# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/ + +"""Parser for reading and writing configuration files.""" -import sys import abc +import configparser as cp +import fnmatch from functools import wraps import inspect from io import BufferedReader, IOBase import logging import os -import re -import fnmatch - -from git.compat import ( - defenc, - force_text, - is_win, -) - -from git.util import LockFile - import os.path as osp +import re +import sys -import configparser as cp +from git.compat import defenc, force_text +from git.util import LockFile # typing------------------------------------------------------- @@ -55,7 +47,7 @@ T_ConfigParser = TypeVar("T_ConfigParser", bound="GitConfigParser") T_OMD_value = TypeVar("T_OMD_value", str, bytes, int, float, bool) if sys.version_info[:3] < (3, 7, 2): - # typing.Ordereddict not added until py 3.7.2 + # typing.Ordereddict not added until Python 3.7.2. from collections import OrderedDict OrderedDict_OMD = OrderedDict @@ -72,25 +64,24 @@ __all__ = ("GitConfigParser", "SectionConstraint") log = logging.getLogger("git.config") log.addHandler(logging.NullHandler()) -# invariants -# represents the configuration level of a configuration file - CONFIG_LEVELS: ConfigLevels_Tup = ("system", "user", "global", "repository") +"""The configuration level of a configuration file.""" - -# Section pattern to detect conditional includes. -# https://git-scm.com/docs/git-config#_conditional_includes CONDITIONAL_INCLUDE_REGEXP = re.compile(r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\"") +"""Section pattern to detect conditional includes. 
diff --git a/site-packages/git/config.py b/site-packages/git/config.py
index 76b1491..2730dda 100644
--- a/site-packages/git/config.py
+++ b/site-packages/git/config.py
@@ -1,32 +1,24 @@
-# config.py
 # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
 #
-# This module is part of GitPython and is released under
-# the BSD License: https://opensource.org/license/bsd-3-clause/
-"""Module containing module parser implementation able to properly read and write
-configuration files"""
+# This module is part of GitPython and is released under the
+# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
+
+"""Parser for reading and writing configuration files."""
 
-import sys
 import abc
+import configparser as cp
+import fnmatch
 from functools import wraps
 import inspect
 from io import BufferedReader, IOBase
 import logging
 import os
-import re
-import fnmatch
-
-from git.compat import (
-    defenc,
-    force_text,
-    is_win,
-)
-
-from git.util import LockFile
-
 import os.path as osp
+import re
+import sys
 
-import configparser as cp
+from git.compat import defenc, force_text
+from git.util import LockFile
 
 # typing-------------------------------------------------------
@@ -55,7 +47,7 @@ T_ConfigParser = TypeVar("T_ConfigParser", bound="GitConfigParser")
 T_OMD_value = TypeVar("T_OMD_value", str, bytes, int, float, bool)
 
 if sys.version_info[:3] < (3, 7, 2):
-    # typing.Ordereddict not added until py 3.7.2
+    # typing.Ordereddict not added until Python 3.7.2.
     from collections import OrderedDict
 
     OrderedDict_OMD = OrderedDict
@@ -72,25 +64,24 @@ __all__ = ("GitConfigParser", "SectionConstraint")
 log = logging.getLogger("git.config")
 log.addHandler(logging.NullHandler())
 
-# invariants
-# represents the configuration level of a configuration file
-
 CONFIG_LEVELS: ConfigLevels_Tup = ("system", "user", "global", "repository")
+"""The configuration level of a configuration file."""
 
-
-# Section pattern to detect conditional includes.
-# https://git-scm.com/docs/git-config#_conditional_includes
 CONDITIONAL_INCLUDE_REGEXP = re.compile(r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\"")
+"""Section pattern to detect conditional includes.
+
+See: https://git-scm.com/docs/git-config#_conditional_includes
+"""
 
 
 class MetaParserBuilder(abc.ABCMeta):  # noqa: B024
-    """Utility class wrapping base-class methods into decorators that assure read-only properties"""
+    """Utility class wrapping base-class methods into decorators that assure read-only properties."""
 
     def __new__(cls, name: str, bases: Tuple, clsdict: Dict[str, Any]) -> "MetaParserBuilder":
+        """Equip all base-class methods with a needs_values decorator, and all non-const
+        methods with a set_dirty_and_flush_changes decorator in addition to that.
         """
-        Equip all base-class methods with a needs_values decorator, and all non-const methods
-        with a set_dirty_and_flush_changes decorator in addition to that."""
         kmm = "_mutating_methods_"
         if kmm in clsdict:
             mutating_methods = clsdict[kmm]
@@ -109,12 +100,12 @@ class MetaParserBuilder(abc.ABCMeta):  # noqa: B024
             # END for each base
         # END if mutating methods configuration is set
 
-        new_type = super(MetaParserBuilder, cls).__new__(cls, name, bases, clsdict)
+        new_type = super().__new__(cls, name, bases, clsdict)
         return new_type
 
 
 def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
-    """Returns method assuring we read values (on demand) before we try to access them"""
+    """Return a method for ensuring we read values (on demand) before we try to access them."""
 
     @wraps(func)
     def assure_data_present(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
@@ -126,9 +117,10 @@ def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
 
 
 def set_dirty_and_flush_changes(non_const_func: Callable[..., _T]) -> Callable[..., _T]:
-    """Return method that checks whether given non constant function may be called.
-    If so, the instance will be set dirty.
-    Additionally, we flush the changes right to disk"""
+    """Return a method that checks whether the given non-const function may be called.
+
+    If so, the instance will be set dirty. Additionally, we flush the changes right to disk.
+    """
 
     def flush_changes(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
         rval = non_const_func(self, *args, **kwargs)
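Only fragments of these two decorators are visible in the hunks, so here is a self-contained sketch of the pattern they describe: `needs_values` triggers an on-demand read before the wrapped method runs, and `set_dirty_and_flush_changes` marks the parser dirty and writes it back after a mutating call. The `read()`, `write()`, and `_dirty` names follow the docstrings above; treat the details as illustrative rather than the exact GitPython implementation.

```python
from functools import wraps
from typing import Any, Callable, TypeVar

_T = TypeVar("_T")


def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
    # Ensure values are read (on demand) before the wrapped method runs.
    @wraps(func)
    def assure_data_present(self: Any, *args: Any, **kwargs: Any) -> _T:
        self.read()  # assumed to be a no-op once already initialized
        return func(self, *args, **kwargs)

    return assure_data_present


def set_dirty_and_flush_changes(non_const_func: Callable[..., _T]) -> Callable[..., _T]:
    # Run the mutating method, mark the instance dirty, and flush to disk.
    def flush_changes(self: Any, *args: Any, **kwargs: Any) -> _T:
        rval = non_const_func(self, *args, **kwargs)
        self._dirty = True
        self.write()
        return rval

    return flush_changes
```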
+ """ __slots__ = ("_config", "_section_name") + _valid_attrs_ = ( "get_value", "set_value", @@ -179,20 +171,21 @@ class SectionConstraint(Generic[T_ConfigParser]): def __getattr__(self, attr: str) -> Any: if attr in self._valid_attrs_: return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs) - return super(SectionConstraint, self).__getattribute__(attr) + return super().__getattribute__(attr) def _call_config(self, method: str, *args: Any, **kwargs: Any) -> Any: """Call the configuration at the given method which must take a section name - as first argument""" + as first argument.""" return getattr(self._config, method)(self._section_name, *args, **kwargs) @property def config(self) -> T_ConfigParser: - """return: Configparser instance we constrain""" + """return: ConfigParser instance we constrain""" return self._config def release(self) -> None: - """Equivalent to GitConfigParser.release(), which is called on our underlying parser instance""" + """Equivalent to GitConfigParser.release(), which is called on our underlying + parser instance.""" return self._config.release() def __enter__(self) -> "SectionConstraint[T_ConfigParser]": @@ -207,36 +200,37 @@ class _OMD(OrderedDict_OMD): """Ordered multi-dict.""" def __setitem__(self, key: str, value: _T) -> None: - super(_OMD, self).__setitem__(key, [value]) + super().__setitem__(key, [value]) def add(self, key: str, value: Any) -> None: if key not in self: - super(_OMD, self).__setitem__(key, [value]) - return None - super(_OMD, self).__getitem__(key).append(value) + super().__setitem__(key, [value]) + return + + super().__getitem__(key).append(value) def setall(self, key: str, values: List[_T]) -> None: - super(_OMD, self).__setitem__(key, values) + super().__setitem__(key, values) def __getitem__(self, key: str) -> Any: - return super(_OMD, self).__getitem__(key)[-1] + return super().__getitem__(key)[-1] def getlast(self, key: str) -> Any: - return super(_OMD, self).__getitem__(key)[-1] + return super().__getitem__(key)[-1] def setlast(self, key: str, value: Any) -> None: if key not in self: - super(_OMD, self).__setitem__(key, [value]) + super().__setitem__(key, [value]) return - prior = super(_OMD, self).__getitem__(key) + prior = super().__getitem__(key) prior[-1] = value def get(self, key: str, default: Union[_T, None] = None) -> Union[_T, None]: - return super(_OMD, self).get(key, [default])[-1] + return super().get(key, [default])[-1] def getall(self, key: str) -> List[_T]: - return super(_OMD, self).__getitem__(key) + return super().__getitem__(key) def items(self) -> List[Tuple[str, _T]]: # type: ignore[override] """List of (key, last value for key).""" @@ -248,9 +242,9 @@ class _OMD(OrderedDict_OMD): def get_config_path(config_level: Lit_config_levels) -> str: - # we do not support an absolute path of the gitconfig on windows , - # use the global config instead - if is_win and config_level == "system": + # We do not support an absolute path of the gitconfig on Windows. + # Use the global config instead. + if os.name == "nt" and config_level == "system": config_level = "global" if config_level == "system": @@ -271,7 +265,6 @@ def get_config_path(config_level: Lit_config_levels) -> str: class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder): - """Implements specifics required to read git style configuration files. 
@@ -271,7 +265,6 @@
 
 
 class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
-
     """Implements specifics required to read git style configuration files.
 
     This variation behaves much like the git.config command such that the configuration
@@ -286,15 +279,20 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
     :note:
         The config is case-sensitive even when queried, hence section and option names
         must match perfectly.
-        If used as a context manager, will release the locked file."""
+
+    :note:
+        If used as a context manager, this will release the locked file.
+    """
 
     # { Configuration
-    # The lock type determines the type of lock to use in new configuration readers.
-    # They must be compatible to the LockFile interface.
-    # A suitable alternative would be the BlockingLockFile
     t_lock = LockFile
-    re_comment = re.compile(r"^\s*[#;]")
+    """The lock type determines the type of lock to use in new configuration readers.
+    They must be compatible with the LockFile interface.
+    A suitable alternative would be the :class:`~git.util.BlockingLockFile`.
+    """
+
+    re_comment = re.compile(r"^\s*[#;]")
 
     # } END configuration
 
     optvalueonly_source = r"\s*(?P