update virtualenv module
This commit is contained in:
parent
c44141c428
commit
ac882a7ded
1
site-packages/distlib-0.3.9.dist-info/INSTALLER
Executable file
1
site-packages/distlib-0.3.9.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
284
site-packages/distlib-0.3.9.dist-info/LICENSE.txt
Executable file
284
site-packages/distlib-0.3.9.dist-info/LICENSE.txt
Executable file
@ -0,0 +1,284 @@
|
||||
A. HISTORY OF THE SOFTWARE
|
||||
==========================
|
||||
|
||||
Python was created in the early 1990s by Guido van Rossum at Stichting
|
||||
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
||||
as a successor of a language called ABC. Guido remains Python's
|
||||
principal author, although it includes many contributions from others.
|
||||
|
||||
In 1995, Guido continued his work on Python at the Corporation for
|
||||
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
||||
in Reston, Virginia where he released several versions of the
|
||||
software.
|
||||
|
||||
In May 2000, Guido and the Python core development team moved to
|
||||
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
||||
year, the PythonLabs team moved to Digital Creations (now Zope
|
||||
Corporation, see http://www.zope.com). In 2001, the Python Software
|
||||
Foundation (PSF, see http://www.python.org/psf/) was formed, a
|
||||
non-profit organization created specifically to own Python-related
|
||||
Intellectual Property. Zope Corporation is a sponsoring member of
|
||||
the PSF.
|
||||
|
||||
All Python releases are Open Source (see http://www.opensource.org for
|
||||
the Open Source Definition). Historically, most, but not all, Python
|
||||
releases have also been GPL-compatible; the table below summarizes
|
||||
the various releases.
|
||||
|
||||
Release Derived Year Owner GPL-
|
||||
from compatible? (1)
|
||||
|
||||
0.9.0 thru 1.2 1991-1995 CWI yes
|
||||
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
||||
1.6 1.5.2 2000 CNRI no
|
||||
2.0 1.6 2000 BeOpen.com no
|
||||
1.6.1 1.6 2001 CNRI yes (2)
|
||||
2.1 2.0+1.6.1 2001 PSF no
|
||||
2.0.1 2.0+1.6.1 2001 PSF yes
|
||||
2.1.1 2.1+2.0.1 2001 PSF yes
|
||||
2.2 2.1.1 2001 PSF yes
|
||||
2.1.2 2.1.1 2002 PSF yes
|
||||
2.1.3 2.1.2 2002 PSF yes
|
||||
2.2.1 2.2 2002 PSF yes
|
||||
2.2.2 2.2.1 2002 PSF yes
|
||||
2.2.3 2.2.2 2003 PSF yes
|
||||
2.3 2.2.2 2002-2003 PSF yes
|
||||
2.3.1 2.3 2002-2003 PSF yes
|
||||
2.3.2 2.3.1 2002-2003 PSF yes
|
||||
2.3.3 2.3.2 2002-2003 PSF yes
|
||||
2.3.4 2.3.3 2004 PSF yes
|
||||
2.3.5 2.3.4 2005 PSF yes
|
||||
2.4 2.3 2004 PSF yes
|
||||
2.4.1 2.4 2005 PSF yes
|
||||
2.4.2 2.4.1 2005 PSF yes
|
||||
2.4.3 2.4.2 2006 PSF yes
|
||||
2.4.4 2.4.3 2006 PSF yes
|
||||
2.5 2.4 2006 PSF yes
|
||||
2.5.1 2.5 2007 PSF yes
|
||||
2.5.2 2.5.1 2008 PSF yes
|
||||
2.5.3 2.5.2 2008 PSF yes
|
||||
2.6 2.5 2008 PSF yes
|
||||
2.6.1 2.6 2008 PSF yes
|
||||
2.6.2 2.6.1 2009 PSF yes
|
||||
2.6.3 2.6.2 2009 PSF yes
|
||||
2.6.4 2.6.3 2009 PSF yes
|
||||
2.6.5 2.6.4 2010 PSF yes
|
||||
3.0 2.6 2008 PSF yes
|
||||
3.0.1 3.0 2009 PSF yes
|
||||
3.1 3.0.1 2009 PSF yes
|
||||
3.1.1 3.1 2009 PSF yes
|
||||
3.1.2 3.1 2010 PSF yes
|
||||
3.2 3.1 2010 PSF yes
|
||||
|
||||
Footnotes:
|
||||
|
||||
(1) GPL-compatible doesn't mean that we're distributing Python under
|
||||
the GPL. All Python licenses, unlike the GPL, let you distribute
|
||||
a modified version without making your changes open source. The
|
||||
GPL-compatible licenses make it possible to combine Python with
|
||||
other software that is released under the GPL; the others don't.
|
||||
|
||||
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
||||
because its license has a choice of law clause. According to
|
||||
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
||||
is "not incompatible" with the GPL.
|
||||
|
||||
Thanks to the many outside volunteers who have worked under Guido's
|
||||
direction to make these releases possible.
|
||||
|
||||
|
||||
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
||||
===============================================================
|
||||
|
||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||
--------------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||
otherwise using this software ("Python") in source or binary form and
|
||||
its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
distribute, and otherwise use Python alone or in any derivative version,
|
||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
|
||||
Python Software Foundation; All Rights Reserved" are retained in Python alone or
|
||||
in any derivative version prepared by Licensee.
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python.
|
||||
|
||||
4. PSF is making Python available to Licensee on an "AS IS"
|
||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. Nothing in this License Agreement shall be deemed to create any
|
||||
relationship of agency, partnership, or joint venture between PSF and
|
||||
Licensee. This License Agreement does not grant permission to use PSF
|
||||
trademarks or trade name in a trademark sense to endorse or promote
|
||||
products or services of Licensee, or any third party.
|
||||
|
||||
8. By copying, installing or otherwise using Python, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
|
||||
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
||||
-------------------------------------------
|
||||
|
||||
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
||||
|
||||
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
||||
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
||||
Individual or Organization ("Licensee") accessing and otherwise using
|
||||
this software in source or binary form and its associated
|
||||
documentation ("the Software").
|
||||
|
||||
2. Subject to the terms and conditions of this BeOpen Python License
|
||||
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
||||
royalty-free, world-wide license to reproduce, analyze, test, perform
|
||||
and/or display publicly, prepare derivative works, distribute, and
|
||||
otherwise use the Software alone or in any derivative version,
|
||||
provided, however, that the BeOpen Python License is retained in the
|
||||
Software, alone or in any derivative version prepared by Licensee.
|
||||
|
||||
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
||||
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
||||
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
||||
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
5. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
6. This License Agreement shall be governed by and interpreted in all
|
||||
respects by the law of the State of California, excluding conflict of
|
||||
law provisions. Nothing in this License Agreement shall be deemed to
|
||||
create any relationship of agency, partnership, or joint venture
|
||||
between BeOpen and Licensee. This License Agreement does not grant
|
||||
permission to use BeOpen trademarks or trade names in a trademark
|
||||
sense to endorse or promote products or services of Licensee, or any
|
||||
third party. As an exception, the "BeOpen Python" logos available at
|
||||
http://www.pythonlabs.com/logos.html may be used according to the
|
||||
permissions granted on that web page.
|
||||
|
||||
7. By copying, installing or otherwise using the software, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
|
||||
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
||||
---------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Corporation for National
|
||||
Research Initiatives, having an office at 1895 Preston White Drive,
|
||||
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
||||
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
||||
source or binary form and its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, CNRI
|
||||
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
||||
license to reproduce, analyze, test, perform and/or display publicly,
|
||||
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
||||
alone or in any derivative version, provided, however, that CNRI's
|
||||
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
||||
1995-2001 Corporation for National Research Initiatives; All Rights
|
||||
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
||||
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
||||
Agreement, Licensee may substitute the following text (omitting the
|
||||
quotes): "Python 1.6.1 is made available subject to the terms and
|
||||
conditions in CNRI's License Agreement. This Agreement together with
|
||||
Python 1.6.1 may be located on the Internet using the following
|
||||
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
||||
Agreement may also be obtained from a proxy server on the Internet
|
||||
using the following URL: http://hdl.handle.net/1895.22/1013".
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python 1.6.1.
|
||||
|
||||
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
||||
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. This License Agreement shall be governed by the federal
|
||||
intellectual property law of the United States, including without
|
||||
limitation the federal copyright law, and, to the extent such
|
||||
U.S. federal law does not apply, by the law of the Commonwealth of
|
||||
Virginia, excluding Virginia's conflict of law provisions.
|
||||
Notwithstanding the foregoing, with regard to derivative works based
|
||||
on Python 1.6.1 that incorporate non-separable material that was
|
||||
previously distributed under the GNU General Public License (GPL), the
|
||||
law of the Commonwealth of Virginia shall govern this License
|
||||
Agreement only as to issues arising under or with respect to
|
||||
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
||||
License Agreement shall be deemed to create any relationship of
|
||||
agency, partnership, or joint venture between CNRI and Licensee. This
|
||||
License Agreement does not grant permission to use CNRI trademarks or
|
||||
trade name in a trademark sense to endorse or promote products or
|
||||
services of Licensee, or any third party.
|
||||
|
||||
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
||||
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
||||
bound by the terms and conditions of this License Agreement.
|
||||
|
||||
ACCEPT
|
||||
|
||||
|
||||
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
||||
--------------------------------------------------
|
||||
|
||||
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
||||
The Netherlands. All rights reserved.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software and its
|
||||
documentation for any purpose and without fee is hereby granted,
|
||||
provided that the above copyright notice appear in all copies and that
|
||||
both that copyright notice and this permission notice appear in
|
||||
supporting documentation, and that the name of Stichting Mathematisch
|
||||
Centrum or CWI not be used in advertising or publicity pertaining to
|
||||
distribution of the software without specific, written prior
|
||||
permission.
|
||||
|
||||
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
||||
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
||||
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
118
site-packages/distlib-0.3.9.dist-info/METADATA
Executable file
118
site-packages/distlib-0.3.9.dist-info/METADATA
Executable file
@ -0,0 +1,118 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: distlib
|
||||
Version: 0.3.9
|
||||
Summary: Distribution utilities
|
||||
Home-page: https://github.com/pypa/distlib
|
||||
Author: Vinay Sajip
|
||||
Author-email: vinay_sajip@red-dove.com
|
||||
License: PSF-2.0
|
||||
Project-URL: Documentation, https://distlib.readthedocs.io/
|
||||
Project-URL: Source, https://github.com/pypa/distlib
|
||||
Project-URL: Tracker, https://github.com/pypa/distlib/issues
|
||||
Platform: any
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Console
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Python Software Foundation License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Topic :: Software Development
|
||||
License-File: LICENSE.txt
|
||||
|
||||
|badge1| |badge2|
|
||||
|
||||
.. |badge1| image:: https://img.shields.io/github/actions/workflow/status/pypa/distlib/package-tests.yml
|
||||
:alt: GitHub Workflow Status (with event)
|
||||
|
||||
.. |badge2| image:: https://img.shields.io/codecov/c/github/pypa/distlib
|
||||
:target: https://app.codecov.io/gh/pypa/distlib
|
||||
:alt: GitHub coverage status
|
||||
|
||||
What is it?
|
||||
-----------
|
||||
|
||||
Distlib is a library which implements low-level functions that relate to
|
||||
packaging and distribution of Python software. It is intended to be used as the
|
||||
basis for third-party packaging tools. The documentation is available at
|
||||
|
||||
https://distlib.readthedocs.io/
|
||||
|
||||
Main features
|
||||
-------------
|
||||
|
||||
Distlib currently offers the following features:
|
||||
|
||||
* The package ``distlib.database``, which implements a database of installed
|
||||
distributions, as defined by :pep:`376`, and distribution dependency graph
|
||||
logic. Support is also provided for non-installed distributions (i.e.
|
||||
distributions registered with metadata on an index like PyPI), including
|
||||
the ability to scan for dependencies and building dependency graphs.
|
||||
* The package ``distlib.index``, which implements an interface to perform
|
||||
operations on an index, such as registering a project, uploading a
|
||||
distribution or uploading documentation. Support is included for verifying
|
||||
SSL connections (with domain matching) and signing/verifying packages using
|
||||
GnuPG.
|
||||
* The package ``distlib.metadata``, which implements distribution metadata as
|
||||
defined by :pep:`643`, :pep:`566`, :pep:`345`, :pep:`314` and :pep:`241`.
|
||||
* The package ``distlib.markers``, which implements environment markers as
|
||||
defined by :pep:`508`.
|
||||
* The package ``distlib.manifest``, which implements lists of files used
|
||||
in packaging source distributions.
|
||||
* The package ``distlib.locators``, which allows finding distributions, whether
|
||||
on PyPI (XML-RPC or via the "simple" interface), local directories or some
|
||||
other source.
|
||||
* The package ``distlib.resources``, which allows access to data files stored
|
||||
in Python packages, both in the file system and in .zip files.
|
||||
* The package ``distlib.scripts``, which allows installing of scripts with
|
||||
adjustment of shebang lines and support for native Windows executable
|
||||
launchers.
|
||||
* The package ``distlib.version``, which implements version specifiers as
|
||||
defined by :pep:`440`, but also support for working with "legacy" versions and
|
||||
semantic versions.
|
||||
* The package ``distlib.wheel``, which provides support for building and
|
||||
installing from the Wheel format for binary distributions (see :pep:`427`).
|
||||
* The package ``distlib.util``, which contains miscellaneous functions and
|
||||
classes which are useful in packaging, but which do not fit neatly into
|
||||
one of the other packages in ``distlib``.* The package implements enhanced
|
||||
globbing functionality such as the ability to use ``**`` in patterns to
|
||||
specify recursing into subdirectories.
|
||||
|
||||
|
||||
Python version and platform compatibility
|
||||
-----------------------------------------
|
||||
|
||||
Distlib is intended to be used on and is tested on Python versions 2.7 and 3.6 or later,
|
||||
pypy-2.7 and pypy3 on Linux, Windows, and macOS.
|
||||
|
||||
Project status
|
||||
--------------
|
||||
|
||||
The project has reached a mature status in its development: there is a comprehensive
|
||||
test suite and it has been exercised on Windows, Ubuntu and macOS. The project is used
|
||||
by well-known projects such as `pip <https://pypi.org/pypi/pip>`_ and `caniusepython3
|
||||
<https://pypi.org/pypi/caniusepython3>`_.
|
||||
|
||||
This project was migrated from Mercurial to Git and from BitBucket to GitHub, and
|
||||
although all information of importance has been retained across the migration, some
|
||||
commit references in issues and issue comments may have become invalid.
|
||||
|
||||
Code of Conduct
|
||||
---------------
|
||||
|
||||
Everyone interacting in the distlib project's codebases, issue trackers, chat
|
||||
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
|
||||
|
||||
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
|
||||
|
||||
|
38
site-packages/distlib-0.3.9.dist-info/RECORD
Executable file
38
site-packages/distlib-0.3.9.dist-info/RECORD
Executable file
@ -0,0 +1,38 @@
|
||||
distlib-0.3.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
distlib-0.3.9.dist-info/LICENSE.txt,sha256=gI4QyKarjesUn_mz-xn0R6gICUYG1xKpylf-rTVSWZ0,14531
|
||||
distlib-0.3.9.dist-info/METADATA,sha256=GfbwA0gg2QzJ2oWxlg-OHyPG1Q_wWM2NzEtWg-EfxDc,5246
|
||||
distlib-0.3.9.dist-info/RECORD,,
|
||||
distlib-0.3.9.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
|
||||
distlib-0.3.9.dist-info/top_level.txt,sha256=9BERqitu_vzyeyILOcGzX9YyA2AB_xlC4-81V6xoizk,8
|
||||
distlib/__init__.py,sha256=dcwgYGYGQqAEawBXPDtIx80DO_3cOmFv8HTc8JMzknQ,625
|
||||
distlib/__pycache__/__init__.cpython-311.pyc,,
|
||||
distlib/__pycache__/compat.cpython-311.pyc,,
|
||||
distlib/__pycache__/database.cpython-311.pyc,,
|
||||
distlib/__pycache__/index.cpython-311.pyc,,
|
||||
distlib/__pycache__/locators.cpython-311.pyc,,
|
||||
distlib/__pycache__/manifest.cpython-311.pyc,,
|
||||
distlib/__pycache__/markers.cpython-311.pyc,,
|
||||
distlib/__pycache__/metadata.cpython-311.pyc,,
|
||||
distlib/__pycache__/resources.cpython-311.pyc,,
|
||||
distlib/__pycache__/scripts.cpython-311.pyc,,
|
||||
distlib/__pycache__/util.cpython-311.pyc,,
|
||||
distlib/__pycache__/version.cpython-311.pyc,,
|
||||
distlib/__pycache__/wheel.cpython-311.pyc,,
|
||||
distlib/compat.py,sha256=2jRSjRI4o-vlXeTK2BCGIUhkc6e9ZGhSsacRM5oseTw,41467
|
||||
distlib/database.py,sha256=mHy_LxiXIsIVRb-T0-idBrVLw3Ffij5teHCpbjmJ9YU,51160
|
||||
distlib/index.py,sha256=lTbw268rRhj8dw1sib3VZ_0EhSGgoJO3FKJzSFMOaeA,20797
|
||||
distlib/locators.py,sha256=oBeAZpFuPQSY09MgNnLfQGGAXXvVO96BFpZyKMuK4tM,51026
|
||||
distlib/manifest.py,sha256=3qfmAmVwxRqU1o23AlfXrQGZzh6g_GGzTAP_Hb9C5zQ,14168
|
||||
distlib/markers.py,sha256=X6sDvkFGcYS8gUW8hfsWuKEKAqhQZAJ7iXOMLxRYjYk,5164
|
||||
distlib/metadata.py,sha256=zil3sg2EUfLXVigljY2d_03IJt-JSs7nX-73fECMX2s,38724
|
||||
distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
|
||||
distlib/scripts.py,sha256=BJliaDAZaVB7WAkwokgC3HXwLD2iWiHaVI50H7C6eG8,18608
|
||||
distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792
|
||||
distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784
|
||||
distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032
|
||||
distlib/util.py,sha256=vMPGvsS4j9hF6Y9k3Tyom1aaHLb0rFmZAEyzeAdel9w,66682
|
||||
distlib/version.py,sha256=s5VIs8wBn0fxzGxWM_aA2ZZyx525HcZbMvcTlTyZ3Rg,23727
|
||||
distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648
|
||||
distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448
|
||||
distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888
|
||||
distlib/wheel.py,sha256=DFIVguEQHCdxnSdAO0dfFsgMcvVZitg7bCOuLwZ7A_s,43979
|
6
site-packages/distlib-0.3.9.dist-info/WHEEL
Executable file
6
site-packages/distlib-0.3.9.dist-info/WHEEL
Executable file
@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
|
1
site-packages/distlib-0.3.9.dist-info/top_level.txt
Executable file
1
site-packages/distlib-0.3.9.dist-info/top_level.txt
Executable file
@ -0,0 +1 @@
|
||||
distlib
|
33
site-packages/distlib/__init__.py
Executable file
33
site-packages/distlib/__init__.py
Executable file
@ -0,0 +1,33 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2012-2023 Vinay Sajip.
|
||||
# Licensed to the Python Software Foundation under a contributor agreement.
|
||||
# See LICENSE.txt and CONTRIBUTORS.txt.
|
||||
#
|
||||
import logging
|
||||
|
||||
__version__ = '0.3.9'
|
||||
|
||||
|
||||
class DistlibException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
from logging import NullHandler
|
||||
except ImportError: # pragma: no cover
|
||||
|
||||
class NullHandler(logging.Handler):
|
||||
|
||||
def handle(self, record):
|
||||
pass
|
||||
|
||||
def emit(self, record):
|
||||
pass
|
||||
|
||||
def createLock(self):
|
||||
self.lock = None
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.addHandler(NullHandler())
|
1137
site-packages/distlib/compat.py
Executable file
1137
site-packages/distlib/compat.py
Executable file
File diff suppressed because it is too large
Load Diff
1329
site-packages/distlib/database.py
Executable file
1329
site-packages/distlib/database.py
Executable file
File diff suppressed because it is too large
Load Diff
508
site-packages/distlib/index.py
Executable file
508
site-packages/distlib/index.py
Executable file
@ -0,0 +1,508 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2013-2023 Vinay Sajip.
|
||||
# Licensed to the Python Software Foundation under a contributor agreement.
|
||||
# See LICENSE.txt and CONTRIBUTORS.txt.
|
||||
#
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
try:
|
||||
from threading import Thread
|
||||
except ImportError: # pragma: no cover
|
||||
from dummy_threading import Thread
|
||||
|
||||
from . import DistlibException
|
||||
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
|
||||
urlparse, build_opener, string_types)
|
||||
from .util import zip_dir, ServerProxy
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_INDEX = 'https://pypi.org/pypi'
|
||||
DEFAULT_REALM = 'pypi'
|
||||
|
||||
|
||||
class PackageIndex(object):
|
||||
"""
|
||||
This class represents a package index compatible with PyPI, the Python
|
||||
Package Index.
|
||||
"""
|
||||
|
||||
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
|
||||
|
||||
def __init__(self, url=None):
|
||||
"""
|
||||
Initialise an instance.
|
||||
|
||||
:param url: The URL of the index. If not specified, the URL for PyPI is
|
||||
used.
|
||||
"""
|
||||
self.url = url or DEFAULT_INDEX
|
||||
self.read_configuration()
|
||||
scheme, netloc, path, params, query, frag = urlparse(self.url)
|
||||
if params or query or frag or scheme not in ('http', 'https'):
|
||||
raise DistlibException('invalid repository: %s' % self.url)
|
||||
self.password_handler = None
|
||||
self.ssl_verifier = None
|
||||
self.gpg = None
|
||||
self.gpg_home = None
|
||||
with open(os.devnull, 'w') as sink:
|
||||
# Use gpg by default rather than gpg2, as gpg2 insists on
|
||||
# prompting for passwords
|
||||
for s in ('gpg', 'gpg2'):
|
||||
try:
|
||||
rc = subprocess.check_call([s, '--version'], stdout=sink,
|
||||
stderr=sink)
|
||||
if rc == 0:
|
||||
self.gpg = s
|
||||
break
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _get_pypirc_command(self):
|
||||
"""
|
||||
Get the distutils command for interacting with PyPI configurations.
|
||||
:return: the command.
|
||||
"""
|
||||
from .util import _get_pypirc_command as cmd
|
||||
return cmd()
|
||||
|
||||
def read_configuration(self):
|
||||
"""
|
||||
Read the PyPI access configuration as supported by distutils. This populates
|
||||
``username``, ``password``, ``realm`` and ``url`` attributes from the
|
||||
configuration.
|
||||
"""
|
||||
from .util import _load_pypirc
|
||||
cfg = _load_pypirc(self)
|
||||
self.username = cfg.get('username')
|
||||
self.password = cfg.get('password')
|
||||
self.realm = cfg.get('realm', 'pypi')
|
||||
self.url = cfg.get('repository', self.url)
|
||||
|
||||
def save_configuration(self):
|
||||
"""
|
||||
Save the PyPI access configuration. You must have set ``username`` and
|
||||
``password`` attributes before calling this method.
|
||||
"""
|
||||
self.check_credentials()
|
||||
from .util import _store_pypirc
|
||||
_store_pypirc(self)
|
||||
|
||||
def check_credentials(self):
|
||||
"""
|
||||
Check that ``username`` and ``password`` have been set, and raise an
|
||||
exception if not.
|
||||
"""
|
||||
if self.username is None or self.password is None:
|
||||
raise DistlibException('username and password must be set')
|
||||
pm = HTTPPasswordMgr()
|
||||
_, netloc, _, _, _, _ = urlparse(self.url)
|
||||
pm.add_password(self.realm, netloc, self.username, self.password)
|
||||
self.password_handler = HTTPBasicAuthHandler(pm)
|
||||
|
||||
def register(self, metadata): # pragma: no cover
|
||||
"""
|
||||
Register a distribution on PyPI, using the provided metadata.
|
||||
|
||||
:param metadata: A :class:`Metadata` instance defining at least a name
|
||||
and version number for the distribution to be
|
||||
registered.
|
||||
:return: The HTTP response received from PyPI upon submission of the
|
||||
request.
|
||||
"""
|
||||
self.check_credentials()
|
||||
metadata.validate()
|
||||
d = metadata.todict()
|
||||
d[':action'] = 'verify'
|
||||
request = self.encode_request(d.items(), [])
|
||||
self.send_request(request)
|
||||
d[':action'] = 'submit'
|
||||
request = self.encode_request(d.items(), [])
|
||||
return self.send_request(request)
|
||||
|
||||
def _reader(self, name, stream, outbuf):
|
||||
"""
|
||||
Thread runner for reading lines of from a subprocess into a buffer.
|
||||
|
||||
:param name: The logical name of the stream (used for logging only).
|
||||
:param stream: The stream to read from. This will typically a pipe
|
||||
connected to the output stream of a subprocess.
|
||||
:param outbuf: The list to append the read lines to.
|
||||
"""
|
||||
while True:
|
||||
s = stream.readline()
|
||||
if not s:
|
||||
break
|
||||
s = s.decode('utf-8').rstrip()
|
||||
outbuf.append(s)
|
||||
logger.debug('%s: %s' % (name, s))
|
||||
stream.close()
|
||||
|
||||
def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover
|
||||
"""
|
||||
Return a suitable command for signing a file.
|
||||
|
||||
:param filename: The pathname to the file to be signed.
|
||||
:param signer: The identifier of the signer of the file.
|
||||
:param sign_password: The passphrase for the signer's
|
||||
private key used for signing.
|
||||
:param keystore: The path to a directory which contains the keys
|
||||
used in verification. If not specified, the
|
||||
instance's ``gpg_home`` attribute is used instead.
|
||||
:return: The signing command as a list suitable to be
|
||||
passed to :class:`subprocess.Popen`.
|
||||
"""
|
||||
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
|
||||
if keystore is None:
|
||||
keystore = self.gpg_home
|
||||
if keystore:
|
||||
cmd.extend(['--homedir', keystore])
|
||||
if sign_password is not None:
|
||||
cmd.extend(['--batch', '--passphrase-fd', '0'])
|
||||
td = tempfile.mkdtemp()
|
||||
sf = os.path.join(td, os.path.basename(filename) + '.asc')
|
||||
cmd.extend(['--detach-sign', '--armor', '--local-user',
|
||||
signer, '--output', sf, filename])
|
||||
logger.debug('invoking: %s', ' '.join(cmd))
|
||||
return cmd, sf
|
||||
|
||||
def run_command(self, cmd, input_data=None):
    """
    Run a command in a child process, passing it any input data specified.

    :param cmd: The command to run.
    :param input_data: If specified, this must be a byte string containing
                       data to be sent to the child process.
    :return: A tuple consisting of the subprocess' exit code, a list of
             lines read from the subprocess' ``stdout``, and a list of
             lines read from the subprocess' ``stderr``.
    """
    kwargs = {
        'stdout': subprocess.PIPE,
        'stderr': subprocess.PIPE,
    }
    if input_data is not None:
        kwargs['stdin'] = subprocess.PIPE
    stdout = []
    stderr = []
    p = subprocess.Popen(cmd, **kwargs)
    # We don't use communicate() here because we may need to
    # get clever with interacting with the command.
    # Each pipe is drained in its own thread so that neither stream can
    # fill its buffer while we wait.
    t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
    t1.start()
    t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
    t2.start()
    if input_data is not None:
        p.stdin.write(input_data)
        p.stdin.close()

    # Wait for the process to exit, then for the reader threads to see EOF.
    p.wait()
    t1.join()
    t2.join()
    return p.returncode, stdout, stderr
|
||||
|
||||
def sign_file(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
    """
    Sign a file.

    :param filename: The pathname to the file to be signed.
    :param signer: The identifier of the signer of the file.
    :param sign_password: The passphrase for the signer's
                          private key used for signing, or ``None`` if
                          no passphrase is required.
    :param keystore: The path to a directory which contains the keys
                     used in signing. If not specified, the instance's
                     ``gpg_home`` attribute is used instead.
    :return: The absolute pathname of the file where the signature is
             stored.
    :raises DistlibException: If the signing command exits with a
                              non-zero code.
    """
    cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
                                          keystore)
    # Only encode/feed a passphrase when one was supplied:
    # get_sign_command adds --passphrase-fd only for a non-None password,
    # and calling .encode() on None would raise AttributeError here.
    if sign_password is not None:
        input_data = sign_password.encode('utf-8')
    else:
        input_data = None
    rc, stdout, stderr = self.run_command(cmd, input_data)
    if rc != 0:
        raise DistlibException('sign command failed with error '
                               'code %s' % rc)
    return sig_file
|
||||
|
||||
def upload_file(self, metadata, filename, signer=None, sign_password=None,
                filetype='sdist', pyversion='source', keystore=None):
    """
    Upload a release file to the index.

    :param metadata: A :class:`Metadata` instance defining at least a name
                     and version number for the file to be uploaded.
    :param filename: The pathname of the file to be uploaded.
    :param signer: The identifier of the signer of the file.
    :param sign_password: The passphrase for the signer's
                          private key used for signing.
    :param filetype: The type of the file being uploaded. This is the
                     distutils command which produced that file, e.g.
                     ``sdist`` or ``bdist_wheel``.
    :param pyversion: The version of Python which the release relates
                      to. For code compatible with any Python, this would
                      be ``source``, otherwise it would be e.g. ``3.2``.
    :param keystore: The path to a directory which contains the keys
                     used in signing. If not specified, the instance's
                     ``gpg_home`` attribute is used instead.
    :return: The HTTP response received from PyPI upon submission of the
             request.
    :raises DistlibException: If the file doesn't exist or credentials
                              are missing/invalid.
    """
    self.check_credentials()
    if not os.path.exists(filename):
        raise DistlibException('not found: %s' % filename)
    metadata.validate()
    d = metadata.todict()
    sig_file = None
    if signer:
        if not self.gpg:
            # Best-effort: a missing gpg is logged, not fatal.
            logger.warning('no signing program available - not signed')
        else:
            sig_file = self.sign_file(filename, signer, sign_password,
                                      keystore)
    with open(filename, 'rb') as f:
        file_data = f.read()
    # Both digests are sent so the index can verify the upload.
    md5_digest = hashlib.md5(file_data).hexdigest()
    sha256_digest = hashlib.sha256(file_data).hexdigest()
    d.update({
        ':action': 'file_upload',
        'protocol_version': '1',
        'filetype': filetype,
        'pyversion': pyversion,
        'md5_digest': md5_digest,
        'sha256_digest': sha256_digest,
    })
    files = [('content', os.path.basename(filename), file_data)]
    if sig_file:
        with open(sig_file, 'rb') as f:
            sig_data = f.read()
        files.append(('gpg_signature', os.path.basename(sig_file),
                      sig_data))
        # The signature was written into a temporary directory created by
        # get_sign_command; clean it up once the bytes are in memory.
        shutil.rmtree(os.path.dirname(sig_file))
    request = self.encode_request(d.items(), files)
    return self.send_request(request)
|
||||
|
||||
def upload_documentation(self, metadata, doc_dir):  # pragma: no cover
    """
    Upload documentation to the index.

    :param metadata: A :class:`Metadata` instance defining at least a name
                     and version number for the documentation to be
                     uploaded.
    :param doc_dir: The pathname of the directory which contains the
                    documentation. This should be the directory that
                    contains the ``index.html`` for the documentation.
    :return: The HTTP response received from PyPI upon submission of the
             request.
    """
    self.check_credentials()
    if not os.path.isdir(doc_dir):
        raise DistlibException('not a directory: %r' % doc_dir)
    index_page = os.path.join(doc_dir, 'index.html')
    if not os.path.exists(index_page):
        raise DistlibException('not found: %r' % index_page)
    metadata.validate()
    name, version = metadata.name, metadata.version
    # The whole documentation tree is zipped in memory and posted.
    payload = zip_dir(doc_dir).getvalue()
    fields = [(':action', 'doc_upload'),
              ('name', name), ('version', version)]
    attachments = [('content', name, payload)]
    return self.send_request(self.encode_request(fields, attachments))
|
||||
|
||||
def get_verify_command(self, signature_filename, data_filename,
                       keystore=None):
    """
    Return a suitable command for verifying a file.

    :param signature_filename: The pathname to the file containing the
                               signature.
    :param data_filename: The pathname to the file containing the
                          signed data.
    :param keystore: The path to a directory which contains the keys
                     used in verification. If not specified, the
                     instance's ``gpg_home`` attribute is used instead.
    :return: The verifying command as a list suitable to be
             passed to :class:`subprocess.Popen`.
    """
    home = keystore if keystore is not None else self.gpg_home
    cmd = [self.gpg, '--status-fd', '2', '--no-tty']
    if home:
        cmd += ['--homedir', home]
    cmd += ['--verify', signature_filename, data_filename]
    logger.debug('invoking: %s', ' '.join(cmd))
    return cmd
|
||||
|
||||
def verify_signature(self, signature_filename, data_filename,
                     keystore=None):
    """
    Verify a signature for a file.

    :param signature_filename: The pathname to the file containing the
                               signature.
    :param data_filename: The pathname to the file containing the
                          signed data.
    :param keystore: The path to a directory which contains the keys
                     used in verification. If not specified, the
                     instance's ``gpg_home`` attribute is used instead.
    :return: True if the signature was verified, else False.
    :raises DistlibException: If gpg is unavailable or the verify command
                              fails outright.
    """
    if not self.gpg:
        raise DistlibException('verification unavailable because gpg '
                               'unavailable')
    cmd = self.get_verify_command(signature_filename, data_filename,
                                  keystore)
    rc, _, _ = self.run_command(cmd)
    # An exit code of 0 is treated as verified and 1 as not verified;
    # any other code means the command itself failed.
    if rc not in (0, 1):
        raise DistlibException('verify command failed with error code %s' % rc)
    return rc == 0
|
||||
|
||||
def download_file(self, url, destfile, digest=None, reporthook=None):
    """
    This is a convenience method for downloading a file from an URL.
    Normally, this will be a file from the index, though currently
    no check is made for this (i.e. a file can be downloaded from
    anywhere).

    The method is just like the :func:`urlretrieve` function in the
    standard library, except that it allows digest computation to be
    done during download and checking that the downloaded data
    matched any expected value.

    :param url: The URL of the file to be downloaded (assumed to be
                available via an HTTP GET request).
    :param destfile: The pathname where the downloaded file is to be
                     saved.
    :param digest: If specified, this must be a (hasher, value)
                   tuple, where hasher is the algorithm used (e.g.
                   ``'md5'``) and ``value`` is the expected value. A bare
                   string is also accepted, in which case ``'md5'`` is
                   assumed as the algorithm.
    :param reporthook: The same as for :func:`urlretrieve` in the
                       standard library.
    :raises DistlibException: If the retrieval is incomplete or the
                              computed digest doesn't match the expected
                              one.
    """
    if digest is None:
        digester = None
        logger.debug('No digest specified')
    else:
        if isinstance(digest, (list, tuple)):
            hasher, digest = digest
        else:
            # A bare digest string: assume md5 was used to produce it.
            hasher = 'md5'
        digester = getattr(hashlib, hasher)()
        logger.debug('Digest specified: %s' % digest)
    # The following code is equivalent to urlretrieve.
    # We need to do it this way so that we can compute the
    # digest of the file as we go.
    with open(destfile, 'wb') as dfp:
        # addinfourl is not a context manager on 2.x
        # so we have to use try/finally
        sfp = self.send_request(Request(url))
        try:
            headers = sfp.info()
            blocksize = 8192
            size = -1       # -1 means "length unknown"
            read = 0
            blocknum = 0
            # NOTE(review): the lookup uses "content-length" but the read
            # uses "Content-Length" -- this presumably relies on the
            # headers mapping being case-insensitive; confirm.
            if "content-length" in headers:
                size = int(headers["Content-Length"])
            if reporthook:
                reporthook(blocknum, blocksize, size)
            while True:
                block = sfp.read(blocksize)
                if not block:
                    break
                read += len(block)
                dfp.write(block)
                if digester:
                    digester.update(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, blocksize, size)
        finally:
            sfp.close()

    # check that we got the whole file, if we can
    if size >= 0 and read < size:
        raise DistlibException(
            'retrieval incomplete: got only %d out of %d bytes'
            % (read, size))
    # if we have a digest, it must match.
    if digester:
        actual = digester.hexdigest()
        if digest != actual:
            raise DistlibException('%s digest mismatch for %s: expected '
                                   '%s, got %s' % (hasher, destfile,
                                                   digest, actual))
        logger.debug('Digest verified: %s', digest)
|
||||
|
||||
def send_request(self, req):
    """
    Send a standard library :class:`Request` to PyPI and return its
    response.

    :param req: The request to send.
    :return: The HTTP response from PyPI (a standard library HTTPResponse).
    """
    # Install only the handlers that are actually configured.
    handlers = [h for h in (self.password_handler, self.ssl_verifier) if h]
    return build_opener(*handlers).open(req)
|
||||
|
||||
def encode_request(self, fields, files):
    """
    Encode fields and files for posting to an HTTP server.

    :param fields: The fields to send as a list of (fieldname, value)
                   tuples.
    :param files: The files to send as a list of (fieldname, filename,
                  file_bytes) tuple.
    :return: A :class:`Request` for ``self.url`` carrying the
             multipart/form-data body and corresponding headers.
    """
    # Adapted from packaging, which in turn was adapted from
    # http://code.activestate.com/recipes/146306

    parts = []
    boundary = self.boundary
    for k, values in fields:
        # A field may carry several values; emit one part per value.
        if not isinstance(values, (list, tuple)):
            values = [values]

        for v in values:
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"' %
                 k).encode('utf-8'),
                b'',
                v.encode('utf-8')))
    for key, filename, value in files:
        # File contents are already bytes and are included verbatim.
        parts.extend((
            b'--' + boundary,
            ('Content-Disposition: form-data; name="%s"; filename="%s"' %
             (key, filename)).encode('utf-8'),
            b'',
            value))

    # Closing boundary, plus a trailing empty element so the joined body
    # ends with CRLF.
    parts.extend((b'--' + boundary + b'--', b''))

    body = b'\r\n'.join(parts)
    ct = b'multipart/form-data; boundary=' + boundary
    headers = {
        'Content-type': ct,
        'Content-length': str(len(body))
    }
    return Request(self.url, body, headers)
|
||||
|
||||
def search(self, terms, operator=None):  # pragma: no cover
    """
    Search the index via its XML-RPC interface.

    :param terms: Either a string (treated as a name to search for) or a
                  mapping of field names to search values.
    :param operator: How multiple terms are combined; defaults to 'and'.
    :return: The result of the XML-RPC ``search`` call.
    """
    if isinstance(terms, string_types):
        terms = {'name': terms}
    rpc_proxy = ServerProxy(self.url, timeout=3.0)
    try:
        return rpc_proxy.search(terms, operator or 'and')
    finally:
        # NOTE(review): calling the proxy with 'close' appears to return
        # the transport's close method, which is then invoked -- confirm
        # against the ServerProxy wrapper in use.
        rpc_proxy('close')()
|
1295
site-packages/distlib/locators.py
Executable file
1295
site-packages/distlib/locators.py
Executable file
File diff suppressed because it is too large
Load Diff
384
site-packages/distlib/manifest.py
Executable file
384
site-packages/distlib/manifest.py
Executable file
@ -0,0 +1,384 @@
|
||||
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2023 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by some spaces + EOL
# NOTE(review): the pattern below actually matches a backslash followed by
# word characters (\w*) and a newline, not spaces (\s*) as the comment above
# claims -- TODO confirm which behaviour is intended before changing it.
_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
# Matches a comment up to (but not including) its newline, or a trailing
# final newline.
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]
|
||||
|
||||
|
||||
class Manifest(object):
    """
    A list of files built by exploring the filesystem and filtered by applying various
    patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        # Paths are stored absolute, normalised against this base.
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        self.prefix = self.base + os.sep
        # allfiles stays None until findall() populates it (lazy loading).
        self.allfiles = None
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        stack = [root]
        pop = stack.pop
        push = stack.append

        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    # NOTE(review): os.stat follows symlinks, so S_ISLNK
                    # can never be true here and symlinked directories are
                    # descended into -- confirm whether os.lstat was
                    # intended.
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        """
        for item in items:
            self.add(item)

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order

        :param wantdirs: If true, the ancestor directories of each file
                         (up to the base) are included in the result.
        """

        def add_dir(dirs, d):
            # Record d and, recursively, every ancestor up to the base.
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)  # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        # Sorting on (dirname, basename) pairs yields directory order.
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                          compatible with distutils ``MANIFEST.in`` files:

                          http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=True)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=False)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, prefix=thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:  # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        :raises DistlibException: If the directive is malformed.
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?' match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match. 'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        # Iterate over a copy, since the set is mutated inside the loop.
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex. If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex. Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS. So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
|
162
site-packages/distlib/markers.py
Executable file
162
site-packages/distlib/markers.py
Executable file
@ -0,0 +1,162 @@
|
||||
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2023 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Parser for the environment markers micro-language defined in PEP 508.
"""

# Note: In PEP 345, the micro-language was Python compatible, so the ast
# module could be used to parse it. However, PEP 508 introduced operators such
# as ~= and === which aren't in Python, necessitating a different approach.

import os
import re
import sys
import platform

from .compat import string_types
from .util import in_venv, parse_marker
from .version import LegacyVersion as LV

__all__ = ['interpret']

# Matches a bare or quoted dotted version, e.g. 1.2, '3.10' or "3.10rc1".
_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
# Marker variables whose values are compared as versions rather than strings.
_VERSION_MARKERS = {'python_version', 'python_full_version'}
|
||||
|
||||
|
||||
def _is_version_marker(s):
    """Return True if *s* names a marker variable holding a version."""
    if not isinstance(s, string_types):
        return False
    return s in _VERSION_MARKERS
|
||||
|
||||
|
||||
def _is_literal(o):
    """Return True if *o* is a non-empty string starting with a quote."""
    return isinstance(o, string_types) and bool(o) and o[0] in '\'"'
|
||||
|
||||
|
||||
def _get_versions(s):
    """
    Extract every version-like substring from *s* and return the set of
    corresponding :class:`LegacyVersion` instances.

    NOTE(review): group 1 of ``_VERSION_PATTERN`` includes the surrounding
    quotes for quoted matches, so quoted entries become versions of the
    quoted text -- TODO confirm this is intended for 'in'/'not in' checks.
    """
    return {LV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)}
|
||||
|
||||
|
||||
class Evaluator(object):
    """
    This class is used to evaluate marker expressions.
    """

    # Maps each marker operator to a two-argument callable implementing it.
    # NOTE(review): '~=' is implemented as >= (x == y or x > y) and '===' as
    # plain equality, which is looser than PEP 440's definitions of those
    # operators -- TODO confirm this approximation is intentional.
    operations = {
        '==': lambda x, y: x == y,
        '===': lambda x, y: x == y,
        '~=': lambda x, y: x == y or x > y,
        '!=': lambda x, y: x != y,
        '<': lambda x, y: x < y,
        '<=': lambda x, y: x == y or x < y,
        '>': lambda x, y: x > y,
        '>=': lambda x, y: x == y or x > y,
        'and': lambda x, y: x and y,
        'or': lambda x, y: x or y,
        'in': lambda x, y: x in y,
        'not in': lambda x, y: x not in y,
    }

    def evaluate(self, expr, context):
        """
        Evaluate a marker expression returned by the :func:`parse_requirement`
        function in the specified context.

        :param expr: Either a string (a quoted literal, or a variable name
                     looked up in *context*) or a dict with 'op', 'lhs' and
                     'rhs' keys describing a binary operation.
        :param context: A mapping of marker variable names to values.
        :return: The value of the expression.
        :raises SyntaxError: For an unknown variable or a comparison of two
                             literals.
        :raises NotImplementedError: For an unsupported operator.
        """
        if isinstance(expr, string_types):
            if expr[0] in '\'"':
                # A quoted string literal: strip the quotes.
                result = expr[1:-1]
            else:
                if expr not in context:
                    raise SyntaxError('unknown variable: %s' % expr)
                result = context[expr]
        else:
            assert isinstance(expr, dict)
            op = expr['op']
            if op not in self.operations:
                raise NotImplementedError('op not implemented: %s' % op)
            elhs = expr['lhs']
            erhs = expr['rhs']
            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
                raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))

            # Operands are evaluated recursively before applying the operator.
            lhs = self.evaluate(elhs, context)
            rhs = self.evaluate(erhs, context)
            # When either side is a version marker, compare as versions
            # rather than as plain strings.
            if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and
                    op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
                lhs = LV(lhs)
                rhs = LV(rhs)
            elif _is_version_marker(elhs) and op in ('in', 'not in'):
                lhs = LV(lhs)
                rhs = _get_versions(rhs)
            result = self.operations[op](lhs, rhs)
        return result
|
||||
|
||||
|
||||
# Extracts the leading major.minor prefix from a full Python version string.
_DIGITS = re.compile(r'\d+\.\d+')
|
||||
|
||||
|
||||
def default_context():
    """
    Build the default PEP 508 marker evaluation context from the current
    interpreter and platform.

    :return: A dict mapping marker variable names to their values.
    """

    def format_full_version(info):
        # Render a version_info-like structure as e.g. '3.11.2', with the
        # release level's initial and serial appended for non-final builds
        # (e.g. '3.12.0rc1' -> '3.12.0c1'-style suffixing).
        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
        kind = info.releaselevel
        if kind != 'final':
            version += kind[0] + str(info.serial)
        return version

    if hasattr(sys, 'implementation'):
        implementation_version = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        # Fallback for interpreters without sys.implementation.
        implementation_version = '0'
        implementation_name = ''

    ppv = platform.python_version()
    m = _DIGITS.match(ppv)
    pv = m.group(0)  # the major.minor prefix, e.g. '3.11'
    result = {
        'implementation_name': implementation_name,
        'implementation_version': implementation_version,
        'os_name': os.name,
        'platform_machine': platform.machine(),
        'platform_python_implementation': platform.python_implementation(),
        'platform_release': platform.release(),
        'platform_system': platform.system(),
        'platform_version': platform.version(),
        'platform_in_venv': str(in_venv()),
        'python_full_version': ppv,
        'python_version': pv,
        'sys_platform': sys.platform,
    }
    return result
|
||||
|
||||
|
||||
# Evaluate the context once at import time; the factory is then removed from
# the module namespace since it is no longer needed.
DEFAULT_CONTEXT = default_context()
del default_context

# A single shared Evaluator instance, used by interpret().
evaluator = Evaluator()
|
||||
|
||||
|
||||
def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup. Entries here
                              override the defaults in ``DEFAULT_CONTEXT``.
    :type execution_context: mapping
    :return: The result of evaluating the marker.
    :raises SyntaxError: If the marker cannot be parsed, or has trailing
                         data which is not a comment.
    """
    try:
        expr, rest = parse_marker(marker)
    except Exception as e:
        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
    # Anything left over after the marker must be a '#' comment.
    if rest and rest[0] != '#':
        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
    context = dict(DEFAULT_CONTEXT)
    if execution_context:
        context.update(execution_context)
    return evaluator.evaluate(expr, context)
|
1031
site-packages/distlib/metadata.py
Executable file
1031
site-packages/distlib/metadata.py
Executable file
File diff suppressed because it is too large
Load Diff
358
site-packages/distlib/resources.py
Executable file
358
site-packages/distlib/resources.py
Executable file
@ -0,0 +1,358 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2013-2017 Vinay Sajip.
|
||||
# Licensed to the Python Software Foundation under a contributor agreement.
|
||||
# See LICENSE.txt and CONTRIBUTORS.txt.
|
||||
#
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import bisect
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import sys
|
||||
import types
|
||||
import zipimport
|
||||
|
||||
from . import DistlibException
|
||||
from .util import cached_property, get_cache_base, Cache
|
||||
|
||||
logger = logging.getLogger(__name__)

# Module-level ResourceCache singleton, created lazily by Resource.file_path.
cache = None    # created when needed
class ResourceCache(Cache):
    """
    A file-system cache used to materialise resources which don't already
    live on the file system (e.g. resources inside a .zip archive).
    """

    def __init__(self, base=None):
        # Default the cache root to <user cache base>/resource-cache.
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        # Always considered stale, so the resource is re-written on each get().
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # The resource is already a real file; use it in place.
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result
class ResourceBase(object):
    """Common state shared by resources and resource containers."""

    def __init__(self, finder, name):
        # Remember the finder that manages this resource, and its name
        # within the package.
        self.name = name
        self.finder = finder
class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False  # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Path of a file-system copy of the resource, created lazily via the
        # module-level ResourceCache singleton on first access.
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        # The resource's contents as bytes, delegated to the owning finder.
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        # The size of the resource's contents in bytes.
        return self.finder.get_size(self)
class ResourceContainer(ResourceBase):
    """
    A resource which contains other resources (i.e. a package directory).
    """
    is_container = True  # Backwards compatibility

    @cached_property
    def resources(self):
        # Set of names of this container's immediate children.
        return self.finder.get_resources(self)
class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    # Bytecode artefacts are never treated as resources; Jython additionally
    # compiles to .class files.
    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        # Resources are located relative to the package module's directory.
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        # Resolve symlinks etc. for real file-system paths.
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        """
        Turn a '/'-separated resource name into an absolute path under the
        package directory.
        """
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):    # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        # Existence test; overridden for non-file-system finders.
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # A None prefix tells ResourceCache the path is usable as-is
        # (no materialisation needed for plain files).
        return None, resource.path

    def find(self, resource_name):
        """
        Find a named resource, returning a :class:`Resource`, a
        :class:`ResourceContainer` for a directory, or None if not found.
        """
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        # New binary stream over the resource's file, one per call.
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        """
        Return the set of names of a container's children, excluding
        bytecode artefacts and __pycache__.
        """
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """
        Yield the named resource and, recursively, everything below it.
        Containers are yielded before their contents (breadth-first via the
        'todo' queue).
        """
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            # Iterating from the package root: child names
                            # are already relative.
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child
class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """

    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of '<archive path>' + separator, used to strip the archive
        # prefix from absolute resource paths.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted member names, so directory membership can be decided with
        # bisect instead of a linear scan.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # No realpath() here: paths inside an archive are used verbatim.
        return path

    def _find(self, path):
        """
        Report whether 'path' exists in the archive, either as an exact
        member or as a directory prefix of some member.
        """
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                # A directory exists if some member sorts right after 'path/'
                # and starts with it.
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        # Archive members must be materialised; key the cache on the
        # archive's path.
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        # Index 3 of a zipimport directory entry holds the file size.
        path = resource.path[self.prefix_len:]
        return self._files[path][3]

    def get_resources(self, resource):
        """
        Return the set of names of the container's immediate children,
        scanning the sorted index for entries under 'path/'.
        """
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        # A 'directory' in an archive is any 'path/' prefix of some member.
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result
# Maps loader types to the finder class able to serve resources for modules
# loaded by that kind of loader. type(None) covers modules with no __loader__.
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    # The standard file-based loaders all map to the plain finder.
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    # See issue #146
    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass
def register_finder(loader, finder_maker):
    """
    Register a callable which makes finders for modules loaded by instances
    of *loader*'s type.
    """
    _finder_registry[type(loader)] = finder_maker
# Cache of package name -> ResourceFinder instance, populated by finder().
_finder_cache = {}
def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    # Fast path: one finder per package, cached for the process lifetime.
    if package in _finder_cache:
        return _finder_cache[package]
    if package not in sys.modules:
        __import__(package)
    module = sys.modules[package]
    path = getattr(module, '__path__', None)
    if path is None:
        raise DistlibException('You cannot get a finder for a module, '
                               'only for a package')
    loader = getattr(module, '__loader__', None)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        raise DistlibException('Unable to locate finder for %r' % package)
    result = finder_maker(module)
    _finder_cache[package] = result
    return result
# Stand-in module used by finder_for_path() to adapt a bare path to the
# finder API, which expects a module object with __file__ and __loader__.
_dummy_module = types.ModuleType(str('__dummy__'))
def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path, or None when no
             finder is registered for the path's importer type.
    """
    # Run any path hooks so the importer for this path lands in the cache.
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    make_finder = _finder_registry.get(type(loader))
    if not make_finder:
        return None
    # Dress the bare path up as a module, which is what finders expect.
    module = _dummy_module
    module.__file__ = os.path.join(path, '')
    module.__loader__ = loader
    return make_finder(module)
447
site-packages/distlib/scripts.py
Executable file
447
site-packages/distlib/scripts.py
Executable file
@ -0,0 +1,447 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2013-2023 Vinay Sajip.
|
||||
# Licensed to the Python Software Foundation under a contributor agreement.
|
||||
# See LICENSE.txt and CONTRIBUTORS.txt.
|
||||
#
|
||||
from io import BytesIO
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
import time
|
||||
from zipfile import ZipInfo
|
||||
|
||||
from .compat import sysconfig, detect_encoding, ZipFile
|
||||
from .resources import finder
|
||||
from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Windows manifest embedded alongside launcher executables; %s is filled in
# with the executable's base name by get_manifest().
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')

# Template for generated entry-point scripts; filled in by _get_script_text()
# with the module, imported name and callable of an export entry.
SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
'''

# Pre-fetch the contents of all executable wrapper stubs.
# This is to address https://github.com/pypa/pip/issues/12666.
# When updating pip, we rename the old pip in place before installing the
# new version. If we try to fetch a wrapper *after* that rename, the finder
# machinery will be confused as the package is no longer available at the
# location where it was imported from. So we load everything into memory in
# advance.

if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):
    # Issue 31: don't hardcode an absolute package name, but
    # determine it relative to the current package
    DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0]

    # Map of wrapper .exe name -> its bytes, loaded eagerly (see above).
    WRAPPERS = {
        r.name: r.bytes
        for r in finder(DISTLIB_PACKAGE).iterator("")
        if r.name.endswith(".exe")
    }
def enquote_executable(executable):
|
||||
if ' ' in executable:
|
||||
# make sure we quote only the executable in case of env
|
||||
# for example /usr/bin/env "/dir with spaces/bin/jython"
|
||||
# instead of "/usr/bin/env /dir with spaces/bin/jython"
|
||||
# otherwise whole
|
||||
if executable.startswith('/usr/bin/env '):
|
||||
env, _executable = executable.split(' ', 1)
|
||||
if ' ' in _executable and not _executable.startswith('"'):
|
||||
executable = '%s "%s"' % (env, _executable)
|
||||
else:
|
||||
if not executable.startswith('"'):
|
||||
executable = '"%s"' % executable
|
||||
return executable
|
||||
|
||||
|
||||
# Keep the old name around (for now), as there is at least one project using it!
|
||||
_enquote_executable = enquote_executable
|
||||
|
||||
|
||||
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    # Template used for generated entry-point scripts (see module level).
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None):
        """
        :param source_dir: Directory containing source scripts to copy.
        :param target_dir: Directory to write scripts/launchers to.
        :param add_launchers: On Windows, wrap scripts in .exe launchers.
        :param dry_run: If True, don't actually write files.
        :param fileop: Optional FileOperator to use for file operations.
        """
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt')
        self.version_info = sys.version_info

    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, switch python -> pythonw so no console
        # window is shown.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover

        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            # Jython-only: decide whether the executable can be used directly
            # in a shebang or needs to go through /usr/bin/env.
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _build_shebang(self, executable, post_interp):
        """
        Build a shebang line. In the simple case (on Windows, or a shebang line
        which is not too long or contains spaces) use a simple formulation for
        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
        shebang which allows the script to run either under Python or sh, using
        suitable quoting. Thanks to Harald Nordgren for his input.

        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
                  https://hg.mozilla.org/mozilla-central/file/tip/mach
        """
        if os.name != 'posix':
            simple_shebang = True
        elif getattr(sys, "cross_compiling", False):
            # In a cross-compiling environment, the shebang will likely be a
            # script; this *must* be invoked with the "safe" version of the
            # shebang, or else using os.exec() to run the entry script will
            # fail, raising "OSError 8 [Errno 8] Exec format error".
            simple_shebang = False
        else:
            # Add 3 for '#!' prefix and newline suffix.
            shebang_length = len(executable) + len(post_interp) + 3
            if sys.platform == 'darwin':
                max_shebang_length = 512
            else:
                max_shebang_length = 127
            simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length))

        if simple_shebang:
            result = b'#!' + executable + post_interp + b'\n'
        else:
            # sh/Python polyglot: sh sees an exec line; Python sees a string.
            result = b'#!/bin/sh\n'
            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
            result += b"' '''\n"
        return result

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """
        Compute the shebang bytes for a script, choosing the interpreter and
        validating that the result is decodable in both UTF-8 and the
        script's own encoding.
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            if os.name == 'nt':
                # for Python builds from source on Windows, no Python executables with
                # a version suffix are created, so we use python.exe
                executable = os.path.join(sysconfig.get_config_var('BINDIR'),
                                          'python%s' % (sysconfig.get_config_var('EXE')))
            else:
                executable = os.path.join(
                    sysconfig.get_config_var('BINDIR'),
                    'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)

        # Normalise case for Windows - COMMENTED OUT
        # executable = os.path.normcase(executable)
        # N.B. The normalising operation above has been commented out: See
        # issue #124. Although paths in Windows are generally case-insensitive,
        # they aren't always. For example, a path containing a ẞ (which is a
        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
        # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
        # Windows as equivalent in path names.

        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp and
                '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = self._build_shebang(executable, post_interp)
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError('The shebang (%r) is not decodable '
                                 'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        # Fill the script template from an export entry (module:attr spec).
        return self.script_template % dict(
            module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        """Return the Windows manifest XML for the named executable."""
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """
        Write the script bytes under each of *names* in the target directory,
        either as plain scripts or wrapped in a Windows .exe launcher.
        Written paths are appended to *filenames*.
        """
        use_launcher = self.add_launchers and self._is_nt
        if not use_launcher:
            script_bytes = shebang + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            # The launcher .exe is followed by the shebang and a zip archive
            # containing the script as __main__.py.
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                # Honour SOURCE_DATE_EPOCH for reproducible builds.
                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
                if source_date_epoch:
                    date_time = time.gmtime(int(source_date_epoch))[:6]
                    zinfo = ZipInfo(filename='__main__.py', date_time=date_time)
                    zf.writestr(zinfo, script_bytes)
                else:
                    zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)  # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass  # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
                filenames.append(outname)

    variant_separator = '-'

    def get_script_filenames(self, name):
        """
        Return the set of script names to write for *name*, one per enabled
        variant: '' -> name, 'X' -> nameN, 'X.Y' -> name<sep>N.M.
        """
        result = set()
        if '' in self.variants:
            result.add(name)
        if 'X' in self.variants:
            result.add('%s%s' % (name, self.version_info[0]))
        if 'X.Y' in self.variants:
            result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1]))
        return result

    def _make_script(self, entry, filenames, options=None):
        """Generate a script from an export entry and write its variants."""
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        scriptnames = self.get_script_filenames(entry.name)
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        """
        Copy a source script to the target directory, rewriting its shebang
        when the first line invokes Python.
        """
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s is an empty file (skipping)', script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            # No Python shebang on the first line: copy verbatim.
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script, self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        # Dry-run state is delegated to the underlying FileOperator.
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            """
            Return launcher stub bytes for *kind* ('t' console / 'w' GUI),
            selected by pointer size and platform from the pre-fetched
            WRAPPERS mapping.
            """
            if struct.calcsize('P') == 8:  # 64-bit
                bits = '64'
            else:
                bits = '32'
            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
            if name not in WRAPPERS:
                msg = ('Unable to find resource %s in package %s' %
                       (name, DISTLIB_PACKAGE))
                raise ValueError(msg)
            return WRAPPERS[name]

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
BIN
site-packages/distlib/t32.exe
Executable file
BIN
site-packages/distlib/t32.exe
Executable file
Binary file not shown.
BIN
site-packages/distlib/t64-arm.exe
Executable file
BIN
site-packages/distlib/t64-arm.exe
Executable file
Binary file not shown.
BIN
site-packages/distlib/t64.exe
Executable file
BIN
site-packages/distlib/t64.exe
Executable file
Binary file not shown.
1984
site-packages/distlib/util.py
Executable file
1984
site-packages/distlib/util.py
Executable file
File diff suppressed because it is too large
Load Diff
750
site-packages/distlib/version.py
Executable file
750
site-packages/distlib/version.py
Executable file
@ -0,0 +1,750 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2012-2023 The Python Software Foundation.
|
||||
# See LICENSE.txt and CONTRIBUTORS.txt.
|
||||
#
|
||||
"""
|
||||
Implementation of a flexible versioning scheme providing support for PEP-440,
|
||||
setuptools-compatible and semantic versioning.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from .compat import string_types
|
||||
from .util import parse_requirement
|
||||
|
||||
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
|
||||
'LegacyVersion', 'LegacyMatcher',
|
||||
'SemanticVersion', 'SemanticMatcher',
|
||||
'UnsupportedVersionError', 'get_scheme']
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UnsupportedVersionError(ValueError):
    """Raised for version strings a scheme cannot handle."""
class Version(object):
    """
    Abstract base for version objects. Subclasses implement :meth:`parse`,
    which turns a version string into a non-empty tuple of comparable parts;
    ordering, equality, hashing and string conversion are all derived from
    that tuple here. Instances of different subclasses never compare.
    """

    def __init__(self, s):
        s = s.strip()
        self._string = s
        parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0
        self._parts = parts

    def parse(self, s):
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        # Versions from different schemes must never be comparable.
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not (self < other or self == other)

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        raise NotImplementedError('Please implement in subclasses.')
class Matcher(object):
    """Base class for requirement matchers.

    Parses a requirement string such as ``'foo (>= 1.0, != 1.3)'`` into
    a distribution name plus a tuple of ``(operator, constraint, prefix)``
    triples, and matches candidate versions against all of them.
    """

    # Subclasses must set this to the Version subclass used to parse
    # constraint and candidate version strings.
    version_class = None

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    # this is a method only to support alternative implementations
    # via overriding
    def parse_requirement(self, s):
        """Parse requirement string *s*; delegates to the module helper."""
        return parse_requirement(s)

    def __init__(self, s):
        """Parse *s* into name, key and constraint triples.

        Raises ValueError if the string is not a valid requirement, or
        if ``version_class`` has not been set by the subclass.
        """
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        r = self.parse_requirement(s)
        if not r:
            raise ValueError('Not valid: %r' % s)
        self.name = r.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if r.constraints:
            for op, s in r.constraints:
                if s.endswith('.*'):
                    # '.*' prefix matching is only defined for (in)equality.
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    # Just to check that vn is a valid version
                    self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            # The operator table may name a method instead of a callable.
            if isinstance(f, string_types):
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        """The pinned version when this matcher is exactly one '=='/'==='
        constraint, else None."""
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        # Matchers compare only with same-type matchers for the same name.
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string
|
||||
|
||||
|
||||
# Loose PEP 440 version pattern: optional 'v' prefix, optional epoch
# ('N!'), a dotted release clause, then optional pre-release
# (a/alpha/b/beta/c/rc/pre/preview), post-release (post/r/rev) and dev
# segments, and an optional local version label after '+'.
# Compiled case-insensitively.
PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?'
                               r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I)
|
||||
|
||||
|
||||
def _pep_440_key(s):
    """Compute a sort key for PEP 440 version string *s*.

    Returns ``(epoch, nums, pre, post, dev, local)``, with each
    component normalised (via sentinel strings such as 'z', '_' and
    'final') so that plain tuple comparison yields PEP 440 ordering.
    Raises UnsupportedVersionError if *s* does not parse.
    """
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    # Drop trailing zeroes so that X.Y == X.Y.0 == X.Y.0.0.
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        # groups[0] includes the trailing '!'.
        epoch = int(groups[0][:-1])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        # A missing serial (e.g. '1.0a') counts as zero.
        if pre[1] is None:
            pre = pre[0], 0
        else:
            pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        if post[1] is None:
            post = post[0], 0
        else:
            post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        if dev[1] is None:
            dev = dev[0], 0
        else:
            dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)  # to sort before a0
        else:
            pre = ('z',)  # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)  # sort before 'a'
    if not dev:
        dev = ('final',)

    return epoch, nums, pre, post, dev, local


# Name used throughout the module for the normalized (PEP 440) key.
_normalized_key = _pep_440_key
|
||||
|
||||
|
||||
class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """

    def parse(self, s):
        """Parse *s* with the PEP 440 key function, also recording the
        exact release clause (with trailing zeroes) for prefix matching."""
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)  # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    # Tags whose presence in a part marks a pre-release.
    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        """True if any parsed part carries a pre-release tag.

        ``_parts`` is ``(epoch, nums, pre, post, dev, local)``.  The
        epoch slot is a plain int; the previous implementation
        subscripted every truthy part, so any non-zero epoch (e.g.
        '1!1.0') raised TypeError.  Only inspect the tuple-valued
        slots instead.
        """
        return any(
            isinstance(t, tuple) and t and t[0] in self.PREREL_TAGS
            for t in self._parts)
|
||||
|
||||
|
||||
def _match_prefix(x, y):
|
||||
x = str(x)
|
||||
y = str(y)
|
||||
if x == y:
|
||||
return True
|
||||
if not x.startswith(y):
|
||||
return False
|
||||
n = len(y)
|
||||
return x[n] == '.'
|
||||
|
||||
|
||||
class NormalizedMatcher(Matcher):
    """Matcher implementing PEP 440 comparison semantics for
    :class:`NormalizedVersion`, including '.*' prefix matching and
    '~=' compatible-release handling."""

    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        """Strip the local segment ('+...') from *version* when the
        constraint does not mention one, so local labels are ignored
        in comparisons unless explicitly constrained."""
        if prefix:
            # For a prefix match the constraint is still a plain string.
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        # Exclude versions from the constraint's own release-clause
        # prefix (e.g. pre-releases of the excluded version).
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        # Exclude versions from the constraint's own release-clause
        # prefix (e.g. post-releases of the excluded version).
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        # '== X.*' does a prefix match; plain '==' an exact comparison.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        # '===' compares raw strings, with no version semantics at all.
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        # '~=': at least the constraint, and within the release series
        # obtained by dropping the last numeric component.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        # if not prefix:
        #     return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)
|
||||
|
||||
|
||||
# Whole-string clean-ups applied by _suggest_semantic_version before
# splitting off the numeric prefix.
_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),  # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),  # .N -> 0.N at start
    (re.compile('^[.-]'), ''),  # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),  # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),  # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),  # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),  # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),  # remove parentheses
)

# Clean-ups applied only to the non-numeric suffix.
_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),  # remove leading puncts
    (re.compile('[,*")([\\]]'), ''),  # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),  # replace illegal chars
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\.$'), ''),  # trailing '.'
)

# Leading dotted-integer run, e.g. '1.2.3' in '1.2.3-beta'.
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
|
||||
|
||||
|
||||
def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.

    Returns a semver-valid string, or None if no suggestion works.
    """
    result = s.strip().lower()
    # Apply the global clean-up substitutions first.
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        # semver requires exactly MAJOR.MINOR.PATCH: pad short
        # prefixes with zeroes, fold extra components into the suffix.
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
        prefix = '.'.join([str(i) for i in prefix])
        suffix = suffix.strip()
    if suffix:
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        # 'dev' suffixes become pre-release info ('-'); anything else
        # is treated as build metadata ('+').
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result
|
||||
|
||||
|
||||
def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    # NOTE: the order of these replacements is significant.
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    # TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1          ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    # Final sanity check: only return the result if it now parses.
    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
|
||||
|
||||
#
|
||||
# Legacy version processing (distribute-compatible)
|
||||
#
|
||||
|
||||
|
||||
# Tokeniser for legacy (setuptools/distribute-style) versions: runs of
# letters, runs of digits, and the '.'/'-' separators.
_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)

# Canonicalisation of well-known tokens; None means "drop the token".
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',  # '@' sorts before any letter, so dev releases come first
    '': None,
    '.': None,
}
|
||||
|
||||
|
||||
def _legacy_key(s):
    """Compute a setuptools/distribute-compatible sort key for *s*.

    Numeric parts are zero-padded so they compare numerically as
    strings; alphabetic parts are prefixed with '*' so they sort before
    numbers, with '*final' marking the release itself.
    """
    def get_parts(s):
        # Tokenise and canonicalise, then append the '*final' marker.
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    # zero-pad numbers for correct lexicographic ordering
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                # A pre-release tag cancels any preceding '*final-'
                # markers and trailing zero parts.
                while result and result[-1] == '*final-':
                    result.pop()
                while result and result[-1] == '00000000':
                    result.pop()
        result.append(p)
    return tuple(result)
|
||||
|
||||
|
||||
class LegacyVersion(Version):
    """Version using the setuptools/distribute-compatible ordering."""

    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        # Any starred part sorting before '*final' (e.g. '*a', '*c')
        # marks a pre-release.
        return any(
            isinstance(x, string_types) and x.startswith('*') and x < '*final'
            for x in self._parts)
|
||||
|
||||
|
||||
class LegacyMatcher(Matcher):
    """Matcher for legacy versions; '~=' gets a prefix-based meaning."""

    version_class = LegacyVersion

    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    # Leading numeric release clause of a constraint, e.g. '1.2' in '1.2b1'.
    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        """Implement '~=' for legacy versions via a numeric-prefix match."""
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            # No numeric clause to anchor on; be permissive but warn.
            logger.warning('Cannot compute compatible match for version %s '
                           ' and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            # Compatibility allows the last numeric component to vary.
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)
|
||||
|
||||
#
|
||||
# Semantic versioning
|
||||
#
|
||||
|
||||
|
||||
# Semantic version: MAJOR.MINOR.PATCH with optional '-prerelease' and
# '+build' dotted suffixes (see https://semver.org), case-insensitive.
_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
|
||||
|
||||
|
||||
def is_semver(s):
    """Return the regex match object if *s* is a valid semantic
    version, or None otherwise (usable as a boolean)."""
    return _SEMVER_RE.match(s)
|
||||
|
||||
|
||||
def _semantic_key(s):
    """Compute a sort key implementing semver precedence for *s*.

    Returns ``((major, minor, patch), pre, build)``; raises
    UnsupportedVersionError if *s* is not a semantic version.
    """
    def make_tuple(s, absent):
        # *absent* is a sentinel placing the "component missing" case
        # correctly relative to present components.
        if s is None:
            result = (absent,)
        else:
            # Drop the leading '-'/'+' delimiter before splitting.
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values so simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build
|
||||
|
||||
|
||||
class SemanticVersion(Version):
    """Version implementing semantic-versioning (semver) ordering."""

    def parse(self, s):
        # Delegate to the module-level semver key function.
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        # '|' is the sentinel _semantic_key uses for "no pre-release
        # component"; anything else means a pre-release.
        pre = self._parts[1]
        return pre[0] != '|'
|
||||
|
||||
|
||||
class SemanticMatcher(Matcher):
    """Matcher for semantic versions; inherits the generic operator
    table from Matcher — only the version class differs."""
    version_class = SemanticVersion
|
||||
|
||||
|
||||
class VersionScheme(object):
    """Bundle of a sort-key function, a Matcher class and an optional
    suggester that together define one versioning scheme."""

    def __init__(self, key, matcher, suggester=None):
        self.key = key              # function mapping a string to a sort key
        self.matcher = matcher      # Matcher subclass for this scheme
        self.suggester = suggester  # optional fixer for invalid versions

    def is_valid_version(self, s):
        """Return True if *s* parses as a version under this scheme."""
        try:
            self.matcher.version_class(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_matcher(self, s):
        """Return True if *s* parses as a requirement string."""
        try:
            self.matcher(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        # See issue #140. Be tolerant of a single trailing comma.
        if s.endswith(','):
            s = s[:-1]
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        """Return a suggested valid form of *s*, or None when no
        suggester was configured."""
        return None if self.suggester is None else self.suggester(s)
|
||||
|
||||
|
||||
# Registry of the supported version schemes, keyed by name.
_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # Any string is a valid legacy version, so the suggester is the
    # identity function.  (It previously took a spurious extra 'self'
    # argument; VersionScheme.suggest calls ``self.suggester(s)`` with a
    # single argument — instance attributes are not bound methods — so
    # get_scheme('legacy').suggest(...) raised TypeError.)
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

# The normalized (PEP 440) scheme is the default.
_SCHEMES['default'] = _SCHEMES['normalized']
|
||||
|
||||
|
||||
def get_scheme(name):
    """Return the :class:`VersionScheme` registered under *name*.

    Raises ValueError for an unknown scheme name.
    """
    if name in _SCHEMES:
        return _SCHEMES[name]
    raise ValueError('unknown scheme name: %r' % name)
|
BIN
site-packages/distlib/w32.exe
Executable file
BIN
site-packages/distlib/w32.exe
Executable file
Binary file not shown.
BIN
site-packages/distlib/w64-arm.exe
Executable file
BIN
site-packages/distlib/w64-arm.exe
Executable file
Binary file not shown.
BIN
site-packages/distlib/w64.exe
Executable file
BIN
site-packages/distlib/w64.exe
Executable file
Binary file not shown.
1100
site-packages/distlib/wheel.py
Executable file
1100
site-packages/distlib/wheel.py
Executable file
File diff suppressed because it is too large
Load Diff
1
site-packages/distutils-precedence.pth
Executable file
1
site-packages/distutils-precedence.pth
Executable file
@ -0,0 +1 @@
|
||||
import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim();
|
1
site-packages/filelock-3.16.1.dist-info/INSTALLER
Executable file
1
site-packages/filelock-3.16.1.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
59
site-packages/filelock-3.16.1.dist-info/METADATA
Executable file
59
site-packages/filelock-3.16.1.dist-info/METADATA
Executable file
@ -0,0 +1,59 @@
|
||||
Metadata-Version: 2.3
|
||||
Name: filelock
|
||||
Version: 3.16.1
|
||||
Summary: A platform independent file lock.
|
||||
Project-URL: Documentation, https://py-filelock.readthedocs.io
|
||||
Project-URL: Homepage, https://github.com/tox-dev/py-filelock
|
||||
Project-URL: Source, https://github.com/tox-dev/py-filelock
|
||||
Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues
|
||||
Maintainer-email: Bernát Gábor <gaborjbernat@gmail.com>
|
||||
License-Expression: Unlicense
|
||||
License-File: LICENSE
|
||||
Keywords: application,cache,directory,log,user
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: The Unlicense (Unlicense)
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Topic :: Internet
|
||||
Classifier: Topic :: Software Development :: Libraries
|
||||
Classifier: Topic :: System
|
||||
Requires-Python: >=3.8
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: furo>=2024.8.6; extra == 'docs'
|
||||
Requires-Dist: sphinx-autodoc-typehints>=2.4.1; extra == 'docs'
|
||||
Requires-Dist: sphinx>=8.0.2; extra == 'docs'
|
||||
Provides-Extra: testing
|
||||
Requires-Dist: covdefaults>=2.3; extra == 'testing'
|
||||
Requires-Dist: coverage>=7.6.1; extra == 'testing'
|
||||
Requires-Dist: diff-cover>=9.2; extra == 'testing'
|
||||
Requires-Dist: pytest-asyncio>=0.24; extra == 'testing'
|
||||
Requires-Dist: pytest-cov>=5; extra == 'testing'
|
||||
Requires-Dist: pytest-mock>=3.14; extra == 'testing'
|
||||
Requires-Dist: pytest-timeout>=2.3.1; extra == 'testing'
|
||||
Requires-Dist: pytest>=8.3.3; extra == 'testing'
|
||||
Requires-Dist: virtualenv>=20.26.4; extra == 'testing'
|
||||
Provides-Extra: typing
|
||||
Requires-Dist: typing-extensions>=4.12.2; (python_version < '3.11') and extra == 'typing'
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# filelock
|
||||
|
||||
[](https://pypi.org/project/filelock/)
|
||||
[](https://pypi.org/project/filelock/)
|
||||
[](https://py-filelock.readthedocs.io/en/latest/?badge=latest)
|
||||
[](https://github.com/psf/black)
|
||||
[](https://pepy.tech/project/filelock)
|
||||
[](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml)
|
||||
|
||||
For more information check out the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html).
|
24
site-packages/filelock-3.16.1.dist-info/RECORD
Executable file
24
site-packages/filelock-3.16.1.dist-info/RECORD
Executable file
@ -0,0 +1,24 @@
|
||||
filelock-3.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
filelock-3.16.1.dist-info/METADATA,sha256=LXL5-XQe_eTKkdNs76A6jSicQ1DBSTXqkDcjsprWvIM,2944
|
||||
filelock-3.16.1.dist-info/RECORD,,
|
||||
filelock-3.16.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
||||
filelock-3.16.1.dist-info/licenses/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210
|
||||
filelock/__init__.py,sha256=_t_-OAGXo_qyPa9lNQ1YnzVYEvSW3I0onPqzpomsVVg,1769
|
||||
filelock/__pycache__/__init__.cpython-311.pyc,,
|
||||
filelock/__pycache__/_api.cpython-311.pyc,,
|
||||
filelock/__pycache__/_error.cpython-311.pyc,,
|
||||
filelock/__pycache__/_soft.cpython-311.pyc,,
|
||||
filelock/__pycache__/_unix.cpython-311.pyc,,
|
||||
filelock/__pycache__/_util.cpython-311.pyc,,
|
||||
filelock/__pycache__/_windows.cpython-311.pyc,,
|
||||
filelock/__pycache__/asyncio.cpython-311.pyc,,
|
||||
filelock/__pycache__/version.cpython-311.pyc,,
|
||||
filelock/_api.py,sha256=GVeBEGjpDD8S1bYqG6_u0MZfbYHS6XrHs_n3PVKq-h0,14541
|
||||
filelock/_error.py,sha256=-5jMcjTu60YAvAO1UbqDD1GIEjVkwr8xCFwDBtMeYDg,787
|
||||
filelock/_soft.py,sha256=haqtc_TB_KJbYv2a8iuEAclKuM4fMG1vTcp28sK919c,1711
|
||||
filelock/_unix.py,sha256=-FXP0tjInBHUYygOlMpp4taUmD87QOkrD_4ybg_iT7Q,2259
|
||||
filelock/_util.py,sha256=QHBoNFIYfbAThhotH3Q8E2acFc84wpG49-T-uu017ZE,1715
|
||||
filelock/_windows.py,sha256=eMKL8dZKrgekf5VYVGR14an29JGEInRtUO8ui9ABywg,2177
|
||||
filelock/asyncio.py,sha256=3D4JP4Ms5IXTGib5eOekyr6uH6rZlieV_moVGY36juA,12463
|
||||
filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
filelock/version.py,sha256=KSOBzuLwiqiVWDPGfMj1ntr25YrY6JBDr8RvinQX_FM,413
|
4
site-packages/filelock-3.16.1.dist-info/WHEEL
Executable file
4
site-packages/filelock-3.16.1.dist-info/WHEEL
Executable file
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.25.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
24
site-packages/filelock-3.16.1.dist-info/licenses/LICENSE
Executable file
24
site-packages/filelock-3.16.1.dist-info/licenses/LICENSE
Executable file
@ -0,0 +1,24 @@
|
||||
This is free and unencumbered software released into the public domain.
|
||||
|
||||
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||
distribute this software, either in source code form or as a compiled
|
||||
binary, for any purpose, commercial or non-commercial, and by any
|
||||
means.
|
||||
|
||||
In jurisdictions that recognize copyright laws, the author or authors
|
||||
of this software dedicate any and all copyright interest in the
|
||||
software to the public domain. We make this dedication for the benefit
|
||||
of the public at large and to the detriment of our heirs and
|
||||
successors. We intend this dedication to be an overt act of
|
||||
relinquishment in perpetuity of all present and future rights to this
|
||||
software under copyright law.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
For more information, please refer to <http://unlicense.org>
|
70
site-packages/filelock/__init__.py
Executable file
70
site-packages/filelock/__init__.py
Executable file
@ -0,0 +1,70 @@
|
||||
"""
|
||||
A platform independent file lock that supports the with-statement.
|
||||
|
||||
.. autodata:: filelock.__version__
|
||||
:no-value:
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ._api import AcquireReturnProxy, BaseFileLock
|
||||
from ._error import Timeout
|
||||
from ._soft import SoftFileLock
|
||||
from ._unix import UnixFileLock, has_fcntl
|
||||
from ._windows import WindowsFileLock
|
||||
from .asyncio import (
|
||||
AsyncAcquireReturnProxy,
|
||||
AsyncSoftFileLock,
|
||||
AsyncUnixFileLock,
|
||||
AsyncWindowsFileLock,
|
||||
BaseAsyncFileLock,
|
||||
)
|
||||
from .version import version
|
||||
|
||||
#: version of the project as a string
|
||||
__version__: str = version
|
||||
|
||||
|
||||
if sys.platform == "win32": # pragma: win32 cover
|
||||
_FileLock: type[BaseFileLock] = WindowsFileLock
|
||||
_AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
|
||||
else: # pragma: win32 no cover # noqa: PLR5501
|
||||
if has_fcntl:
|
||||
_FileLock: type[BaseFileLock] = UnixFileLock
|
||||
_AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
|
||||
else:
|
||||
_FileLock = SoftFileLock
|
||||
_AsyncFileLock = AsyncSoftFileLock
|
||||
if warnings is not None:
|
||||
warnings.warn("only soft file lock is available", stacklevel=2)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
FileLock = SoftFileLock
|
||||
AsyncFileLock = AsyncSoftFileLock
|
||||
else:
|
||||
#: Alias for the lock, which should be used for the current platform.
|
||||
FileLock = _FileLock
|
||||
AsyncFileLock = _AsyncFileLock
|
||||
|
||||
|
||||
__all__ = [
|
||||
"AcquireReturnProxy",
|
||||
"AsyncAcquireReturnProxy",
|
||||
"AsyncFileLock",
|
||||
"AsyncSoftFileLock",
|
||||
"AsyncUnixFileLock",
|
||||
"AsyncWindowsFileLock",
|
||||
"BaseAsyncFileLock",
|
||||
"BaseFileLock",
|
||||
"FileLock",
|
||||
"SoftFileLock",
|
||||
"Timeout",
|
||||
"UnixFileLock",
|
||||
"WindowsFileLock",
|
||||
"__version__",
|
||||
]
|
403
site-packages/filelock/_api.py
Executable file
403
site-packages/filelock/_api.py
Executable file
@ -0,0 +1,403 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import warnings
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from threading import local
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from weakref import WeakValueDictionary
|
||||
|
||||
from ._error import Timeout
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import sys
|
||||
from types import TracebackType
|
||||
|
||||
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
|
||||
from typing import Self
|
||||
else: # pragma: no cover (<py311)
|
||||
from typing_extensions import Self
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger("filelock")
|
||||
|
||||
|
||||
# This is a helper class which is returned by :meth:`BaseFileLock.acquire` and wraps the lock to make sure __enter__
|
||||
# is not called twice when entering the with statement. If we would simply return *self*, the lock would be acquired
|
||||
# again in the *__enter__* method of the BaseFileLock, but not released again automatically. issue #37 (memory leak)
|
||||
class AcquireReturnProxy:
    """Context-manager wrapper handed out by :meth:`BaseFileLock.acquire`.

    Entering the ``with`` block yields the wrapped (already acquired) lock;
    leaving it releases the lock exactly once, so returning the proxy instead
    of the lock itself avoids a double ``__enter__`` acquisition.
    """

    def __init__(self, lock: BaseFileLock) -> None:
        # The lock object that will be released when the context exits.
        self.lock = lock

    def __enter__(self) -> BaseFileLock:
        # The caller acquired the lock before constructing this proxy,
        # so entering simply hands the lock back.
        return self.lock

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Release unconditionally — whether or not the body raised.
        self.lock.release()
|
||||
|
||||
|
||||
@dataclass
class FileLockContext:
    """A dataclass which holds the mutable state for a ``BaseFileLock`` object."""

    # The context is held in a separate class to allow optional use of thread local storage via the
    # ThreadLocalFileContext subclass (which also inherits threading.local).

    #: The path to the lock file.
    lock_file: str

    #: The default timeout value, in seconds (negative means no timeout).
    timeout: float

    #: The mode (file permissions) for the lock files.
    mode: int

    #: Whether the lock should be blocking or not.
    blocking: bool

    #: The file descriptor for the *_lock_file* as it is returned by the os.open() function, not None when lock held
    lock_file_fd: int | None = None

    #: The lock counter is used for implementing the nested locking mechanism.
    lock_counter: int = 0  # When the lock is acquired is increased and the lock is only released, when this value is 0
|
||||
|
||||
|
||||
class ThreadLocalFileContext(FileLockContext, local):
    """A thread local version of the ``FileLockContext`` class."""
    # Inheriting from threading.local gives every thread its own independent
    # copy of the context fields, which makes the owning lock thread-local.
|
||||
|
||||
|
||||
class FileLockMeta(ABCMeta):
    """Metaclass implementing the optional per-path singleton behaviour of file locks."""

    def __call__(  # noqa: PLR0913
        cls,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = True,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        **kwargs: Any,  # capture remaining kwargs for subclasses # noqa: ANN401
    ) -> BaseFileLock:
        """
        Create (or, for singletons, look up) a lock instance for *lock_file*.

        When ``is_singleton`` is true and an instance for the same path already
        exists, it is reused — but only if the constructor arguments match the
        existing instance exactly; otherwise a ``ValueError`` is raised.
        """
        if is_singleton:
            # _instances is a per-subclass WeakValueDictionary set up in
            # BaseFileLock.__init_subclass__, keyed by the lock-file path.
            instance = cls._instances.get(str(lock_file))  # type: ignore[attr-defined]
            if instance:
                params_to_check = {
                    "thread_local": (thread_local, instance.is_thread_local()),
                    "timeout": (timeout, instance.timeout),
                    "mode": (mode, instance.mode),
                    "blocking": (blocking, instance.blocking),
                }

                non_matching_params = {
                    name: (passed_param, set_param)
                    for name, (passed_param, set_param) in params_to_check.items()
                    if passed_param != set_param
                }
                if not non_matching_params:
                    return cast(BaseFileLock, instance)

                # parameters do not match; raise error
                msg = "Singleton lock instances cannot be initialized with differing arguments"
                msg += "\nNon-matching arguments: "
                for param_name, (passed_param, set_param) in non_matching_params.items():
                    msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
                raise ValueError(msg)

        # Workaround to make `__init__`'s params optional in subclasses
        # E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant
        # (https://github.com/tox-dev/filelock/pull/340)

        all_params = {
            "timeout": timeout,
            "mode": mode,
            "thread_local": thread_local,
            "blocking": blocking,
            "is_singleton": is_singleton,
            **kwargs,
        }

        # Only forward the keyword arguments that the subclass __init__ actually
        # declares, so subclasses with narrower signatures still construct cleanly.
        present_params = inspect.signature(cls.__init__).parameters  # type: ignore[misc]
        init_params = {key: value for key, value in all_params.items() if key in present_params}

        instance = super().__call__(lock_file, **init_params)

        if is_singleton:
            cls._instances[str(lock_file)] = instance  # type: ignore[attr-defined]

        return cast(BaseFileLock, instance)
|
||||
|
||||
|
||||
class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta):
    """Abstract base class for a file lock object."""

    # Per-subclass registry of singleton instances keyed by lock-file path.
    # A WeakValueDictionary so entries vanish once the lock object is garbage collected.
    _instances: WeakValueDictionary[str, BaseFileLock]

    def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
        """Setup unique state for lock subclasses."""
        super().__init_subclass__(**kwargs)
        # Each subclass gets its own registry, used by FileLockMeta.__call__.
        cls._instances = WeakValueDictionary()

    def __init__(  # noqa: PLR0913
        self,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = True,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
    ) -> None:
        """
        Create a new lock object.

        :param lock_file: path to the file
        :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
            the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
            to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
        :param mode: file permissions for the lockfile
        :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
            ``False`` then the lock will be reentrant across threads.
        :param blocking: whether the lock should be blocking or not
        :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
            per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
            to pass the same object around.

        """
        self._is_thread_local = thread_local
        self._is_singleton = is_singleton

        # Create the context. Note that external code should not work with the context directly and should instead use
        # properties of this class.
        kwargs: dict[str, Any] = {
            "lock_file": os.fspath(lock_file),
            "timeout": timeout,
            "mode": mode,
            "blocking": blocking,
        }
        self._context: FileLockContext = (ThreadLocalFileContext if thread_local else FileLockContext)(**kwargs)

    def is_thread_local(self) -> bool:
        """:return: a flag indicating if this lock is thread local or not"""
        return self._is_thread_local

    @property
    def is_singleton(self) -> bool:
        """:return: a flag indicating if this lock is singleton or not"""
        return self._is_singleton

    @property
    def lock_file(self) -> str:
        """:return: path to the lock file"""
        return self._context.lock_file

    @property
    def timeout(self) -> float:
        """
        :return: the default timeout value, in seconds

        .. versionadded:: 2.0.0
        """
        return self._context.timeout

    @timeout.setter
    def timeout(self, value: float | str) -> None:
        """
        Change the default timeout value.

        :param value: the new value, in seconds

        """
        self._context.timeout = float(value)

    @property
    def blocking(self) -> bool:
        """:return: whether the locking is blocking or not"""
        return self._context.blocking

    @blocking.setter
    def blocking(self, value: bool) -> None:
        """
        Change the default blocking value.

        :param value: the new value as bool

        """
        self._context.blocking = value

    @property
    def mode(self) -> int:
        """:return: the file permissions for the lockfile"""
        return self._context.mode

    @abstractmethod
    def _acquire(self) -> None:
        """If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file."""
        raise NotImplementedError

    @abstractmethod
    def _release(self) -> None:
        """Releases the lock and sets self._context.lock_file_fd to None."""
        raise NotImplementedError

    @property
    def is_locked(self) -> bool:
        """
        :return: A boolean indicating if the lock file is holding the lock currently.

        .. versionchanged:: 2.0.0

            This was previously a method and is now a property.
        """
        # The fd doubles as the "held" flag: subclasses set it in _acquire and clear it in _release.
        return self._context.lock_file_fd is not None

    @property
    def lock_counter(self) -> int:
        """:return: The number of times this lock has been acquired (but not yet released)."""
        return self._context.lock_counter

    def acquire(
        self,
        timeout: float | None = None,
        poll_interval: float = 0.05,
        *,
        poll_intervall: float | None = None,
        blocking: bool | None = None,
    ) -> AcquireReturnProxy:
        """
        Try to acquire the file lock.

        :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and
            if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired
        :param poll_interval: interval of trying to acquire the lock file
        :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
        :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
            first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
        :raises Timeout: if fails to acquire lock within the timeout period
        :return: a context object that will unlock the file when the context is exited

        .. code-block:: python

            # You can use this method in the context manager (recommended)
            with lock.acquire():
                pass

            # Or use an equivalent try-finally construct:
            lock.acquire()
            try:
                pass
            finally:
                lock.release()

        .. versionchanged:: 2.0.0

            This method returns now a *proxy* object instead of *self*,
            so that it can be used in a with statement without side effects.

        """
        # Use the default timeout, if no timeout is provided.
        if timeout is None:
            timeout = self._context.timeout

        if blocking is None:
            blocking = self._context.blocking

        if poll_intervall is not None:
            msg = "use poll_interval instead of poll_intervall"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            poll_interval = poll_intervall

        # Increment the number right at the beginning. We can still undo it, if something fails.
        self._context.lock_counter += 1

        lock_id = id(self)
        lock_filename = self.lock_file
        start_time = time.perf_counter()
        try:
            # Poll until the OS-level lock is held, the timeout expires, or
            # (in non-blocking mode) the first attempt fails.
            while True:
                if not self.is_locked:
                    _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
                    self._acquire()
                if self.is_locked:
                    _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
                    break
                if blocking is False:
                    _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                if 0 <= timeout < time.perf_counter() - start_time:
                    _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                msg = "Lock %s not acquired on %s, waiting %s seconds ..."
                _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
                time.sleep(poll_interval)
        except BaseException:  # Something did go wrong, so decrement the counter.
            self._context.lock_counter = max(0, self._context.lock_counter - 1)
            raise
        return AcquireReturnProxy(lock=self)

    def release(self, force: bool = False) -> None:  # noqa: FBT001, FBT002
        """
        Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
        Also note, that the lock file itself is not automatically deleted.

        :param force: If true, the lock counter is ignored and the lock is released in every case.

        """
        if self.is_locked:
            self._context.lock_counter -= 1

            if self._context.lock_counter == 0 or force:
                lock_id, lock_filename = id(self), self.lock_file

                _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
                self._release()
                self._context.lock_counter = 0
                _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)

    def __enter__(self) -> Self:
        """
        Acquire the lock.

        :return: the lock object

        """
        self.acquire()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """
        Release the lock.

        :param exc_type: the exception type if raised
        :param exc_value: the exception value if raised
        :param traceback: the exception traceback if raised

        """
        self.release()

    def __del__(self) -> None:
        """Called when the lock object is deleted."""
        # Force-release so a garbage-collected lock never leaves the file locked.
        self.release(force=True)
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "AcquireReturnProxy",
    "BaseFileLock",
]
|
30
site-packages/filelock/_error.py
Executable file
30
site-packages/filelock/_error.py
Executable file
@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
class Timeout(TimeoutError):  # noqa: N818
    """Signals that the lock on a file could not be acquired within *timeout* seconds."""

    def __init__(self, lock_file: str) -> None:
        super().__init__()
        # Path of the lock file that could not be acquired.
        self._lock_file = lock_file

    @property
    def lock_file(self) -> str:
        """:return: The path of the file lock."""
        return self._lock_file

    def __str__(self) -> str:
        return f"The file lock '{self._lock_file}' could not be acquired."

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.lock_file!r})"

    def __reduce__(self) -> str | tuple[Any, ...]:
        # Reconstruct as (class, constructor-args) so the exception pickles properly.
        return self.__class__, (self._lock_file,)
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "Timeout",
]
|
47
site-packages/filelock/_soft.py
Executable file
47
site-packages/filelock/_soft.py
Executable file
@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
from errno import EACCES, EEXIST
|
||||
from pathlib import Path
|
||||
|
||||
from ._api import BaseFileLock
|
||||
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
||||
|
||||
|
||||
class SoftFileLock(BaseFileLock):
    """Simply watches the existence of the lock file."""

    def _acquire(self) -> None:
        """Try to create the lock file exclusively; on expected failures leave the lock unheld so the caller retries."""
        raise_on_not_writable_file(self.lock_file)
        ensure_directory_exists(self.lock_file)
        # first check for exists and read-only mode as the open will mask this case as EEXIST
        flags = (
            os.O_WRONLY  # open for writing only
            | os.O_CREAT
            | os.O_EXCL  # together with above raise EEXIST if the file specified by filename exists
            | os.O_TRUNC  # truncate the file to zero byte
        )
        try:
            file_handler = os.open(self.lock_file, flags, self._context.mode)
        except OSError as exception:  # re-raise unless expected exception
            if not (
                exception.errno == EEXIST  # lock already exist
                or (exception.errno == EACCES and sys.platform == "win32")  # has no access to this lock
            ):  # pragma: win32 no cover
                raise
            # Expected failure: swallow it; lock_file_fd stays None, so is_locked
            # remains False and the acquire loop in BaseFileLock keeps polling.
        else:
            self._context.lock_file_fd = file_handler

    def _release(self) -> None:
        """Close the file descriptor and best-effort delete the lock file."""
        assert self._context.lock_file_fd is not None  # noqa: S101
        os.close(self._context.lock_file_fd)  # the lock file is definitely not None
        self._context.lock_file_fd = None
        with suppress(OSError):  # the file is already deleted and that's what we want
            Path(self.lock_file).unlink()
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "SoftFileLock",
]
|
68
site-packages/filelock/_unix.py
Executable file
68
site-packages/filelock/_unix.py
Executable file
@ -0,0 +1,68 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
from errno import ENOSYS
|
||||
from pathlib import Path
|
||||
from typing import cast
|
||||
|
||||
from ._api import BaseFileLock
|
||||
from ._util import ensure_directory_exists
|
||||
|
||||
#: a flag to indicate if the fcntl API is available
has_fcntl = False
if sys.platform == "win32":  # pragma: win32 cover
    # On Windows a stub is defined so the name always exists; it must never be used there.

    class UnixFileLock(BaseFileLock):
        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""

        def _acquire(self) -> None:
            raise NotImplementedError

        def _release(self) -> None:
            raise NotImplementedError

else:  # pragma: win32 no cover
    try:
        import fcntl
    except ImportError:
        # fcntl missing (unusual on unix): has_fcntl stays False, and the package
        # __init__ falls back to SoftFileLock instead of this class.
        pass
    else:
        has_fcntl = True

    class UnixFileLock(BaseFileLock):
        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""

        def _acquire(self) -> None:
            """Open the lock file and take an exclusive, non-blocking flock on it."""
            ensure_directory_exists(self.lock_file)
            open_flags = os.O_RDWR | os.O_TRUNC
            if not Path(self.lock_file).exists():
                open_flags |= os.O_CREAT
            fd = os.open(self.lock_file, open_flags, self._context.mode)
            with suppress(PermissionError):  # This locked is not owned by this UID
                os.fchmod(fd, self._context.mode)
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except OSError as exception:
                os.close(fd)
                if exception.errno == ENOSYS:  # NotImplemented error
                    msg = "FileSystem does not appear to support flock; use SoftFileLock instead"
                    raise NotImplementedError(msg) from exception
                # Any other flock failure (e.g. already locked) is swallowed:
                # lock_file_fd stays None so the acquire loop retries.
            else:
                self._context.lock_file_fd = fd

        def _release(self) -> None:
            # Do not remove the lockfile:
            # https://github.com/tox-dev/py-filelock/issues/31
            # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
            fd = cast(int, self._context.lock_file_fd)
            self._context.lock_file_fd = None
            fcntl.flock(fd, fcntl.LOCK_UN)
            os.close(fd)


# Public API of this module.
__all__ = [
    "UnixFileLock",
    "has_fcntl",
]
|
52
site-packages/filelock/_util.py
Executable file
52
site-packages/filelock/_util.py
Executable file
@ -0,0 +1,52 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
from errno import EACCES, EISDIR
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def raise_on_not_writable_file(filename: str) -> None:
    """
    Raise the error that opening *filename* for writing would produce, if any.

    This is done so files that will never be writable can be separated from files that are writable but currently
    locked.

    :param filename: file to check
    :raises OSError: as if the file was opened for writing.

    """
    try:
        # A single stat call checks existence + writability without a race condition.
        info = os.stat(filename)  # noqa: PTH116
    except OSError:
        # Missing file (or any other stat failure) is fine: the lock file can be created.
        return

    if info.st_mtime == 0:
        # A zero modification time marks an invalid os.stat result - ignore it.
        return

    if not info.st_mode & stat.S_IWUSR:
        # Exists but the owner has no write permission.
        raise PermissionError(EACCES, "Permission denied", filename)

    if stat.S_ISDIR(info.st_mode):
        if sys.platform == "win32":  # pragma: win32 cover
            # On Windows, opening a directory for writing raises PermissionError.
            raise PermissionError(EACCES, "Permission denied", filename)
        # On linux / macOS it raises IsADirectoryError instead.  # pragma: win32 no cover
        raise IsADirectoryError(EISDIR, "Is a directory", filename)
|
||||
|
||||
|
||||
def ensure_directory_exists(filename: Path | str) -> None:
    """
    Create the parent directory of *filename* if it does not exist yet.

    :param filename: file.

    """
    parent = Path(filename).parent
    # parents/exist_ok make this safe to call repeatedly and for nested paths.
    parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "ensure_directory_exists",
    "raise_on_not_writable_file",
]
|
65
site-packages/filelock/_windows.py
Executable file
65
site-packages/filelock/_windows.py
Executable file
@ -0,0 +1,65 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
from errno import EACCES
|
||||
from pathlib import Path
|
||||
from typing import cast
|
||||
|
||||
from ._api import BaseFileLock
|
||||
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
||||
|
||||
if sys.platform == "win32":  # pragma: win32 cover
    import msvcrt

    class WindowsFileLock(BaseFileLock):
        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""

        def _acquire(self) -> None:
            """Open the lock file and take a non-blocking region lock on its first byte."""
            raise_on_not_writable_file(self.lock_file)
            ensure_directory_exists(self.lock_file)
            flags = (
                os.O_RDWR  # open for read and write
                | os.O_CREAT  # create file if not exists
                | os.O_TRUNC  # truncate file if not empty
            )
            try:
                fd = os.open(self.lock_file, flags, self._context.mode)
            except OSError as exception:
                if exception.errno != EACCES:  # has no access to this lock
                    raise
                # EACCES is swallowed: lock_file_fd stays None so the acquire loop retries.
            else:
                try:
                    msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
                except OSError as exception:
                    os.close(fd)  # close file first
                    if exception.errno != EACCES:  # file is already locked
                        raise
                else:
                    self._context.lock_file_fd = fd

        def _release(self) -> None:
            """Unlock the region, close the descriptor, and best-effort delete the file."""
            fd = cast(int, self._context.lock_file_fd)
            self._context.lock_file_fd = None
            msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
            os.close(fd)

            with suppress(OSError):  # Probably another instance of the application had acquired the file lock.
                Path(self.lock_file).unlink()

else:  # pragma: win32 no cover
    # Non-Windows stub so the name always exists; it must never be used here.

    class WindowsFileLock(BaseFileLock):
        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""

        def _acquire(self) -> None:
            raise NotImplementedError

        def _release(self) -> None:
            raise NotImplementedError


# Public API of this module.
__all__ = [
    "WindowsFileLock",
]
|
342
site-packages/filelock/asyncio.py
Executable file
342
site-packages/filelock/asyncio.py
Executable file
@ -0,0 +1,342 @@
|
||||
"""An asyncio-based implementation of the file lock.""" # noqa: A005
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from threading import local
|
||||
from typing import TYPE_CHECKING, Any, Callable, NoReturn, cast
|
||||
|
||||
from ._api import BaseFileLock, FileLockContext, FileLockMeta
|
||||
from ._error import Timeout
|
||||
from ._soft import SoftFileLock
|
||||
from ._unix import UnixFileLock
|
||||
from ._windows import WindowsFileLock
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import sys
|
||||
from concurrent import futures
|
||||
from types import TracebackType
|
||||
|
||||
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
|
||||
from typing import Self
|
||||
else: # pragma: no cover (<py311)
|
||||
from typing_extensions import Self
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger("filelock")
|
||||
|
||||
|
||||
@dataclass
class AsyncFileLockContext(FileLockContext):
    """A dataclass which holds the context for a ``BaseAsyncFileLock`` object."""

    #: Whether the blocking acquire/release work should run in an executor.
    run_in_executor: bool = True

    #: The executor to run blocking work in (None means the loop's default executor).
    executor: futures.Executor | None = None

    #: The event loop to use (None means the currently running loop).
    loop: asyncio.AbstractEventLoop | None = None
|
||||
|
||||
|
||||
class AsyncThreadLocalFileContext(AsyncFileLockContext, local):
    """A thread local version of the ``FileLockContext`` class."""
    # Inheriting from threading.local gives every thread its own independent
    # copy of the async context fields.
|
||||
|
||||
|
||||
class AsyncAcquireReturnProxy:
    """Async context-manager wrapper handed out by the async ``acquire`` call.

    Entering the ``async with`` block yields the wrapped (already acquired)
    lock; leaving it awaits the lock's release exactly once.
    """

    def __init__(self, lock: BaseAsyncFileLock) -> None:  # noqa: D107
        # The async lock that will be released when the context exits.
        self.lock = lock

    async def __aenter__(self) -> BaseAsyncFileLock:  # noqa: D105
        # The lock was acquired before this proxy was constructed, so
        # entering simply hands it back.
        return self.lock

    async def __aexit__(  # noqa: D105
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Release unconditionally — whether or not the body raised.
        await self.lock.release()
|
||||
|
||||
|
||||
class AsyncFileLockMeta(FileLockMeta):
    """Metaclass for async locks: validates async-only options before delegating to FileLockMeta."""

    def __call__(  # type: ignore[override] # noqa: PLR0913
        cls,  # noqa: N805
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = False,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        loop: asyncio.AbstractEventLoop | None = None,
        run_in_executor: bool = True,
        executor: futures.Executor | None = None,
    ) -> BaseAsyncFileLock:
        """Create an async lock instance, forwarding the async-specific options to FileLockMeta."""
        # thread-local state and executor threads are mutually exclusive: the
        # executor may run on a different thread than the one that owns the state.
        if thread_local and run_in_executor:
            msg = "run_in_executor is not supported when thread_local is True"
            raise ValueError(msg)
        # FileLockMeta handles the singleton logic and filters keyword arguments
        # down to what the subclass __init__ actually accepts.
        instance = super().__call__(
            lock_file=lock_file,
            timeout=timeout,
            mode=mode,
            thread_local=thread_local,
            blocking=blocking,
            is_singleton=is_singleton,
            loop=loop,
            run_in_executor=run_in_executor,
            executor=executor,
        )
        return cast(BaseAsyncFileLock, instance)
|
||||
|
||||
|
||||
class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta):
|
||||
"""Base class for asynchronous file locks."""
|
||||
|
||||
def __init__( # noqa: PLR0913
|
||||
self,
|
||||
lock_file: str | os.PathLike[str],
|
||||
timeout: float = -1,
|
||||
mode: int = 0o644,
|
||||
thread_local: bool = False, # noqa: FBT001, FBT002
|
||||
*,
|
||||
blocking: bool = True,
|
||||
is_singleton: bool = False,
|
||||
loop: asyncio.AbstractEventLoop | None = None,
|
||||
run_in_executor: bool = True,
|
||||
executor: futures.Executor | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Create a new lock object.
|
||||
|
||||
:param lock_file: path to the file
|
||||
:param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
|
||||
the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
|
||||
to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
|
||||
:param mode: file permissions for the lockfile
|
||||
:param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
|
||||
``False`` then the lock will be reentrant across threads.
|
||||
:param blocking: whether the lock should be blocking or not
|
||||
:param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
|
||||
per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
|
||||
to pass the same object around.
|
||||
:param loop: The event loop to use. If not specified, the running event loop will be used.
|
||||
:param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
|
||||
:param executor: The executor to use. If not specified, the default executor will be used.
|
||||
|
||||
"""
|
||||
self._is_thread_local = thread_local
|
||||
self._is_singleton = is_singleton
|
||||
|
||||
# Create the context. Note that external code should not work with the context directly and should instead use
|
||||
# properties of this class.
|
||||
kwargs: dict[str, Any] = {
|
||||
"lock_file": os.fspath(lock_file),
|
||||
"timeout": timeout,
|
||||
"mode": mode,
|
||||
"blocking": blocking,
|
||||
"loop": loop,
|
||||
"run_in_executor": run_in_executor,
|
||||
"executor": executor,
|
||||
}
|
||||
self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
|
||||
**kwargs
|
||||
)
|
||||
|
||||
@property
|
||||
def run_in_executor(self) -> bool:
|
||||
"""::return: whether run in executor."""
|
||||
return self._context.run_in_executor
|
||||
|
||||
@property
|
||||
def executor(self) -> futures.Executor | None:
|
||||
"""::return: the executor."""
|
||||
return self._context.executor
|
||||
|
||||
@executor.setter
|
||||
def executor(self, value: futures.Executor | None) -> None: # pragma: no cover
|
||||
"""
|
||||
Change the executor.
|
||||
|
||||
:param value: the new executor or ``None``
|
||||
:type value: futures.Executor | None
|
||||
|
||||
"""
|
||||
self._context.executor = value
|
||||
|
||||
@property
|
||||
def loop(self) -> asyncio.AbstractEventLoop | None:
|
||||
"""::return: the event loop."""
|
||||
return self._context.loop
|
||||
|
||||
async def acquire( # type: ignore[override]
|
||||
self,
|
||||
timeout: float | None = None,
|
||||
poll_interval: float = 0.05,
|
||||
*,
|
||||
blocking: bool | None = None,
|
||||
) -> AsyncAcquireReturnProxy:
|
||||
"""
|
||||
Try to acquire the file lock.
|
||||
|
||||
:param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
|
||||
:attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and
|
||||
this method will block until the lock could be acquired
|
||||
:param poll_interval: interval of trying to acquire the lock file
|
||||
:param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
|
||||
first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
|
||||
:raises Timeout: if fails to acquire lock within the timeout period
|
||||
:return: a context object that will unlock the file when the context is exited
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# You can use this method in the context manager (recommended)
|
||||
with lock.acquire():
|
||||
pass
|
||||
|
||||
# Or use an equivalent try-finally construct:
|
||||
lock.acquire()
|
||||
try:
|
||||
pass
|
||||
finally:
|
||||
lock.release()
|
||||
|
||||
"""
|
||||
# Use the default timeout, if no timeout is provided.
|
||||
if timeout is None:
|
||||
timeout = self._context.timeout
|
||||
|
||||
if blocking is None:
|
||||
blocking = self._context.blocking
|
||||
|
||||
# Increment the number right at the beginning. We can still undo it, if something fails.
|
||||
self._context.lock_counter += 1
|
||||
|
||||
lock_id = id(self)
|
||||
lock_filename = self.lock_file
|
||||
start_time = time.perf_counter()
|
||||
try:
|
||||
while True:
|
||||
if not self.is_locked:
|
||||
_LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
|
||||
await self._run_internal_method(self._acquire)
|
||||
if self.is_locked:
|
||||
_LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
|
||||
break
|
||||
if blocking is False:
|
||||
_LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
|
||||
raise Timeout(lock_filename) # noqa: TRY301
|
||||
if 0 <= timeout < time.perf_counter() - start_time:
|
||||
_LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
|
||||
raise Timeout(lock_filename) # noqa: TRY301
|
||||
msg = "Lock %s not acquired on %s, waiting %s seconds ..."
|
||||
_LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
|
||||
await asyncio.sleep(poll_interval)
|
||||
except BaseException: # Something did go wrong, so decrement the counter.
|
||||
self._context.lock_counter = max(0, self._context.lock_counter - 1)
|
||||
raise
|
||||
return AsyncAcquireReturnProxy(lock=self)
|
||||
|
||||
async def release(self, force: bool = False) -> None:  # type: ignore[override] # noqa: FBT001, FBT002
    """
    Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
    Also note, that the lock file itself is not automatically deleted.

    :param force: If true, the lock counter is ignored and the lock is released in every case.
    """
    if self.is_locked:
        # Each release undoes one nested acquire; the underlying lock is
        # dropped only when the last one is released (counter reaches 0).
        self._context.lock_counter -= 1

        if self._context.lock_counter == 0 or force:
            lock_id, lock_filename = id(self), self.lock_file

            _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
            await self._run_internal_method(self._release)
            # Reset explicitly so a forced release also leaves the counter at 0.
            self._context.lock_counter = 0
            _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
|
||||
|
||||
async def _run_internal_method(self, method: Callable[[], Any]) -> None:
|
||||
if asyncio.iscoroutinefunction(method):
|
||||
await method()
|
||||
elif self.run_in_executor:
|
||||
loop = self.loop or asyncio.get_running_loop()
|
||||
await loop.run_in_executor(self.executor, method)
|
||||
else:
|
||||
method()
|
||||
|
||||
def __enter__(self) -> NoReturn:
|
||||
"""
|
||||
Replace old __enter__ method to avoid using it.
|
||||
|
||||
NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.
|
||||
|
||||
:return: none
|
||||
:rtype: NoReturn
|
||||
"""
|
||||
msg = "Do not use `with` for asyncio locks, use `async with` instead."
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
async def __aenter__(self) -> Self:
    """
    Acquire the lock on entering the ``async with`` block.

    :return: the lock object
    """
    await self.acquire()
    return self
|
||||
|
||||
async def __aexit__(
    self,
    exc_type: type[BaseException] | None,
    exc_value: BaseException | None,
    traceback: TracebackType | None,
) -> None:
    """
    Release the lock on leaving the ``async with`` block.

    :param exc_type: the exception type if raised
    :param exc_value: the exception value if raised
    :param traceback: the exception traceback if raised
    """
    await self.release()
|
||||
|
||||
def __del__(self) -> None:
    """Called when the lock object is deleted."""
    # asyncio.get_running_loop() raises RuntimeError when no loop is active;
    # in that case there is nothing safe left to do, so give up silently.
    with contextlib.suppress(RuntimeError):
        loop = self.loop or asyncio.get_running_loop()
        if not loop.is_running():  # pragma: no cover
            # No loop running: drive the forced release to completion ourselves.
            loop.run_until_complete(self.release(force=True))
        else:
            # A loop is running: schedule the forced release as a task on it.
            loop.create_task(self.release(force=True))
|
||||
|
||||
|
||||
class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
    """Asyncio variant of :class:`SoftFileLock`: simply watches the existence of the lock file."""
|
||||
|
||||
|
||||
class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
    """Asyncio variant of :class:`UnixFileLock`: uses :func:`fcntl.flock` to hard lock the lock file on unix systems."""
|
||||
|
||||
|
||||
class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
    """Asyncio variant of :class:`WindowsFileLock`: uses :func:`msvcrt.locking` to hard lock the lock file on windows systems."""
|
||||
|
||||
|
||||
# Public names exported by this module (kept in alphabetical order).
__all__ = [
    "AsyncAcquireReturnProxy",
    "AsyncSoftFileLock",
    "AsyncUnixFileLock",
    "AsyncWindowsFileLock",
    "BaseAsyncFileLock",
]
|
0
site-packages/filelock/py.typed
Executable file
0
site-packages/filelock/py.typed
Executable file
16
site-packages/filelock/version.py
Executable file
16
site-packages/filelock/version.py
Executable file
@ -0,0 +1,16 @@
|
||||
# file generated by setuptools_scm
# don't change, don't track in version control

# TYPE_CHECKING is hard-coded False so the typing import below is skipped at
# runtime; type checkers still see the precise VERSION_TUPLE alias.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '3.16.1'
__version_tuple__ = version_tuple = (3, 16, 1)
|
1
site-packages/platformdirs-4.3.6.dist-info/INSTALLER
Executable file
1
site-packages/platformdirs-4.3.6.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
327
site-packages/platformdirs-4.3.6.dist-info/METADATA
Executable file
327
site-packages/platformdirs-4.3.6.dist-info/METADATA
Executable file
@ -0,0 +1,327 @@
|
||||
Metadata-Version: 2.3
|
||||
Name: platformdirs
|
||||
Version: 4.3.6
|
||||
Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`.
|
||||
Project-URL: Changelog, https://github.com/tox-dev/platformdirs/releases
|
||||
Project-URL: Documentation, https://platformdirs.readthedocs.io
|
||||
Project-URL: Homepage, https://github.com/tox-dev/platformdirs
|
||||
Project-URL: Source, https://github.com/tox-dev/platformdirs
|
||||
Project-URL: Tracker, https://github.com/tox-dev/platformdirs/issues
|
||||
Maintainer-email: Bernát Gábor <gaborjbernat@gmail.com>, Julian Berman <Julian@GrayVines.com>, Ofek Lev <oss@ofek.dev>, Ronny Pfannschmidt <opensource@ronnypfannschmidt.de>
|
||||
License-Expression: MIT
|
||||
License-File: LICENSE
|
||||
Keywords: appdirs,application,cache,directory,log,user
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Requires-Python: >=3.8
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: furo>=2024.8.6; extra == 'docs'
|
||||
Requires-Dist: proselint>=0.14; extra == 'docs'
|
||||
Requires-Dist: sphinx-autodoc-typehints>=2.4; extra == 'docs'
|
||||
Requires-Dist: sphinx>=8.0.2; extra == 'docs'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: appdirs==1.4.4; extra == 'test'
|
||||
Requires-Dist: covdefaults>=2.3; extra == 'test'
|
||||
Requires-Dist: pytest-cov>=5; extra == 'test'
|
||||
Requires-Dist: pytest-mock>=3.14; extra == 'test'
|
||||
Requires-Dist: pytest>=8.3.2; extra == 'test'
|
||||
Provides-Extra: type
|
||||
Requires-Dist: mypy>=1.11.2; extra == 'type'
|
||||
Description-Content-Type: text/x-rst
|
||||
|
||||
The problem
|
||||
===========
|
||||
|
||||
.. image:: https://badge.fury.io/py/platformdirs.svg
|
||||
:target: https://badge.fury.io/py/platformdirs
|
||||
.. image:: https://img.shields.io/pypi/pyversions/platformdirs.svg
|
||||
:target: https://pypi.python.org/pypi/platformdirs/
|
||||
.. image:: https://github.com/tox-dev/platformdirs/actions/workflows/check.yaml/badge.svg
|
||||
:target: https://github.com/platformdirs/platformdirs/actions
|
||||
.. image:: https://static.pepy.tech/badge/platformdirs/month
|
||||
:target: https://pepy.tech/project/platformdirs
|
||||
|
||||
When writing desktop applications, finding the right location to store user data
|
||||
and configuration varies per platform. Even for single-platform apps, there
|
||||
may be plenty of nuances in figuring out the right location.
|
||||
|
||||
For example, if running on macOS, you should use::
|
||||
|
||||
~/Library/Application Support/<AppName>
|
||||
|
||||
If on Windows (at least English Win) that should be::
|
||||
|
||||
C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
|
||||
|
||||
or possibly::
|
||||
|
||||
C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
|
||||
|
||||
for `roaming profiles <https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-vista/cc766489(v=ws.10)>`_ but that is another story.
|
||||
|
||||
On Linux (and other Unices), according to the `XDG Basedir Spec`_, it should be::
|
||||
|
||||
~/.local/share/<AppName>
|
||||
|
||||
.. _XDG Basedir Spec: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
||||
|
||||
``platformdirs`` to the rescue
|
||||
==============================
|
||||
|
||||
This kind of thing is what the ``platformdirs`` package is for.
|
||||
``platformdirs`` will help you choose an appropriate:
|
||||
|
||||
- user data dir (``user_data_dir``)
|
||||
- user config dir (``user_config_dir``)
|
||||
- user cache dir (``user_cache_dir``)
|
||||
- site data dir (``site_data_dir``)
|
||||
- site config dir (``site_config_dir``)
|
||||
- user log dir (``user_log_dir``)
|
||||
- user documents dir (``user_documents_dir``)
|
||||
- user downloads dir (``user_downloads_dir``)
|
||||
- user pictures dir (``user_pictures_dir``)
|
||||
- user videos dir (``user_videos_dir``)
|
||||
- user music dir (``user_music_dir``)
|
||||
- user desktop dir (``user_desktop_dir``)
|
||||
- user runtime dir (``user_runtime_dir``)
|
||||
|
||||
And also:
|
||||
|
||||
- Is slightly opinionated on the directory names used. Look for "OPINION" in
|
||||
documentation and code for when an opinion is being applied.
|
||||
|
||||
Example output
|
||||
==============
|
||||
|
||||
On macOS:
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from platformdirs import *
|
||||
>>> appname = "SuperApp"
|
||||
>>> appauthor = "Acme"
|
||||
>>> user_data_dir(appname, appauthor)
|
||||
'/Users/trentm/Library/Application Support/SuperApp'
|
||||
>>> site_data_dir(appname, appauthor)
|
||||
'/Library/Application Support/SuperApp'
|
||||
>>> user_cache_dir(appname, appauthor)
|
||||
'/Users/trentm/Library/Caches/SuperApp'
|
||||
>>> user_log_dir(appname, appauthor)
|
||||
'/Users/trentm/Library/Logs/SuperApp'
|
||||
>>> user_documents_dir()
|
||||
'/Users/trentm/Documents'
|
||||
>>> user_downloads_dir()
|
||||
'/Users/trentm/Downloads'
|
||||
>>> user_pictures_dir()
|
||||
'/Users/trentm/Pictures'
|
||||
>>> user_videos_dir()
|
||||
'/Users/trentm/Movies'
|
||||
>>> user_music_dir()
|
||||
'/Users/trentm/Music'
|
||||
>>> user_desktop_dir()
|
||||
'/Users/trentm/Desktop'
|
||||
>>> user_runtime_dir(appname, appauthor)
|
||||
'/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
|
||||
|
||||
On Windows:
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from platformdirs import *
|
||||
>>> appname = "SuperApp"
|
||||
>>> appauthor = "Acme"
|
||||
>>> user_data_dir(appname, appauthor)
|
||||
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
|
||||
>>> user_data_dir(appname, appauthor, roaming=True)
|
||||
'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
|
||||
>>> user_cache_dir(appname, appauthor)
|
||||
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
|
||||
>>> user_log_dir(appname, appauthor)
|
||||
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
|
||||
>>> user_documents_dir()
|
||||
'C:\\Users\\trentm\\Documents'
|
||||
>>> user_downloads_dir()
|
||||
'C:\\Users\\trentm\\Downloads'
|
||||
>>> user_pictures_dir()
|
||||
'C:\\Users\\trentm\\Pictures'
|
||||
>>> user_videos_dir()
|
||||
'C:\\Users\\trentm\\Videos'
|
||||
>>> user_music_dir()
|
||||
'C:\\Users\\trentm\\Music'
|
||||
>>> user_desktop_dir()
|
||||
'C:\\Users\\trentm\\Desktop'
|
||||
>>> user_runtime_dir(appname, appauthor)
|
||||
'C:\\Users\\trentm\\AppData\\Local\\Temp\\Acme\\SuperApp'
|
||||
|
||||
On Linux:
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from platformdirs import *
|
||||
>>> appname = "SuperApp"
|
||||
>>> appauthor = "Acme"
|
||||
>>> user_data_dir(appname, appauthor)
|
||||
'/home/trentm/.local/share/SuperApp'
|
||||
>>> site_data_dir(appname, appauthor)
|
||||
'/usr/local/share/SuperApp'
|
||||
>>> site_data_dir(appname, appauthor, multipath=True)
|
||||
'/usr/local/share/SuperApp:/usr/share/SuperApp'
|
||||
>>> user_cache_dir(appname, appauthor)
|
||||
'/home/trentm/.cache/SuperApp'
|
||||
>>> user_log_dir(appname, appauthor)
|
||||
'/home/trentm/.local/state/SuperApp/log'
|
||||
>>> user_config_dir(appname)
|
||||
'/home/trentm/.config/SuperApp'
|
||||
>>> user_documents_dir()
|
||||
'/home/trentm/Documents'
|
||||
>>> user_downloads_dir()
|
||||
'/home/trentm/Downloads'
|
||||
>>> user_pictures_dir()
|
||||
'/home/trentm/Pictures'
|
||||
>>> user_videos_dir()
|
||||
'/home/trentm/Videos'
|
||||
>>> user_music_dir()
|
||||
'/home/trentm/Music'
|
||||
>>> user_desktop_dir()
|
||||
'/home/trentm/Desktop'
|
||||
>>> user_runtime_dir(appname, appauthor)
|
||||
'/run/user/{os.getuid()}/SuperApp'
|
||||
>>> site_config_dir(appname)
|
||||
'/etc/xdg/SuperApp'
|
||||
>>> os.environ["XDG_CONFIG_DIRS"] = "/etc:/usr/local/etc"
|
||||
>>> site_config_dir(appname, multipath=True)
|
||||
'/etc/SuperApp:/usr/local/etc/SuperApp'
|
||||
|
||||
On Android::
|
||||
|
||||
>>> from platformdirs import *
|
||||
>>> appname = "SuperApp"
|
||||
>>> appauthor = "Acme"
|
||||
>>> user_data_dir(appname, appauthor)
|
||||
'/data/data/com.myApp/files/SuperApp'
|
||||
>>> user_cache_dir(appname, appauthor)
|
||||
'/data/data/com.myApp/cache/SuperApp'
|
||||
>>> user_log_dir(appname, appauthor)
|
||||
'/data/data/com.myApp/cache/SuperApp/log'
|
||||
>>> user_config_dir(appname)
|
||||
'/data/data/com.myApp/shared_prefs/SuperApp'
|
||||
>>> user_documents_dir()
|
||||
'/storage/emulated/0/Documents'
|
||||
>>> user_downloads_dir()
|
||||
'/storage/emulated/0/Downloads'
|
||||
>>> user_pictures_dir()
|
||||
'/storage/emulated/0/Pictures'
|
||||
>>> user_videos_dir()
|
||||
'/storage/emulated/0/DCIM/Camera'
|
||||
>>> user_music_dir()
|
||||
'/storage/emulated/0/Music'
|
||||
>>> user_desktop_dir()
|
||||
'/storage/emulated/0/Desktop'
|
||||
>>> user_runtime_dir(appname, appauthor)
|
||||
'/data/data/com.myApp/cache/SuperApp/tmp'
|
||||
|
||||
Note: Some android apps like Termux and Pydroid are used as shells. These
|
||||
apps are used by the end user to emulate Linux environment. Presence of
|
||||
``SHELL`` environment variable is used by Platformdirs to differentiate
|
||||
between general android apps and android apps used as shells. Shell android
|
||||
apps also support ``XDG_*`` environment variables.
|
||||
|
||||
|
||||
``PlatformDirs`` for convenience
|
||||
================================
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from platformdirs import PlatformDirs
|
||||
>>> dirs = PlatformDirs("SuperApp", "Acme")
|
||||
>>> dirs.user_data_dir
|
||||
'/Users/trentm/Library/Application Support/SuperApp'
|
||||
>>> dirs.site_data_dir
|
||||
'/Library/Application Support/SuperApp'
|
||||
>>> dirs.user_cache_dir
|
||||
'/Users/trentm/Library/Caches/SuperApp'
|
||||
>>> dirs.user_log_dir
|
||||
'/Users/trentm/Library/Logs/SuperApp'
|
||||
>>> dirs.user_documents_dir
|
||||
'/Users/trentm/Documents'
|
||||
>>> dirs.user_downloads_dir
|
||||
'/Users/trentm/Downloads'
|
||||
>>> dirs.user_pictures_dir
|
||||
'/Users/trentm/Pictures'
|
||||
>>> dirs.user_videos_dir
|
||||
'/Users/trentm/Movies'
|
||||
>>> dirs.user_music_dir
|
||||
'/Users/trentm/Music'
|
||||
>>> dirs.user_desktop_dir
|
||||
'/Users/trentm/Desktop'
|
||||
>>> dirs.user_runtime_dir
|
||||
'/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
|
||||
|
||||
Per-version isolation
|
||||
=====================
|
||||
|
||||
If you have multiple versions of your app in use that you want to be
|
||||
able to run side-by-side, then you may want version-isolation for these
|
||||
dirs::
|
||||
|
||||
>>> from platformdirs import PlatformDirs
|
||||
>>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0")
|
||||
>>> dirs.user_data_dir
|
||||
'/Users/trentm/Library/Application Support/SuperApp/1.0'
|
||||
>>> dirs.site_data_dir
|
||||
'/Library/Application Support/SuperApp/1.0'
|
||||
>>> dirs.user_cache_dir
|
||||
'/Users/trentm/Library/Caches/SuperApp/1.0'
|
||||
>>> dirs.user_log_dir
|
||||
'/Users/trentm/Library/Logs/SuperApp/1.0'
|
||||
>>> dirs.user_documents_dir
|
||||
'/Users/trentm/Documents'
|
||||
>>> dirs.user_downloads_dir
|
||||
'/Users/trentm/Downloads'
|
||||
>>> dirs.user_pictures_dir
|
||||
'/Users/trentm/Pictures'
|
||||
>>> dirs.user_videos_dir
|
||||
'/Users/trentm/Movies'
|
||||
>>> dirs.user_music_dir
|
||||
'/Users/trentm/Music'
|
||||
>>> dirs.user_desktop_dir
|
||||
'/Users/trentm/Desktop'
|
||||
>>> dirs.user_runtime_dir
|
||||
'/Users/trentm/Library/Caches/TemporaryItems/SuperApp/1.0'
|
||||
|
||||
Be wary of using this for configuration files though; you'll need to handle
|
||||
migrating configuration files manually.
|
||||
|
||||
Why this Fork?
|
||||
==============
|
||||
|
||||
This repository is a friendly fork of the wonderful work started by
|
||||
`ActiveState <https://github.com/ActiveState/appdirs>`_ who created
|
||||
``appdirs``, this package's ancestor.
|
||||
|
||||
Maintaining an open source project is no easy task, particularly
|
||||
from within an organization, and the Python community is indebted
|
||||
to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for
|
||||
creating an incredibly useful simple module, as evidenced by the wide
|
||||
number of users it has attracted over the years.
|
||||
|
||||
Nonetheless, given the number of long-standing open issues
|
||||
and pull requests, and no clear path towards `ensuring
|
||||
that maintenance of the package would continue or grow
|
||||
<https://github.com/ActiveState/appdirs/issues/79>`_, this fork was
|
||||
created.
|
||||
|
||||
Contributions are most welcome.
|
22
site-packages/platformdirs-4.3.6.dist-info/RECORD
Executable file
22
site-packages/platformdirs-4.3.6.dist-info/RECORD
Executable file
@ -0,0 +1,22 @@
|
||||
platformdirs-4.3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
platformdirs-4.3.6.dist-info/METADATA,sha256=085GgRFo5U1nc9NR8e6unEWKxUjDMsgSHDyaCETsCQ4,11868
|
||||
platformdirs-4.3.6.dist-info/RECORD,,
|
||||
platformdirs-4.3.6.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
||||
platformdirs-4.3.6.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
|
||||
platformdirs/__init__.py,sha256=mVCfMmBM4q24lq6336V3VJncdxaOegI4qQSmQCjkR5E,22284
|
||||
platformdirs/__main__.py,sha256=HnsUQHpiBaiTxwcmwVw-nFaPdVNZtQIdi1eWDtI-MzI,1493
|
||||
platformdirs/__pycache__/__init__.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/__main__.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/android.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/api.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/macos.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/unix.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/version.cpython-311.pyc,,
|
||||
platformdirs/__pycache__/windows.cpython-311.pyc,,
|
||||
platformdirs/android.py,sha256=kV5oL3V3DZ6WZKu9yFiQupv18yp_jlSV2ChH1TmPcds,9007
|
||||
platformdirs/api.py,sha256=2dfUDNbEXeDhDKarqtR5NY7oUikUZ4RZhs3ozstmhBQ,9246
|
||||
platformdirs/macos.py,sha256=UlbyFZ8Rzu3xndCqQEHrfsYTeHwYdFap1Ioz-yxveT4,6154
|
||||
platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
platformdirs/unix.py,sha256=uRPJWRyQEtv7yOSvU94rUmsblo5XKDLA1SzFg55kbK0,10393
|
||||
platformdirs/version.py,sha256=oH4KgTfK4AklbTYVcV_yynvJ9JLI3pyvDVay0hRsLCs,411
|
||||
platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
|
4
site-packages/platformdirs-4.3.6.dist-info/WHEEL
Executable file
4
site-packages/platformdirs-4.3.6.dist-info/WHEEL
Executable file
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.25.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
21
site-packages/platformdirs-4.3.6.dist-info/licenses/LICENSE
Executable file
21
site-packages/platformdirs-4.3.6.dist-info/licenses/LICENSE
Executable file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2010-202x The platformdirs developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
631
site-packages/platformdirs/__init__.py
Executable file
631
site-packages/platformdirs/__init__.py
Executable file
@ -0,0 +1,631 @@
|
||||
"""
|
||||
Utilities for determining application-specific dirs.
|
||||
|
||||
See <https://github.com/platformdirs/platformdirs> for details and usage.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
from .version import __version__
|
||||
from .version import __version_tuple__ as __version_info__
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
if sys.platform == "win32":
|
||||
from platformdirs.windows import Windows as _Result
|
||||
elif sys.platform == "darwin":
|
||||
from platformdirs.macos import MacOS as _Result
|
||||
else:
|
||||
from platformdirs.unix import Unix as _Result
|
||||
|
||||
|
||||
def _set_platform_dir_class() -> type[PlatformDirsABC]:
    """Pick the concrete :class:`PlatformDirsABC` implementation for the current environment."""
    # Both variables are set on Android; plain Linux does not set them.
    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
        # Shell-style Android apps (e.g. Termux, Pydroid) emulate a Linux
        # environment and expose SHELL/PREFIX, so fall back to the regular
        # per-OS result for them.
        if os.getenv("SHELL") or os.getenv("PREFIX"):
            return _Result

        from platformdirs.android import _android_folder  # noqa: PLC0415

        if _android_folder() is not None:
            from platformdirs.android import Android  # noqa: PLC0415

            return Android  # return to avoid redefinition of a result

    return _Result
|
||||
|
||||
|
||||
# At runtime the active class is chosen dynamically; for type checkers the
# statically-imported per-OS class is aliased instead.
if TYPE_CHECKING:
    # Work around mypy issue: https://github.com/python/mypy/issues/10962
    PlatformDirs = _Result
else:
    PlatformDirs = _set_platform_dir_class()  #: Currently active platform
    AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
|
||||
|
||||
|
||||
def user_data_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: data directory tied to the user
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_data_dir
|
||||
|
||||
|
||||
def site_data_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: data directory shared by users
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        multipath=multipath,
        ensure_exists=ensure_exists,
    ).site_data_dir
|
||||
|
||||
|
||||
def user_config_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: config directory tied to the user
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_config_dir
|
||||
|
||||
|
||||
def site_config_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: config directory shared by the users
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        multipath=multipath,
        ensure_exists=ensure_exists,
    ).site_config_dir
|
||||
|
||||
|
||||
def user_cache_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: cache directory tied to the user
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).user_cache_dir
|
||||
|
||||
|
||||
def site_cache_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: cache directory shared by users
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).site_cache_dir
|
||||
|
||||
|
||||
def user_state_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: state directory tied to the user
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_state_dir
|
||||
|
||||
|
||||
def user_log_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: log directory tied to the user
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).user_log_dir
|
||||
|
||||
|
||||
def user_documents_dir() -> str:
    """:returns: documents directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_documents_dir
|
||||
|
||||
|
||||
def user_downloads_dir() -> str:
    """:returns: downloads directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_downloads_dir
|
||||
|
||||
|
||||
def user_pictures_dir() -> str:
    """:returns: pictures directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_pictures_dir
|
||||
|
||||
|
||||
def user_videos_dir() -> str:
    """:returns: videos directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_videos_dir
|
||||
|
||||
|
||||
def user_music_dir() -> str:
    """:returns: music directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_music_dir
|
||||
|
||||
|
||||
def user_desktop_dir() -> str:
    """:returns: desktop directory tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_desktop_dir
|
||||
|
||||
|
||||
def user_runtime_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    Return the runtime directory tied to the user.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    )
    return dirs.user_runtime_dir
|
||||
|
||||
|
||||
def site_runtime_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> str:
    """
    Return the runtime directory shared by users.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    )
    return dirs.site_runtime_dir
|
||||
|
||||
|
||||
def user_data_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the data path tied to the user.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_data_path
|
||||
|
||||
|
||||
def site_data_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the data path shared by users.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        multipath=multipath,
        ensure_exists=ensure_exists,
    )
    return dirs.site_data_path
|
||||
|
||||
|
||||
def user_config_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the config path tied to the user.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_config_path
|
||||
|
||||
|
||||
def site_config_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: config path shared by the users
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        multipath=multipath,
        ensure_exists=ensure_exists,
    ).site_config_path
|
||||
|
||||
|
||||
def site_cache_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: cache path shared by users
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).site_cache_path
|
||||
|
||||
|
||||
def user_cache_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: cache path tied to the user
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).user_cache_path
|
||||
|
||||
|
||||
def user_state_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the state path tied to the user.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        roaming=roaming,
        ensure_exists=ensure_exists,
    )
    return dirs.user_state_path
|
||||
|
||||
|
||||
def user_log_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    :returns: log path tied to the user
    """
    return PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    ).user_log_path
|
||||
|
||||
|
||||
def user_documents_path() -> Path:
    """:returns: documents path tied to the user"""
    return PlatformDirs().user_documents_path
|
||||
|
||||
|
||||
def user_downloads_path() -> Path:
    """:returns: downloads path tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_downloads_path
|
||||
|
||||
|
||||
def user_pictures_path() -> Path:
    """:returns: pictures path tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_pictures_path
|
||||
|
||||
|
||||
def user_videos_path() -> Path:
    """:returns: videos path tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_videos_path
|
||||
|
||||
|
||||
def user_music_path() -> Path:
    """:returns: music path tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_music_path
|
||||
|
||||
|
||||
def user_desktop_path() -> Path:
    """:returns: desktop path tied to the user"""
    dirs = PlatformDirs()
    return dirs.user_desktop_path
|
||||
|
||||
|
||||
def user_runtime_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the runtime path tied to the user.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    )
    return dirs.user_runtime_path
|
||||
|
||||
|
||||
def site_runtime_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,  # noqa: FBT001, FBT002
    ensure_exists: bool = False,  # noqa: FBT001, FBT002
) -> Path:
    """
    Return the runtime path shared by users.

    Every parameter mirrors the attribute of the same name on
    :class:`platformdirs.api.PlatformDirsABC`.
    """
    dirs = PlatformDirs(
        appname=appname,
        appauthor=appauthor,
        version=version,
        opinion=opinion,
        ensure_exists=ensure_exists,
    )
    return dirs.site_runtime_path
|
||||
|
||||
|
||||
# Public API of the platformdirs package (entries kept in ASCII sort order).
__all__ = [
    "AppDirs",
    "PlatformDirs",
    "PlatformDirsABC",
    "__version__",
    "__version_info__",
    "site_cache_dir",
    "site_cache_path",
    "site_config_dir",
    "site_config_path",
    "site_data_dir",
    "site_data_path",
    "site_runtime_dir",
    "site_runtime_path",
    "user_cache_dir",
    "user_cache_path",
    "user_config_dir",
    "user_config_path",
    "user_data_dir",
    "user_data_path",
    "user_desktop_dir",
    "user_desktop_path",
    "user_documents_dir",
    "user_documents_path",
    "user_downloads_dir",
    "user_downloads_path",
    "user_log_dir",
    "user_log_path",
    "user_music_dir",
    "user_music_path",
    "user_pictures_dir",
    "user_pictures_path",
    "user_runtime_dir",
    "user_runtime_path",
    "user_state_dir",
    "user_state_path",
    "user_videos_dir",
    "user_videos_path",
]
|
55
site-packages/platformdirs/__main__.py
Executable file
55
site-packages/platformdirs/__main__.py
Executable file
@ -0,0 +1,55 @@
|
||||
"""Main entry point."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from platformdirs import PlatformDirs, __version__
|
||||
|
||||
# PlatformDirs property names the CLI reports, in display order.
PROPS = (
    "user_data_dir",
    "user_config_dir",
    "user_cache_dir",
    "user_state_dir",
    "user_log_dir",
    "user_documents_dir",
    "user_downloads_dir",
    "user_pictures_dir",
    "user_videos_dir",
    "user_music_dir",
    "user_runtime_dir",
    "site_data_dir",
    "site_config_dir",
    "site_cache_dir",
    "site_runtime_dir",
)
|
||||
|
||||
|
||||
def _print_dirs(dirs: "PlatformDirs") -> None:
    """Print every property listed in ``PROPS`` for the given *dirs* instance."""
    for prop in PROPS:
        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201


def main() -> None:
    """Run the main entry point: dump all resolved directories for four sample configurations."""
    app_name = "MyApp"
    app_author = "MyCompany"

    print(f"-- platformdirs {__version__} --")  # noqa: T201

    print("-- app dirs (with optional 'version')")  # noqa: T201
    _print_dirs(PlatformDirs(app_name, app_author, version="1.0"))

    print("\n-- app dirs (without optional 'version')")  # noqa: T201
    _print_dirs(PlatformDirs(app_name, app_author))

    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
    _print_dirs(PlatformDirs(app_name))

    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
    _print_dirs(PlatformDirs(app_name, appauthor=False))
|
||||
|
||||
|
||||
# Support `python -m platformdirs` as a diagnostic CLI.
if __name__ == "__main__":
    main()
|
249
site-packages/platformdirs/android.py
Executable file
249
site-packages/platformdirs/android.py
Executable file
@ -0,0 +1,249 @@
|
||||
"""Android."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
|
||||
class Android(PlatformDirsABC):
    """
    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_.

    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `version
    <platformdirs.api.PlatformDirsABC.version>`, `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.

    """

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
        # NOTE(review): _android_folder() may return None off-device; the cast hides this and
        # os.path.join would then raise TypeError — assumed to run only on an Android runtime.
        return self._append_app_name_and_version(cast(str, _android_folder()), "files")

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. \
``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
        """
        return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `user_config_dir`"""
        return self.user_config_dir

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g.,``/data/user/<userid>/<packagename>/cache/<AppName>``"""
        return self._append_app_name_and_version(cast(str, _android_folder()), "cache")

    @property
    def site_cache_dir(self) -> str:
        """:return: cache directory shared by users, same as `user_cache_dir`"""
        return self.user_cache_dir

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
        """
        path = self.user_cache_dir
        if self.opinion:
            # Opinionated layout keeps logs in a dedicated subfolder of the cache dir.
            path = os.path.join(path, "log")  # noqa: PTH118
        return path

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
        return _android_documents_folder()

    @property
    def user_downloads_dir(self) -> str:
        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
        return _android_downloads_folder()

    @property
    def user_pictures_dir(self) -> str:
        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
        return _android_pictures_folder()

    @property
    def user_videos_dir(self) -> str:
        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
        return _android_videos_folder()

    @property
    def user_music_dir(self) -> str:
        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
        return _android_music_folder()

    @property
    def user_desktop_dir(self) -> str:
        """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
        return "/storage/emulated/0/Desktop"

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
        """
        path = self.user_cache_dir
        if self.opinion:
            # Opinionated layout keeps scratch files in a dedicated subfolder of the cache dir.
            path = os.path.join(path, "tmp")  # noqa: PTH118
        return path

    @property
    def site_runtime_dir(self) -> str:
        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
        return self.user_runtime_dir
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_folder() -> str | None: # noqa: C901
|
||||
""":return: base folder for the Android OS or None if it cannot be found"""
|
||||
result: str | None = None
|
||||
# type checker isn't happy with our "import android", just don't do this when type checking see
|
||||
# https://stackoverflow.com/a/61394121
|
||||
if not TYPE_CHECKING:
|
||||
try:
|
||||
# First try to get a path to android app using python4android (if available)...
|
||||
from android import mActivity # noqa: PLC0415
|
||||
|
||||
context = cast("android.content.Context", mActivity.getApplicationContext()) # noqa: F821
|
||||
result = context.getFilesDir().getParentFile().getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
result = None
|
||||
if result is None:
|
||||
try:
|
||||
# ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
|
||||
# result...
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
result = context.getFilesDir().getParentFile().getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
result = None
|
||||
if result is None:
|
||||
# and if that fails, too, find an android folder looking at path on the sys.path
|
||||
# warning: only works for apps installed under /data, not adopted storage etc.
|
||||
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
|
||||
for path in sys.path:
|
||||
if pattern.match(path):
|
||||
result = path.split("/files")[0]
|
||||
break
|
||||
else:
|
||||
result = None
|
||||
if result is None:
|
||||
# one last try: find an android folder looking at path on the sys.path taking adopted storage paths into
|
||||
# account
|
||||
pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
|
||||
for path in sys.path:
|
||||
if pattern.match(path):
|
||||
result = path.split("/files")[0]
|
||||
break
|
||||
else:
|
||||
result = None
|
||||
return result
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_documents_folder() -> str:
|
||||
""":return: documents folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
environment = autoclass("android.os.Environment")
|
||||
documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
documents_dir = "/storage/emulated/0/Documents"
|
||||
|
||||
return documents_dir
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_downloads_folder() -> str:
|
||||
""":return: downloads folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
environment = autoclass("android.os.Environment")
|
||||
downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
downloads_dir = "/storage/emulated/0/Downloads"
|
||||
|
||||
return downloads_dir
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_pictures_folder() -> str:
|
||||
""":return: pictures folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
environment = autoclass("android.os.Environment")
|
||||
pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
pictures_dir = "/storage/emulated/0/Pictures"
|
||||
|
||||
return pictures_dir
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_videos_folder() -> str:
|
||||
""":return: videos folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
environment = autoclass("android.os.Environment")
|
||||
videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
videos_dir = "/storage/emulated/0/DCIM/Camera"
|
||||
|
||||
return videos_dir
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _android_music_folder() -> str:
|
||||
""":return: music folder for the Android OS"""
|
||||
# Get directories with pyjnius
|
||||
try:
|
||||
from jnius import autoclass # noqa: PLC0415
|
||||
|
||||
context = autoclass("android.content.Context")
|
||||
environment = autoclass("android.os.Environment")
|
||||
music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
|
||||
except Exception: # noqa: BLE001
|
||||
music_dir = "/storage/emulated/0/Music"
|
||||
|
||||
return music_dir
|
||||
|
||||
|
||||
# Public API of this platform module.
__all__ = [
    "Android",
]
|
298
site-packages/platformdirs/api.py
Executable file
298
site-packages/platformdirs/api.py
Executable file
@ -0,0 +1,298 @@
|
||||
"""Base API."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterator, Literal
|
||||
|
||||
|
||||
class PlatformDirsABC(ABC): # noqa: PLR0904
|
||||
"""Abstract base class for platform directories."""
|
||||
|
||||
    def __init__(  # noqa: PLR0913, PLR0917
        self,
        appname: str | None = None,
        appauthor: str | None | Literal[False] = None,
        version: str | None = None,
        roaming: bool = False,  # noqa: FBT001, FBT002
        multipath: bool = False,  # noqa: FBT001, FBT002
        opinion: bool = True,  # noqa: FBT001, FBT002
        ensure_exists: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        """
        Create a new platform directory.

        :param appname: See `appname`.
        :param appauthor: See `appauthor`.
        :param version: See `version`.
        :param roaming: See `roaming`.
        :param multipath: See `multipath`.
        :param opinion: See `opinion`.
        :param ensure_exists: See `ensure_exists`.

        """
        self.appname = appname  #: The name of the application.
        self.appauthor = appauthor
        """
        The name of the app author or distributing body for this application.

        Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it.

        """
        self.version = version
        """
        An optional version path element to append to the path.

        You might want to use this if you want multiple versions of your app to be able to run independently. If used,
        this would typically be ``<major>.<minor>``.

        """
        self.roaming = roaming
        """
        Whether to use the roaming appdata directory on Windows.

        That means that for users on a Windows network setup for roaming profiles, this user data will be synced on
        login (see
        `here <https://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).

        """
        self.multipath = multipath
        """
        An optional parameter which indicates that the entire list of data dirs should be returned.

        By default, the first item would only be returned.

        """
        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
        self.ensure_exists = ensure_exists
        """
        Optionally create the directory (and any missing parents) upon access if it does not exist.

        By default, no directories are created.

        """
|
||||
|
||||
def _append_app_name_and_version(self, *base: str) -> str:
|
||||
params = list(base[1:])
|
||||
if self.appname:
|
||||
params.append(self.appname)
|
||||
if self.version:
|
||||
params.append(self.version)
|
||||
path = os.path.join(base[0], *params) # noqa: PTH118
|
||||
self._optionally_create_directory(path)
|
||||
return path
|
||||
|
||||
def _optionally_create_directory(self, path: str) -> None:
|
||||
if self.ensure_exists:
|
||||
Path(path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _first_item_as_path_if_multipath(self, directory: str) -> Path:
|
||||
if self.multipath:
|
||||
# If multipath is True, the first path is returned.
|
||||
directory = directory.split(os.pathsep)[0]
|
||||
return Path(directory)
|
||||
|
||||
    # -- Abstract accessors: every platform subclass must provide these str directories. --

    @property
    @abstractmethod
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user"""

    @property
    @abstractmethod
    def site_data_dir(self) -> str:
        """:return: data directory shared by users"""

    @property
    @abstractmethod
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user"""

    @property
    @abstractmethod
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users"""

    @property
    @abstractmethod
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user"""

    @property
    @abstractmethod
    def site_cache_dir(self) -> str:
        """:return: cache directory shared by users"""

    @property
    @abstractmethod
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user"""

    @property
    @abstractmethod
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user"""

    @property
    @abstractmethod
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user"""

    @property
    @abstractmethod
    def user_downloads_dir(self) -> str:
        """:return: downloads directory tied to the user"""

    @property
    @abstractmethod
    def user_pictures_dir(self) -> str:
        """:return: pictures directory tied to the user"""

    @property
    @abstractmethod
    def user_videos_dir(self) -> str:
        """:return: videos directory tied to the user"""

    @property
    @abstractmethod
    def user_music_dir(self) -> str:
        """:return: music directory tied to the user"""

    @property
    @abstractmethod
    def user_desktop_dir(self) -> str:
        """:return: desktop directory tied to the user"""

    @property
    @abstractmethod
    def user_runtime_dir(self) -> str:
        """:return: runtime directory tied to the user"""

    @property
    @abstractmethod
    def site_runtime_dir(self) -> str:
        """:return: runtime directory shared by users"""
|
||||
|
||||
    # -- Concrete pathlib.Path views over the abstract str directories above. --

    @property
    def user_data_path(self) -> Path:
        """:return: data path tied to the user"""
        return Path(self.user_data_dir)

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users"""
        return Path(self.site_data_dir)

    @property
    def user_config_path(self) -> Path:
        """:return: config path tied to the user"""
        return Path(self.user_config_dir)

    @property
    def site_config_path(self) -> Path:
        """:return: config path shared by the users"""
        return Path(self.site_config_dir)

    @property
    def user_cache_path(self) -> Path:
        """:return: cache path tied to the user"""
        return Path(self.user_cache_dir)

    @property
    def site_cache_path(self) -> Path:
        """:return: cache path shared by users"""
        return Path(self.site_cache_dir)

    @property
    def user_state_path(self) -> Path:
        """:return: state path tied to the user"""
        return Path(self.user_state_dir)

    @property
    def user_log_path(self) -> Path:
        """:return: log path tied to the user"""
        return Path(self.user_log_dir)

    @property
    def user_documents_path(self) -> Path:
        """:return: documents path tied to the user"""
        return Path(self.user_documents_dir)

    @property
    def user_downloads_path(self) -> Path:
        """:return: downloads path tied to the user"""
        return Path(self.user_downloads_dir)

    @property
    def user_pictures_path(self) -> Path:
        """:return: pictures path tied to the user"""
        return Path(self.user_pictures_dir)

    @property
    def user_videos_path(self) -> Path:
        """:return: videos path tied to the user"""
        return Path(self.user_videos_dir)

    @property
    def user_music_path(self) -> Path:
        """:return: music path tied to the user"""
        return Path(self.user_music_dir)

    @property
    def user_desktop_path(self) -> Path:
        """:return: desktop path tied to the user"""
        return Path(self.user_desktop_dir)

    @property
    def user_runtime_path(self) -> Path:
        """:return: runtime path tied to the user"""
        return Path(self.user_runtime_dir)

    @property
    def site_runtime_path(self) -> Path:
        """:return: runtime path shared by users"""
        return Path(self.site_runtime_dir)
|
||||
|
||||
def iter_config_dirs(self) -> Iterator[str]:
    """:yield: all user and site configuration directories."""
    yield from (self.user_config_dir, self.site_config_dir)

def iter_data_dirs(self) -> Iterator[str]:
    """:yield: all user and site data directories."""
    yield from (self.user_data_dir, self.site_data_dir)

def iter_cache_dirs(self) -> Iterator[str]:
    """:yield: all user and site cache directories."""
    yield from (self.user_cache_dir, self.site_cache_dir)

def iter_runtime_dirs(self) -> Iterator[str]:
    """:yield: all user and site runtime directories."""
    yield from (self.user_runtime_dir, self.site_runtime_dir)

def iter_config_paths(self) -> Iterator[Path]:
    """:yield: all user and site configuration paths."""
    yield from map(Path, self.iter_config_dirs())

def iter_data_paths(self) -> Iterator[Path]:
    """:yield: all user and site data paths."""
    yield from map(Path, self.iter_data_dirs())

def iter_cache_paths(self) -> Iterator[Path]:
    """:yield: all user and site cache paths."""
    yield from map(Path, self.iter_cache_dirs())

def iter_runtime_paths(self) -> Iterator[Path]:
    """:yield: all user and site runtime paths."""
    yield from map(Path, self.iter_runtime_dirs())
144
site-packages/platformdirs/macos.py
Executable file
144
site-packages/platformdirs/macos.py
Executable file
@ -0,0 +1,144 @@
|
||||
"""macOS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class MacOS(PlatformDirsABC):
    """
    Platform directories for the macOS operating system.

    Follows the guidance from
    `Apple documentation <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
    `version <platformdirs.api.PlatformDirsABC.version>`,
    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.

    """

    @staticmethod
    def _in_home(folder: str) -> str:
        # Absolute path of *folder* directly under the current user's home directory.
        return os.path.expanduser("~/" + folder)  # noqa: PTH111

    def _homebrew_aware(self, homebrew_dir: str, system_dir: str) -> str:
        # When running a Homebrew-managed Python, the Homebrew twin of *system_dir* is
        # listed first; without ``multipath`` only the first candidate is returned.
        candidates = []
        if sys.prefix.startswith("/opt/homebrew"):
            candidates.append(self._append_app_name_and_version(homebrew_dir))
        candidates.append(self._append_app_name_and_version(system_dir))
        return os.pathsep.join(candidates) if self.multipath else candidates[0]

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
        return self._append_app_name_and_version(self._in_home("Library/Application Support"))

    @property
    def site_data_dir(self) -> str:
        """
        :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
          will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``.
          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
          the response is a multi-path string separated by ":", e.g.
          ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version``
        """
        return self._homebrew_aware("/opt/homebrew/share", "/Library/Application Support")

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_data_dir)

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `site_data_dir`"""
        return self.site_data_dir

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
        return self._append_app_name_and_version(self._in_home("Library/Caches"))

    @property
    def site_cache_dir(self) -> str:
        """
        :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
          will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``.
          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
          the response is a multi-path string separated by ":", e.g.
          ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version``
        """
        return self._homebrew_aware("/opt/homebrew/var/cache", "/Library/Caches")

    @property
    def site_cache_path(self) -> Path:
        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_cache_dir)

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
        return self._append_app_name_and_version(self._in_home("Library/Logs"))

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
        return self._in_home("Documents")

    @property
    def user_downloads_dir(self) -> str:
        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
        return self._in_home("Downloads")

    @property
    def user_pictures_dir(self) -> str:
        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
        return self._in_home("Pictures")

    @property
    def user_videos_dir(self) -> str:
        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
        return self._in_home("Movies")

    @property
    def user_music_dir(self) -> str:
        """:return: music directory tied to the user, e.g. ``~/Music``"""
        return self._in_home("Music")

    @property
    def user_desktop_dir(self) -> str:
        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
        return self._in_home("Desktop")

    @property
    def user_runtime_dir(self) -> str:
        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
        return self._append_app_name_and_version(self._in_home("Library/Caches/TemporaryItems"))

    @property
    def site_runtime_dir(self) -> str:
        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
        return self.user_runtime_dir


__all__ = [
    "MacOS",
]
0
site-packages/platformdirs/py.typed
Executable file
0
site-packages/platformdirs/py.typed
Executable file
269
site-packages/platformdirs/unix.py
Executable file
269
site-packages/platformdirs/unix.py
Executable file
@ -0,0 +1,269 @@
|
||||
"""Unix."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from configparser import ConfigParser
|
||||
from pathlib import Path
|
||||
from typing import Iterator, NoReturn
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
if sys.platform == "win32":
    # ``os.getuid`` does not exist on Windows; provide a stub so this module still
    # imports there, while failing loudly if anything actually calls it.
    def getuid() -> NoReturn:
        msg = "should only be used on Unix"
        raise RuntimeError(msg)

else:
    from os import getuid
|
||||
|
||||
class Unix(PlatformDirsABC):  # noqa: PLR0904
    """
    On Unix/Linux, we follow the `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-
    latest.html>`_.

    The spec allows overriding directories with environment variables. The examples shown are the default values,
    alongside the name of the environment variable that overrides them. Makes use of the `appname
    <platformdirs.api.PlatformDirsABC.appname>`, `version <platformdirs.api.PlatformDirsABC.version>`, `multipath
    <platformdirs.api.PlatformDirsABC.multipath>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
    <platformdirs.api.PlatformDirsABC.ensure_exists>`.

    """

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
         ``$XDG_DATA_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_DATA_HOME", "")
        # A set-but-blank variable is treated as unset, matching the XDG spec's handling of empty values.
        if not path.strip():
            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
        return self._append_app_name_and_version(path)

    @property
    def _site_data_dirs(self) -> list[str]:
        # All ``XDG_DATA_DIRS`` entries (or the spec defaults) with appname/version appended to each.
        path = os.environ.get("XDG_DATA_DIRS", "")
        if not path.strip():
            path = f"/usr/local/share{os.pathsep}/usr/share"
        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]

    @property
    def site_data_dir(self) -> str:
        """
        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
         enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the
         OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
        """
        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
        dirs = self._site_data_dirs
        if not self.multipath:
            return dirs[0]
        return os.pathsep.join(dirs)

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
         ``$XDG_CONFIG_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CONFIG_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.config")  # noqa: PTH111
        return self._append_app_name_and_version(path)

    @property
    def _site_config_dirs(self) -> list[str]:
        # All ``XDG_CONFIG_DIRS`` entries (or the ``/etc/xdg`` default) with appname/version appended to each.
        path = os.environ.get("XDG_CONFIG_DIRS", "")
        if not path.strip():
            path = "/etc/xdg"
        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]

    @property
    def site_config_dir(self) -> str:
        """
        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
         is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by
         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
        """
        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
        dirs = self._site_config_dirs
        if not self.multipath:
            return dirs[0]
        return os.pathsep.join(dirs)

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
         ``$XDG_CACHE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CACHE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.cache")  # noqa: PTH111
        return self._append_app_name_and_version(path)

    @property
    def site_cache_dir(self) -> str:
        """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
        return self._append_app_name_and_version("/var/cache")

    @property
    def user_state_dir(self) -> str:
        """
        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
         ``$XDG_STATE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_STATE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
        return self._append_app_name_and_version(path)

    @property
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
        path = self.user_state_dir
        if self.opinion:
            # Opinionated layout nests logs one level below the state directory.
            path = os.path.join(path, "log")  # noqa: PTH118
            self._optionally_create_directory(path)
        return path

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")

    @property
    def user_downloads_dir(self) -> str:
        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")

    @property
    def user_pictures_dir(self) -> str:
        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")

    @property
    def user_videos_dir(self) -> str:
        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")

    @property
    def user_music_dir(self) -> str:
        """:return: music directory tied to the user, e.g. ``~/Music``"""
        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")

    @property
    def user_desktop_dir(self) -> str:
        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
        return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
         ``$XDG_RUNTIME_DIR/$appname/$version``.

        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if
        exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR``
        is not set.
        """
        path = os.environ.get("XDG_RUNTIME_DIR", "")
        if not path.strip():
            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
                # The BSDs may lack /run; fall back to a per-user /tmp directory when /var/run/user is absent.
                path = f"/var/run/user/{getuid()}"
                if not Path(path).exists():
                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
            else:
                path = f"/run/user/{getuid()}"
        return self._append_app_name_and_version(path)

    @property
    def site_runtime_dir(self) -> str:
        """
        :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
        ``$XDG_RUNTIME_DIR/$appname/$version``.

        Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
        fall back to paths associated to the root user instead of a regular logged-in user if it's not set.

        If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
        instead.

        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
        """
        path = os.environ.get("XDG_RUNTIME_DIR", "")
        if not path.strip():
            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
                path = "/var/run"
            else:
                path = "/run"
        return self._append_app_name_and_version(path)

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_data_dir)

    @property
    def site_config_path(self) -> Path:
        """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_config_dir)

    @property
    def site_cache_path(self) -> Path:
        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_cache_dir)

    def iter_config_dirs(self) -> Iterator[str]:
        """:yield: all user and site configuration directories."""
        yield self.user_config_dir
        yield from self._site_config_dirs

    def iter_data_dirs(self) -> Iterator[str]:
        """:yield: all user and site data directories."""
        yield self.user_data_dir
        yield from self._site_data_dirs
||||
|
||||
|
||||
def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
    """Resolve a user media directory: user-dirs.dirs entry first, then the raw environment variable, then the tilde fallback."""
    configured = _get_user_dirs_folder(env_var)
    if configured is not None:
        return configured

    from_env = os.environ.get(env_var, "").strip()
    if from_env:
        return from_env
    return os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
||||
|
||||
|
||||
def _get_user_dirs_folder(key: str) -> str | None:
    """
    Return directory from user-dirs.dirs config file.

    See https://freedesktop.org/wiki/Software/xdg-user-dirs/.

    """
    config_file = Path(Unix().user_config_dir) / "user-dirs.dirs"
    if not config_file.exists():
        return None

    parser = ConfigParser()
    with config_file.open() as stream:
        # Prepend a fake section header, so ConfigParser accepts the sectionless file.
        parser.read_string(f"[top]\n{stream.read()}")

    section = parser["top"]
    if key not in section:
        return None

    raw = section[key].strip('"')
    # Handle relative home paths
    return raw.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111


__all__ = [
    "Unix",
]
16
site-packages/platformdirs/version.py
Executable file
16
site-packages/platformdirs/version.py
Executable file
@ -0,0 +1,16 @@
|
||||
# file generated by setuptools_scm
# don't change, don't track in version control
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    # At runtime the alias is a bare placeholder; the annotations below are informational only.
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '4.3.6'
__version_tuple__ = version_tuple = (4, 3, 6)
272
site-packages/platformdirs/windows.py
Executable file
272
site-packages/platformdirs/windows.py
Executable file
@ -0,0 +1,272 @@
|
||||
"""Windows."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .api import PlatformDirsABC
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
|
||||
class Windows(PlatformDirsABC):
    """
    `MSDN on where to store app data files <https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid>`_.

    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `appauthor
    <platformdirs.api.PlatformDirsABC.appauthor>`, `version <platformdirs.api.PlatformDirsABC.version>`, `roaming
    <platformdirs.api.PlatformDirsABC.roaming>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
    <platformdirs.api.PlatformDirsABC.ensure_exists>`.

    """

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
        """
        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(get_win_folder(const))
        return self._append_parts(path)

    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
        # Extend *path* with ``$appauthor\$appname[\$opinion_value][\$version]``.
        # Nothing is appended unless ``appname`` is set, and the author component is
        # skipped entirely when ``appauthor`` is the literal ``False``.
        params = []
        if self.appname:
            if self.appauthor is not False:
                author = self.appauthor or self.appname
                params.append(author)
            params.append(self.appname)
            if opinion_value is not None and self.opinion:
                params.append(opinion_value)
            if self.version:
                params.append(self.version)
        path = os.path.join(path, *params)  # noqa: PTH118
        self._optionally_create_directory(path)
        return path

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
        return self._append_parts(path)

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `site_data_dir`"""
        return self.site_data_dir

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
        """
        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
        return self._append_parts(path, opinion_value="Cache")

    @property
    def site_cache_dir(self) -> str:
        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
        return self._append_parts(path, opinion_value="Cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
        path = self.user_data_dir
        if self.opinion:
            # Opinionated layout nests logs one level below the data directory.
            path = os.path.join(path, "Logs")  # noqa: PTH118
            self._optionally_create_directory(path)
        return path

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))

    @property
    def user_downloads_dir(self) -> str:
        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))

    @property
    def user_pictures_dir(self) -> str:
        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))

    @property
    def user_videos_dir(self) -> str:
        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))

    @property
    def user_music_dir(self) -> str:
        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))

    @property
    def user_desktop_dir(self) -> str:
        """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
        return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
        """
        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
        return self._append_parts(path)

    @property
    def site_runtime_dir(self) -> str:
        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
        return self.user_runtime_dir
||||
|
||||
|
||||
def get_win_folder_from_env_vars(csidl_name: str) -> str:
|
||||
"""Get folder from environment variables."""
|
||||
result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
|
||||
if result is not None:
|
||||
return result
|
||||
|
||||
env_var_name = {
|
||||
"CSIDL_APPDATA": "APPDATA",
|
||||
"CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
|
||||
"CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
|
||||
}.get(csidl_name)
|
||||
if env_var_name is None:
|
||||
msg = f"Unknown CSIDL name: {csidl_name}"
|
||||
raise ValueError(msg)
|
||||
result = os.environ.get(env_var_name)
|
||||
if result is None:
|
||||
msg = f"Unset environment variable: {env_var_name}"
|
||||
raise ValueError(msg)
|
||||
return result
|
||||
|
||||
|
||||
def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
|
||||
"""Get a folder for a CSIDL name that does not exist as an environment variable."""
|
||||
if csidl_name == "CSIDL_PERSONAL":
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") # noqa: PTH118
|
||||
|
||||
if csidl_name == "CSIDL_DOWNLOADS":
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads") # noqa: PTH118
|
||||
|
||||
if csidl_name == "CSIDL_MYPICTURES":
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures") # noqa: PTH118
|
||||
|
||||
if csidl_name == "CSIDL_MYVIDEO":
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos") # noqa: PTH118
|
||||
|
||||
if csidl_name == "CSIDL_MYMUSIC":
|
||||
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music") # noqa: PTH118
|
||||
return None
|
||||
|
||||
|
||||
def get_win_folder_from_registry(csidl_name: str) -> str:
    """
    Get folder from the registry.

    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct
    answer for all CSIDL_* names.

    """
    shell_folder_names = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
        "CSIDL_PERSONAL": "Personal",
        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
        "CSIDL_MYPICTURES": "My Pictures",
        "CSIDL_MYVIDEO": "My Video",
        "CSIDL_MYMUSIC": "My Music",
    }
    if csidl_name not in shell_folder_names:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
        raise NotImplementedError
    import winreg  # noqa: PLC0415

    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
    directory, _ = winreg.QueryValueEx(key, shell_folder_names[csidl_name])
    return str(directory)
||||
|
||||
|
||||
def get_win_folder_via_ctypes(csidl_name: str) -> str:
    """Get folder with ctypes."""
    # There is no 'CSIDL_DOWNLOADS'; 'CSIDL_PROFILE' (40) with a literal 'Downloads'
    # suffix stands in for it.
    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
    import ctypes  # noqa: PLC0415

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
        "CSIDL_PERSONAL": 5,
        "CSIDL_MYPICTURES": 39,
        "CSIDL_MYVIDEO": 14,
        "CSIDL_MYMUSIC": 13,
        "CSIDL_DOWNLOADS": 40,
        "CSIDL_DESKTOPDIRECTORY": 16,
    }.get(csidl_name)
    if csidl_const is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    buf = ctypes.create_unicode_buffer(1024)
    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # High-bit characters: downgrade to the short (8.3) path name.
    if any(ord(ch) > 255 for ch in buf):  # noqa: PLR2004
        short_buf = ctypes.create_unicode_buffer(1024)
        if windll.kernel32.GetShortPathNameW(buf.value, short_buf, 1024):
            buf = short_buf

    result = buf.value
    if csidl_name == "CSIDL_DOWNLOADS":
        result = os.path.join(result, "Downloads")  # noqa: PTH118
    return result
||||
|
||||
|
||||
def _pick_get_win_folder() -> Callable[[str], str]:
    """Select the best available folder-resolution strategy for this interpreter."""
    try:
        import ctypes  # noqa: PLC0415
    except ImportError:
        pass
    else:
        if hasattr(ctypes, "windll"):
            return get_win_folder_via_ctypes

    try:
        import winreg  # noqa: PLC0415, F401
    except ImportError:
        return get_win_folder_from_env_vars
    return get_win_folder_from_registry


# Resolved once per process; results are memoised since each lookup hits the
# environment, registry, or shell API.
get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())


__all__ = [
    "Windows",
]
|
1
site-packages/virtualenv-20.27.1.dist-info/INSTALLER
Executable file
1
site-packages/virtualenv-20.27.1.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
82
site-packages/virtualenv-20.27.1.dist-info/METADATA
Executable file
82
site-packages/virtualenv-20.27.1.dist-info/METADATA
Executable file
@ -0,0 +1,82 @@
|
||||
Metadata-Version: 2.3
|
||||
Name: virtualenv
|
||||
Version: 20.27.1
|
||||
Summary: Virtual Python Environment builder
|
||||
Project-URL: Documentation, https://virtualenv.pypa.io
|
||||
Project-URL: Homepage, https://github.com/pypa/virtualenv
|
||||
Project-URL: Source, https://github.com/pypa/virtualenv
|
||||
Project-URL: Tracker, https://github.com/pypa/virtualenv/issues
|
||||
Maintainer-email: Bernat Gabor <gaborjbernat@gmail.com>
|
||||
License-Expression: MIT
|
||||
License-File: LICENSE
|
||||
Keywords: environments,isolated,virtual
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: MacOS :: MacOS X
|
||||
Classifier: Operating System :: Microsoft :: Windows
|
||||
Classifier: Operating System :: POSIX
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Software Development :: Libraries
|
||||
Classifier: Topic :: Software Development :: Testing
|
||||
Classifier: Topic :: Utilities
|
||||
Requires-Python: >=3.8
|
||||
Requires-Dist: distlib<1,>=0.3.7
|
||||
Requires-Dist: filelock<4,>=3.12.2
|
||||
Requires-Dist: importlib-metadata>=6.6; python_version < '3.8'
|
||||
Requires-Dist: platformdirs<5,>=3.9.1
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: furo>=2023.7.26; extra == 'docs'
|
||||
Requires-Dist: proselint>=0.13; extra == 'docs'
|
||||
Requires-Dist: sphinx!=7.3,>=7.1.2; extra == 'docs'
|
||||
Requires-Dist: sphinx-argparse>=0.4; extra == 'docs'
|
||||
Requires-Dist: sphinxcontrib-towncrier>=0.2.1a0; extra == 'docs'
|
||||
Requires-Dist: towncrier>=23.6; extra == 'docs'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: covdefaults>=2.3; extra == 'test'
|
||||
Requires-Dist: coverage-enable-subprocess>=1; extra == 'test'
|
||||
Requires-Dist: coverage>=7.2.7; extra == 'test'
|
||||
Requires-Dist: flaky>=3.7; extra == 'test'
|
||||
Requires-Dist: packaging>=23.1; extra == 'test'
|
||||
Requires-Dist: pytest-env>=0.8.2; extra == 'test'
|
||||
Requires-Dist: pytest-freezer>=0.4.8; (platform_python_implementation == 'PyPy' or (platform_python_implementation == 'CPython' and sys_platform == 'win32' and python_version >= '3.13')) and extra == 'test'
|
||||
Requires-Dist: pytest-mock>=3.11.1; extra == 'test'
|
||||
Requires-Dist: pytest-randomly>=3.12; extra == 'test'
|
||||
Requires-Dist: pytest-timeout>=2.1; extra == 'test'
|
||||
Requires-Dist: pytest>=7.4; extra == 'test'
|
||||
Requires-Dist: setuptools>=68; extra == 'test'
|
||||
Requires-Dist: time-machine>=2.10; (platform_python_implementation == 'CPython') and extra == 'test'
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# virtualenv
|
||||
|
||||
[](https://pypi.org/project/virtualenv)
|
||||
[](https://pypi.org/project/virtualenv)
|
||||
[](https://pypi.org/project/virtualenv)
|
||||
[](http://virtualenv.pypa.io)
|
||||
[](https://discord.gg/pypa)
|
||||
[](https://pepy.tech/project/virtualenv)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
[](https://github.com/pypa/virtualenv/actions/workflows/check.yaml)
|
||||
|
||||
A tool for creating isolated `virtual` python environments.
|
||||
|
||||
- [Installation](https://virtualenv.pypa.io/en/latest/installation.html)
|
||||
- [Documentation](https://virtualenv.pypa.io)
|
||||
- [Changelog](https://virtualenv.pypa.io/en/latest/changelog.html)
|
||||
- [Issues](https://github.com/pypa/virtualenv/issues)
|
||||
- [PyPI](https://pypi.org/project/virtualenv)
|
||||
- [Github](https://github.com/pypa/virtualenv)
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Everyone interacting in the virtualenv project's codebases, issue trackers, chat rooms, and mailing lists is expected to
|
||||
follow the [PSF Code of Conduct](https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md).
|
201
site-packages/virtualenv-20.27.1.dist-info/RECORD
Executable file
201
site-packages/virtualenv-20.27.1.dist-info/RECORD
Executable file
@ -0,0 +1,201 @@
|
||||
../../../bin/virtualenv,sha256=jjdOah9gQe8XBlLIZ4y6_rSWuFFa7kf81t_lZ1hJkjA,268
|
||||
virtualenv-20.27.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
virtualenv-20.27.1.dist-info/METADATA,sha256=IM2l0tQVeH_PijUdKbTzQi8Ux1JZFKzTjUW_baGybE0,4466
|
||||
virtualenv-20.27.1.dist-info/RECORD,,
|
||||
virtualenv-20.27.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv-20.27.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
||||
virtualenv-20.27.1.dist-info/entry_points.txt,sha256=AZly5CLXNaFe4Ib7lhw_NffJc6059sN8WYsVXOkdj34,1278
|
||||
virtualenv-20.27.1.dist-info/licenses/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074
|
||||
virtualenv/__init__.py,sha256=Xgxz-UdfWM8x8Waxcn9Lb50VamQJ6VtQi6CYwnSO-nM,183
|
||||
virtualenv/__main__.py,sha256=f1ujGuUms6LLteuAMa3QrCCHbKH_GkY7z1ds9hZCHdY,2662
|
||||
virtualenv/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/__pycache__/__main__.cpython-311.pyc,,
|
||||
virtualenv/__pycache__/info.cpython-311.pyc,,
|
||||
virtualenv/__pycache__/report.cpython-311.pyc,,
|
||||
virtualenv/__pycache__/version.cpython-311.pyc,,
|
||||
virtualenv/activation/__init__.py,sha256=usYC7WoMVgHu2ybcPhz6F2OPkBNS4IiuHW77Gv_s4SM,464
|
||||
virtualenv/activation/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/__pycache__/activator.cpython-311.pyc,,
|
||||
virtualenv/activation/__pycache__/via_template.cpython-311.pyc,,
|
||||
virtualenv/activation/activator.py,sha256=l_KpaUpE-sG82axgF9vz4ng3QtoJox3rkzOn1vynPTM,1419
|
||||
virtualenv/activation/bash/__init__.py,sha256=-gJPcAs3VEY2EP5URBsUaXTHrlQGamSyNO6E4ohe8pw,334
|
||||
virtualenv/activation/bash/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/bash/activate.sh,sha256=YfUoH9KSXNKKIk1FLHxO4fD6VpD3munV81Z164AsY_c,2270
|
||||
virtualenv/activation/batch/__init__.py,sha256=p9VSdsxBxWOZCExupbd5y6XMNynUCgH4JE02U3Tcq1c,756
|
||||
virtualenv/activation/batch/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/batch/activate.bat,sha256=Ry2NIB4tr2x5EezBFiet5KI4cdzlCR_yWMdHI72cSqI,1476
|
||||
virtualenv/activation/batch/deactivate.bat,sha256=07F0HsJ5cs1VpOxPyR8LFqBgNRD2TPhK4NNxF6NIRas,537
|
||||
virtualenv/activation/batch/pydoc.bat,sha256=pVuxn8mn9P_Rd0349fiBEiwIuMvfJQSfgJ2dljUT2fA,24
|
||||
virtualenv/activation/cshell/__init__.py,sha256=ZUCemt69joL0KoBSqHQILHTbVeCXjSMpOL9IIofrw6Y,336
|
||||
virtualenv/activation/cshell/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/cshell/activate.csh,sha256=Ayh2k6bO1T-gLhqSqewMa-TVjIAhxmv6VN5Mk9dInRA,1527
|
||||
virtualenv/activation/fish/__init__.py,sha256=gi0Q2mhmsHLtN_vm2Ck6m_tg6ixmJ5BGQ9SZJDjOHVE,241
|
||||
virtualenv/activation/fish/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/fish/activate.fish,sha256=JB1wsAeB_3fIwDWchEpT-BB9iNemjgl-kJhnmfV_DUs,3089
|
||||
virtualenv/activation/nushell/__init__.py,sha256=suMVpehUTmxv3MpITh7W2fa6MweRjVFs1SY-WzNHVDc,1205
|
||||
virtualenv/activation/nushell/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/nushell/activate.nu,sha256=WAL7OB7ywDM9ZtrgUUrZFlwvhr0V_24Y8ue5gqyijOc,2772
|
||||
virtualenv/activation/powershell/__init__.py,sha256=A3H7keFmOngsn821tnt1Y4Ms3lsxM62jdWVxVb6_OZ0,823
|
||||
virtualenv/activation/powershell/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/powershell/activate.ps1,sha256=PdtKOwW9w4kFwVqNyrCvCLQ1Vtz1-JVcO66WHK-dtWY,1708
|
||||
virtualenv/activation/python/__init__.py,sha256=mLxIDdV7rTjC6pfxzeCbAfsGRryoqIGFQri4hcqWzE4,830
|
||||
virtualenv/activation/python/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/activation/python/__pycache__/activate_this.cpython-311.pyc,,
|
||||
virtualenv/activation/python/activate_this.py,sha256=CHY9wIkzvFIVzSZZPeDGN5VeHgBRdfYf-9xge_h6epw,1305
|
||||
virtualenv/activation/via_template.py,sha256=RLqcAsBa79oQxyYNVbHRSkN64HXx7ItKvSv57_Ee6C4,3043
|
||||
virtualenv/app_data/__init__.py,sha256=iFqz7nzy3rVkvk9zDqkTxhdJlt7yyFnpeABfEUcoKFs,1432
|
||||
virtualenv/app_data/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/app_data/__pycache__/base.cpython-311.pyc,,
|
||||
virtualenv/app_data/__pycache__/na.cpython-311.pyc,,
|
||||
virtualenv/app_data/__pycache__/read_only.cpython-311.pyc,,
|
||||
virtualenv/app_data/__pycache__/via_disk_folder.cpython-311.pyc,,
|
||||
virtualenv/app_data/__pycache__/via_tempdir.cpython-311.pyc,,
|
||||
virtualenv/app_data/base.py,sha256=NTlg2lAEHXV3nehGihy1KLyqTorDcmPvpR9caWepFnw,2083
|
||||
virtualenv/app_data/na.py,sha256=9ye51rBg5Ywz2i58s6b6XxdBeHvuIIgGmXO65P5eLFo,1500
|
||||
virtualenv/app_data/read_only.py,sha256=C4jfyONJLVPAKUKE7QMHBAk_adfhmWK31xX_7TLxCc4,1113
|
||||
virtualenv/app_data/via_disk_folder.py,sha256=VgumA8Qg6VdUQLtfA8rcm8_gf0qd9excoJTbNHe802Q,5501
|
||||
virtualenv/app_data/via_tempdir.py,sha256=BueKPXPl6HiDpQOyEyldJj9mdDqGYLzAa-iNVbvuC2o,775
|
||||
virtualenv/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/config/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/config/__pycache__/convert.cpython-311.pyc,,
|
||||
virtualenv/config/__pycache__/env_var.cpython-311.pyc,,
|
||||
virtualenv/config/__pycache__/ini.cpython-311.pyc,,
|
||||
virtualenv/config/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/config/cli/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/config/cli/__pycache__/parser.cpython-311.pyc,,
|
||||
virtualenv/config/cli/parser.py,sha256=p3f0_8vhCX0P9mGUB6XlgKZD3rRAWevM8n9FLw4pfHE,4638
|
||||
virtualenv/config/convert.py,sha256=UU4qyc5RcCl2c4viY8vMI1rrVvUHHJ1LEPSs9h2Jfrw,2755
|
||||
virtualenv/config/env_var.py,sha256=sEC6c3u63bEZ6AC218QgzgLlxMZggBoTAq9s5xnYm9U,748
|
||||
virtualenv/config/ini.py,sha256=xewh4RqbZA8mZib4ROwQgLgeUZCGwKJH_ayx4KuKPcs,2669
|
||||
virtualenv/create/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/create/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/create/__pycache__/creator.cpython-311.pyc,,
|
||||
virtualenv/create/__pycache__/debug.cpython-311.pyc,,
|
||||
virtualenv/create/__pycache__/describe.cpython-311.pyc,,
|
||||
virtualenv/create/__pycache__/pyenv_cfg.cpython-311.pyc,,
|
||||
virtualenv/create/creator.py,sha256=RoUqm8UPz7ksnOdlImpZDRjuEI5JxmdFBx66OaznOEQ,8496
|
||||
virtualenv/create/debug.py,sha256=tjQToAjvU0MUQZEUOugoB3l7UPIKNJRPRBnaLKQDVcI,3149
|
||||
virtualenv/create/describe.py,sha256=OgGhSQJwM4NV_ECX-U9MgeKWl8qIn3Pj-ModMnumQWY,3154
|
||||
virtualenv/create/pyenv_cfg.py,sha256=RQMJxJdkQ1xyrsr95bPKwaG95g9vS4WQHwifnKysxmY,1789
|
||||
virtualenv/create/via_global_ref/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/create/via_global_ref/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/__pycache__/_virtualenv.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/__pycache__/api.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/__pycache__/store.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/__pycache__/venv.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/_virtualenv.py,sha256=DyA1HqfpADnEbT-r3vmR8oseP_M-sT7m8NXNIeoOixQ,4443
|
||||
virtualenv/create/via_global_ref/api.py,sha256=tvZTLA9YtM2Ra2Mdo4gIHphUPXHJ8bWwzH4cSQlmlcc,4252
|
||||
virtualenv/create/via_global_ref/builtin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/create/via_global_ref/builtin/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/__pycache__/builtin_way.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/__pycache__/ref.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/__pycache__/via_global_self_do.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/builtin_way.py,sha256=gcw1jAjMKLfCVRKbZKRGoqixsmGXt8YuiH2NYPKI-Lg,520
|
||||
virtualenv/create/via_global_ref/builtin/cpython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/create/via_global_ref/builtin/cpython/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/cpython/__pycache__/common.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/cpython/__pycache__/cpython3.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/cpython/__pycache__/mac_os.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/cpython/common.py,sha256=BKvU2bk-t_Ozg-QvTiSp_6bxMH1_wehXevJIspTjs0k,2530
|
||||
virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=HBt9OKqjvAXRWvz6rvyVA9WB-n6gKk_agakVzqGMd6c,4789
|
||||
virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=WRxOBRr-2_03Uo8jhkzDhzk98AoO7J-A_T015LvgzDQ,11633
|
||||
virtualenv/create/via_global_ref/builtin/pypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/create/via_global_ref/builtin/pypy/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/pypy/__pycache__/common.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/pypy/__pycache__/pypy3.cpython-311.pyc,,
|
||||
virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=WwjoRRzPqQKiRdgkLfPKiYOPWMyx4KkG2XyRDCTTZ3I,1716
|
||||
virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=srpqZzZWb7bdGSZk5YNPOPEJZ5cb7Wf8eISEcvlX7FU,2494
|
||||
virtualenv/create/via_global_ref/builtin/ref.py,sha256=96T97fbI3BECmwSHNeV8Sty469GJKYPIiZeSEWNatwc,5433
|
||||
virtualenv/create/via_global_ref/builtin/via_global_self_do.py,sha256=zTjsxnASAwMFgI8tvB3YK0FSfPM08B3O6QM7DGQgF0M,4430
|
||||
virtualenv/create/via_global_ref/store.py,sha256=OVRyaMUBT7Kh_sEy02m5VsHvh0tpxynpKxLjXDDXb1c,667
|
||||
virtualenv/create/via_global_ref/venv.py,sha256=yS4zIlP8qNl7daUhvq7LXd4xKngQahkN95-MoKj16s4,3665
|
||||
virtualenv/discovery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/discovery/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/discovery/__pycache__/builtin.cpython-311.pyc,,
|
||||
virtualenv/discovery/__pycache__/cached_py_info.cpython-311.pyc,,
|
||||
virtualenv/discovery/__pycache__/discover.cpython-311.pyc,,
|
||||
virtualenv/discovery/__pycache__/py_info.cpython-311.pyc,,
|
||||
virtualenv/discovery/__pycache__/py_spec.cpython-311.pyc,,
|
||||
virtualenv/discovery/builtin.py,sha256=OLi0PUxfvFxMZw-o3Y20Trhcno1w05QEOm6GzRNHNDo,8019
|
||||
virtualenv/discovery/cached_py_info.py,sha256=7arKTkXYQyk61mW1R1IO0S4vhH1r14sykzwCQMN2gOg,6498
|
||||
virtualenv/discovery/discover.py,sha256=St6Kh8w5PudqlB1IuAWaNpzfLfB9NrIlqpNU2h6C-lA,1173
|
||||
virtualenv/discovery/py_info.py,sha256=TayrTIOWn9YK4IwlkMPvmNDbvEdjgEXeH0IZwYlJhtY,25482
|
||||
virtualenv/discovery/py_spec.py,sha256=HxddFzlAMF8FqBcbA0ofWCjZKOBGC5bns8zukuklmZ0,4637
|
||||
virtualenv/discovery/windows/__init__.py,sha256=_9NwLhvYvPm2lj-EPUx4-Xtbdj9-buw1AXwC8VynYzA,1855
|
||||
virtualenv/discovery/windows/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/discovery/windows/__pycache__/pep514.cpython-311.pyc,,
|
||||
virtualenv/discovery/windows/pep514.py,sha256=50XtpmeWRyG_ihYMEMnqFW51tmf8bn4MYpkMy4h1-Mo,5059
|
||||
virtualenv/info.py,sha256=mryKv_4EDcEG64RjU2paXufcOPbGMsnZVaoEPothLtY,1982
|
||||
virtualenv/report.py,sha256=7be1LACPNAUQxKa0j8okAkTSxY6MDXaxEQfXDQjHr5o,1355
|
||||
virtualenv/run/__init__.py,sha256=4GXLmPoWqg7krVttq3c70twJycZSes8oeKTl0MonRqY,6244
|
||||
virtualenv/run/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/run/__pycache__/session.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/run/plugin/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__pycache__/activators.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__pycache__/base.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__pycache__/creators.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__pycache__/discovery.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/__pycache__/seeders.cpython-311.pyc,,
|
||||
virtualenv/run/plugin/activators.py,sha256=rNo8gEM7Tqk0NFV8RWX8RlrZcmHc13-8R7WF8pO2bLU,2235
|
||||
virtualenv/run/plugin/base.py,sha256=B9saI-0WZHjsQeu_yx7iIFhNoVfgoMknlKlPDVdA4j0,2096
|
||||
virtualenv/run/plugin/creators.py,sha256=hMQQX51lp-t1HdV7zvJaAIqIg_tXYVF_DYtXh2XXjZo,3626
|
||||
virtualenv/run/plugin/discovery.py,sha256=va7Xj6Bn3QD1oZJ6WqrisH6sB9xHZoK81rHmsFnWhdw,1180
|
||||
virtualenv/run/plugin/seeders.py,sha256=up_Ai5SsBzqSGfWonlQMgiIKs8NJon6WtxI8CINx1vk,1050
|
||||
virtualenv/run/session.py,sha256=ZAWRTo-GXs9JS6hZbzpR4bZJuEH8YkWa7Sg0XCXGjS0,2454
|
||||
virtualenv/seed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/seed/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/__pycache__/seeder.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/seed/embed/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/__pycache__/base_embed.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/__pycache__/pip_invoke.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/base_embed.py,sha256=nVZwY7RnZBA1wGttLC4tvOIUsYBaQUZQh6UFF7gq17s,4219
|
||||
virtualenv/seed/embed/pip_invoke.py,sha256=sPuzgNuwxFL1TUqlsJod5nhl1rasIiAZ1O4WcFQU9lo,2200
|
||||
virtualenv/seed/embed/via_app_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/seed/embed/via_app_data/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/__pycache__/via_app_data.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/pip_install/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/seed/embed/via_app_data/pip_install/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/pip_install/__pycache__/base.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/pip_install/__pycache__/copy.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/pip_install/__pycache__/symlink.cpython-311.pyc,,
|
||||
virtualenv/seed/embed/via_app_data/pip_install/base.py,sha256=KMLz18QFL9DZcod_JRXy3J4si0IDSoh_v1fP9oG9t5c,8301
|
||||
virtualenv/seed/embed/via_app_data/pip_install/copy.py,sha256=g-vzGeo0RuWiV_e5OaMvPL1Oz_oHzSzGfkhAirNFcPE,1240
|
||||
virtualenv/seed/embed/via_app_data/pip_install/symlink.py,sha256=sBSLyrsp244DYIpC2AIWdR-UoqospLEKUTsG3jQAU5g,2015
|
||||
virtualenv/seed/embed/via_app_data/via_app_data.py,sha256=zC-FUMFHH0IUzrHYbt-O6Q3CwByvoqaMGN9FKc8BKMU,5875
|
||||
virtualenv/seed/seeder.py,sha256=Ao-b5mtuQJWfpJigJ8WYHTRdQzYC0btQ1rkHn1fkcig,1155
|
||||
virtualenv/seed/wheels/__init__.py,sha256=rN6_NcnOOrsaIgPv18aYNwoTfeP10c0J5GjF8u6WMtc,204
|
||||
virtualenv/seed/wheels/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/__pycache__/acquire.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/__pycache__/bundle.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/__pycache__/periodic_update.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/__pycache__/util.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/acquire.py,sha256=pJs1Fm4uBznCAw8eig6YERr6R2IewXXfy4vZ0YD-7zg,4552
|
||||
virtualenv/seed/wheels/bundle.py,sha256=Ub8PxmsuovpuB1ns-74BuAGcuxjUEL_gfWoOoMW1Q28,1863
|
||||
virtualenv/seed/wheels/embed/__init__.py,sha256=xfXVs5wmeRT4KC7hKohmKSXSeT4-ga9sEAYTZc-UWOI,1716
|
||||
virtualenv/seed/wheels/embed/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/seed/wheels/embed/pip-24.3.1-py3-none-any.whl,sha256=N5BiR4AII2X0dUnQMvN3DusrHovR97LgLazhr6NhtO0,1822182
|
||||
virtualenv/seed/wheels/embed/setuptools-75.2.0-py3-none-any.whl,sha256=p_y2b2i02ejma0L5h2FQozcVWPmPoyIi_6pbztdkBvg,1249825
|
||||
virtualenv/seed/wheels/embed/wheel-0.44.0-py3-none-any.whl,sha256=I3apDJjMM30YYjUnqXwxeXvQK60AM9QVRwQ6HL--RI8,67059
|
||||
virtualenv/seed/wheels/periodic_update.py,sha256=0bJ0pkRQgPF4PxXVe3kP_wUiGiz5oiEb99ywoFgoQ58,15560
|
||||
virtualenv/seed/wheels/util.py,sha256=cNZBMglOXir5LKC8M1-s2GZHyCt65vJKegyL7kg6Zg0,3962
|
||||
virtualenv/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
virtualenv/util/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/util/__pycache__/error.cpython-311.pyc,,
|
||||
virtualenv/util/__pycache__/lock.cpython-311.pyc,,
|
||||
virtualenv/util/__pycache__/zipapp.cpython-311.pyc,,
|
||||
virtualenv/util/error.py,sha256=qglTAnbZCj5qKaK9iNka44gfOA0vc2S0FRhLTL4gijo,323
|
||||
virtualenv/util/lock.py,sha256=k_WecmIqb0hqrAenHb_tk7mMM9cDFNY88ru67PZDwgc,4749
|
||||
virtualenv/util/path/__init__.py,sha256=Ujv6toeiy-5uw4yPfnG7ZFvoxYs8J4JMQOiT3IWoimg,340
|
||||
virtualenv/util/path/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/util/path/__pycache__/_permission.cpython-311.pyc,,
|
||||
virtualenv/util/path/__pycache__/_sync.cpython-311.pyc,,
|
||||
virtualenv/util/path/__pycache__/_win.cpython-311.pyc,,
|
||||
virtualenv/util/path/_permission.py,sha256=cJntuU9FT_VjaO_AvKArDud4-ZmgdUEY9bBGWd83-cs,665
|
||||
virtualenv/util/path/_sync.py,sha256=u2Hqn_fd85WgeSpcVin0fHV_K-erJBb3f8QaUlExRE0,2088
|
||||
virtualenv/util/path/_win.py,sha256=sSbUxCZ2Bi02oTka3SThR91c3vHadI-OWzy3Mh5ip5g,807
|
||||
virtualenv/util/subprocess/__init__.py,sha256=BUsxWu7PUugQGXTl9dq0eem3tjkug7_1H0yWgCV2VVM,735
|
||||
virtualenv/util/subprocess/__pycache__/__init__.cpython-311.pyc,,
|
||||
virtualenv/util/zipapp.py,sha256=W0Dwiq6bPIWV6Z1L42XnbEK9kRYdMr2zpJG6CgKXz2I,1209
|
||||
virtualenv/version.py,sha256=BC6J3C_bphUOD8syKbRH6QwWsoJ7zW7Z5nA7U0UBxW4,415
|
0
site-packages/virtualenv-20.27.1.dist-info/REQUESTED
Executable file
0
site-packages/virtualenv-20.27.1.dist-info/REQUESTED
Executable file
4
site-packages/virtualenv-20.27.1.dist-info/WHEEL
Executable file
4
site-packages/virtualenv-20.27.1.dist-info/WHEEL
Executable file
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.25.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
27
site-packages/virtualenv-20.27.1.dist-info/entry_points.txt
Executable file
27
site-packages/virtualenv-20.27.1.dist-info/entry_points.txt
Executable file
@ -0,0 +1,27 @@
|
||||
[console_scripts]
|
||||
virtualenv = virtualenv.__main__:run_with_catch
|
||||
|
||||
[virtualenv.activate]
|
||||
bash = virtualenv.activation.bash:BashActivator
|
||||
batch = virtualenv.activation.batch:BatchActivator
|
||||
cshell = virtualenv.activation.cshell:CShellActivator
|
||||
fish = virtualenv.activation.fish:FishActivator
|
||||
nushell = virtualenv.activation.nushell:NushellActivator
|
||||
powershell = virtualenv.activation.powershell:PowerShellActivator
|
||||
python = virtualenv.activation.python:PythonActivator
|
||||
|
||||
[virtualenv.create]
|
||||
cpython3-mac-brew = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython3macOsBrew
|
||||
cpython3-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython3macOsFramework
|
||||
cpython3-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Posix
|
||||
cpython3-win = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Windows
|
||||
pypy3-posix = virtualenv.create.via_global_ref.builtin.pypy.pypy3:PyPy3Posix
|
||||
pypy3-win = virtualenv.create.via_global_ref.builtin.pypy.pypy3:Pypy3Windows
|
||||
venv = virtualenv.create.via_global_ref.venv:Venv
|
||||
|
||||
[virtualenv.discovery]
|
||||
builtin = virtualenv.discovery.builtin:Builtin
|
||||
|
||||
[virtualenv.seed]
|
||||
app-data = virtualenv.seed.embed.via_app_data.via_app_data:FromAppData
|
||||
pip = virtualenv.seed.embed.pip_invoke:PipInvoke
|
20
site-packages/virtualenv-20.27.1.dist-info/licenses/LICENSE
Executable file
20
site-packages/virtualenv-20.27.1.dist-info/licenses/LICENSE
Executable file
@ -0,0 +1,20 @@
|
||||
Copyright (c) 2020-202x The virtualenv developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
10
site-packages/virtualenv/__init__.py
Executable file
10
site-packages/virtualenv/__init__.py
Executable file
@ -0,0 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .run import cli_run, session_via_cli
|
||||
from .version import __version__
|
||||
|
||||
__all__ = [
|
||||
"__version__",
|
||||
"cli_run",
|
||||
"session_via_cli",
|
||||
]
|
70
site-packages/virtualenv/__main__.py
Executable file
70
site-packages/virtualenv/__main__.py
Executable file
@ -0,0 +1,70 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from timeit import default_timer
|
||||
|
||||
|
||||
def run(args=None, options=None, env=None):
|
||||
env = os.environ if env is None else env
|
||||
start = default_timer()
|
||||
from virtualenv.run import cli_run # noqa: PLC0415
|
||||
from virtualenv.util.error import ProcessCallFailedError # noqa: PLC0415
|
||||
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
try:
|
||||
session = cli_run(args, options, env)
|
||||
logging.warning(LogSession(session, start))
|
||||
except ProcessCallFailedError as exception:
|
||||
print(f"subprocess call failed for {exception.cmd} with code {exception.code}") # noqa: T201
|
||||
print(exception.out, file=sys.stdout, end="") # noqa: T201
|
||||
print(exception.err, file=sys.stderr, end="") # noqa: T201
|
||||
raise SystemExit(exception.code) # noqa: B904
|
||||
|
||||
|
||||
class LogSession:
|
||||
def __init__(self, session, start) -> None:
|
||||
self.session = session
|
||||
self.start = start
|
||||
|
||||
def __str__(self) -> str:
|
||||
spec = self.session.creator.interpreter.spec
|
||||
elapsed = (default_timer() - self.start) * 1000
|
||||
lines = [
|
||||
f"created virtual environment {spec} in {elapsed:.0f}ms",
|
||||
f" creator {self.session.creator!s}",
|
||||
]
|
||||
if self.session.seeder.enabled:
|
||||
lines.append(f" seeder {self.session.seeder!s}")
|
||||
path = self.session.creator.purelib.iterdir()
|
||||
packages = sorted("==".join(i.stem.split("-")) for i in path if i.suffix == ".dist-info")
|
||||
lines.append(f" added seed packages: {', '.join(packages)}")
|
||||
|
||||
if self.session.activators:
|
||||
lines.append(f" activators {','.join(i.__class__.__name__ for i in self.session.activators)}")
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def run_with_catch(args=None, env=None):
|
||||
from virtualenv.config.cli.parser import VirtualEnvOptions # noqa: PLC0415
|
||||
|
||||
env = os.environ if env is None else env
|
||||
options = VirtualEnvOptions()
|
||||
try:
|
||||
run(args, options, env)
|
||||
except (KeyboardInterrupt, SystemExit, Exception) as exception:
|
||||
try:
|
||||
if getattr(options, "with_traceback", False):
|
||||
raise
|
||||
if not (isinstance(exception, SystemExit) and exception.code == 0):
|
||||
logging.error("%s: %s", type(exception).__name__, exception) # noqa: TRY400
|
||||
code = exception.code if isinstance(exception, SystemExit) else 1
|
||||
sys.exit(code)
|
||||
finally:
|
||||
logging.shutdown() # force flush of log messages before the trace is printed
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cov
|
||||
run_with_catch() # pragma: no cov
|
19
site-packages/virtualenv/activation/__init__.py
Executable file
19
site-packages/virtualenv/activation/__init__.py
Executable file
@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .bash import BashActivator
|
||||
from .batch import BatchActivator
|
||||
from .cshell import CShellActivator
|
||||
from .fish import FishActivator
|
||||
from .nushell import NushellActivator
|
||||
from .powershell import PowerShellActivator
|
||||
from .python import PythonActivator
|
||||
|
||||
__all__ = [
|
||||
"BashActivator",
|
||||
"BatchActivator",
|
||||
"CShellActivator",
|
||||
"FishActivator",
|
||||
"NushellActivator",
|
||||
"PowerShellActivator",
|
||||
"PythonActivator",
|
||||
]
|
50
site-packages/virtualenv/activation/activator.py
Executable file
50
site-packages/virtualenv/activation/activator.py
Executable file
@ -0,0 +1,50 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class Activator(ABC):
|
||||
"""Generates activate script for the virtual environment."""
|
||||
|
||||
def __init__(self, options) -> None:
|
||||
"""
|
||||
Create a new activator generator.
|
||||
|
||||
:param options: the parsed options as defined within :meth:`add_parser_arguments`
|
||||
"""
|
||||
self.flag_prompt = os.path.basename(os.getcwd()) if options.prompt == "." else options.prompt
|
||||
|
||||
@classmethod
|
||||
def supports(cls, interpreter): # noqa: ARG003
|
||||
"""
|
||||
Check if the activation script is supported in the given interpreter.
|
||||
|
||||
:param interpreter: the interpreter we need to support
|
||||
:return: ``True`` if supported, ``False`` otherwise
|
||||
"""
|
||||
return True
|
||||
|
||||
@classmethod # noqa: B027
|
||||
def add_parser_arguments(cls, parser, interpreter):
|
||||
"""
|
||||
Add CLI arguments for this activation script.
|
||||
|
||||
:param parser: the CLI parser
|
||||
:param interpreter: the interpreter this virtual environment is based of
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def generate(self, creator):
|
||||
"""
|
||||
Generate activate script for the given creator.
|
||||
|
||||
:param creator: the creator (based of :class:`virtualenv.create.creator.Creator`) we used to create this \
|
||||
virtual environment
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
__all__ = [
|
||||
"Activator",
|
||||
]
|
18
site-packages/virtualenv/activation/bash/__init__.py
Executable file
18
site-packages/virtualenv/activation/bash/__init__.py
Executable file
@ -0,0 +1,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class BashActivator(ViaTemplateActivator):
|
||||
def templates(self):
|
||||
yield "activate.sh"
|
||||
|
||||
def as_name(self, template):
|
||||
return Path(template).stem
|
||||
|
||||
|
||||
__all__ = [
|
||||
"BashActivator",
|
||||
]
|
87
site-packages/virtualenv/activation/bash/activate.sh
Executable file
87
site-packages/virtualenv/activation/bash/activate.sh
Executable file
@ -0,0 +1,87 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
#
# NOTE: the __DOUBLE_UNDERSCORE__ tokens below are placeholders substituted by
# virtualenv's template activator at environment-creation time.

if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1 || true

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # The hash command must be called to get it to forget past
    # commands. Without forgetting past commands the $PATH changes
    # we made may not be respected
    hash -r 2>/dev/null

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV=__VIRTUAL_ENV__
# On Cygwin/MSYS translate the Windows path to a POSIX path so PATH works
if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then
    VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"__BIN_NAME__":$PATH"
export PATH

if [ "x"__VIRTUAL_PROMPT__ != x ] ; then
    VIRTUAL_ENV_PROMPT=__VIRTUAL_PROMPT__
else
    VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV_PROMPT

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    PS1="(${VIRTUAL_ENV_PROMPT}) ${PS1-}"
    export PS1
fi

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# The hash command must be called to get it to forget past
# commands. Without forgetting past commands the $PATH changes
# we made may not be respected
hash -r 2>/dev/null || true
30
site-packages/virtualenv/activation/batch/__init__.py
Executable file
30
site-packages/virtualenv/activation/batch/__init__.py
Executable file
@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class BatchActivator(ViaTemplateActivator):
    """Generates Windows ``cmd.exe`` batch activation scripts."""

    @classmethod
    def supports(cls, interpreter):
        """Batch files are only meaningful on Windows interpreters."""
        return interpreter.os == "nt"

    def templates(self):
        """Yield every batch template shipped with this activator."""
        yield from ("activate.bat", "deactivate.bat", "pydoc.bat")

    @staticmethod
    def quote(string):
        """Return *string* unchanged - batch templates need no shell quoting."""
        return string

    def instantiate_template(self, replacements, template, creator):
        """Render the template, then force every newline to ``\\r\\n`` as required by batch."""
        rendered = super().instantiate_template(replacements, template, creator)
        # collapse any platform line ending to \n first, then expand uniformly
        unified = rendered.replace(os.linesep, "\n")
        return unified.replace("\n", os.linesep)


__all__ = [
    "BatchActivator",
]
52
site-packages/virtualenv/activation/batch/activate.bat
Executable file
52
site-packages/virtualenv/activation/batch/activate.bat
Executable file
@ -0,0 +1,52 @@
|
||||
@REM This file is UTF-8 encoded, so we need to update the current code page while executing it
@REM The __DOUBLE_UNDERSCORE__ tokens are placeholders filled in by virtualenv at generation time.
@echo off
@for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do (
    @set _OLD_CODEPAGE=%%a
)
@if defined _OLD_CODEPAGE (
    "%SystemRoot%\System32\chcp.com" 65001 > nul
)

@set "VIRTUAL_ENV=__VIRTUAL_ENV__"

@set "VIRTUAL_ENV_PROMPT=__VIRTUAL_PROMPT__"
@if NOT DEFINED VIRTUAL_ENV_PROMPT (
    @for %%d in ("%VIRTUAL_ENV%") do @set "VIRTUAL_ENV_PROMPT=%%~nxd"
)

@if defined _OLD_VIRTUAL_PROMPT (
    @set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
) else (
    @if not defined PROMPT (
        @set "PROMPT=$P$G"
    )
    @if not defined VIRTUAL_ENV_DISABLE_PROMPT (
        @set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
    )
)
@if not defined VIRTUAL_ENV_DISABLE_PROMPT (
    @set "PROMPT=(%VIRTUAL_ENV_PROMPT%) %PROMPT%"
)

@REM Don't use () to avoid problems with them in %PATH%
@if defined _OLD_VIRTUAL_PYTHONHOME @goto ENDIFVHOME
    @set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
:ENDIFVHOME

@set PYTHONHOME=

@REM if defined _OLD_VIRTUAL_PATH (
@if not defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH1
    @set "PATH=%_OLD_VIRTUAL_PATH%"
:ENDIFVPATH1
@REM ) else (
@if defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH2
    @set "_OLD_VIRTUAL_PATH=%PATH%"
:ENDIFVPATH2

@set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"

@REM restore the console code page saved at the top
@if defined _OLD_CODEPAGE (
    "%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul
    @set _OLD_CODEPAGE=
)
18
site-packages/virtualenv/activation/batch/deactivate.bat
Executable file
18
site-packages/virtualenv/activation/batch/deactivate.bat
Executable file
@ -0,0 +1,18 @@
|
||||
@REM Undo what activate.bat did: clear env vars and restore saved originals.
@set VIRTUAL_ENV=
@set VIRTUAL_ENV_PROMPT=

@REM Don't use () to avoid problems with them in %PATH%
@if not defined _OLD_VIRTUAL_PROMPT @goto ENDIFVPROMPT
    @set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
    @set _OLD_VIRTUAL_PROMPT=
:ENDIFVPROMPT

@if not defined _OLD_VIRTUAL_PYTHONHOME @goto ENDIFVHOME
    @set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
    @set _OLD_VIRTUAL_PYTHONHOME=
:ENDIFVHOME

@if not defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH
    @set "PATH=%_OLD_VIRTUAL_PATH%"
    @set _OLD_VIRTUAL_PATH=
:ENDIFVPATH
1
site-packages/virtualenv/activation/batch/pydoc.bat
Executable file
1
site-packages/virtualenv/activation/batch/pydoc.bat
Executable file
@ -0,0 +1 @@
|
||||
@REM Forward all arguments to the virtualenv's pydoc module.
python.exe -m pydoc %*
|
17
site-packages/virtualenv/activation/cshell/__init__.py
Executable file
17
site-packages/virtualenv/activation/cshell/__init__.py
Executable file
@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class CShellActivator(ViaTemplateActivator):
    """Activation-script generator for csh/tcsh shells."""

    @classmethod
    def supports(cls, interpreter):
        """csh activation is offered everywhere except Windows."""
        return interpreter.os != "nt"

    def templates(self):
        """Yield the single csh template."""
        yield "activate.csh"


__all__ = [
    "CShellActivator",
]
55
site-packages/virtualenv/activation/cshell/activate.csh
Executable file
55
site-packages/virtualenv/activation/cshell/activate.csh
Executable file
@ -0,0 +1,55 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# The __DOUBLE_UNDERSCORE__ tokens are placeholders filled in by virtualenv.

set newline='\
'

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV __VIRTUAL_ENV__

set _OLD_VIRTUAL_PATH="$PATH:q"
setenv PATH "$VIRTUAL_ENV:q/"__BIN_NAME__":$PATH:q"



if (__VIRTUAL_PROMPT__ != "") then
    setenv VIRTUAL_ENV_PROMPT __VIRTUAL_PROMPT__
else
    setenv VIRTUAL_ENV_PROMPT "$VIRTUAL_ENV:t:q"
endif

if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then
    if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then
        set do_prompt = "1"
    else
        set do_prompt = "0"
    endif
else
    set do_prompt = "1"
endif

if ( $do_prompt == "1" ) then
    # Could be in a non-interactive environment,
    # in which case, $prompt is undefined and we wouldn't
    # care about the prompt anyway.
    if ( $?prompt ) then
        set _OLD_VIRTUAL_PROMPT="$prompt:q"
        if ( "$prompt:q" =~ *"$newline:q"* ) then
            :
        else
            set prompt = '('"$VIRTUAL_ENV_PROMPT:q"') '"$prompt:q"
        endif
    endif
endif

unset env_name
unset do_prompt

alias pydoc python -m pydoc

rehash
13
site-packages/virtualenv/activation/fish/__init__.py
Executable file
13
site-packages/virtualenv/activation/fish/__init__.py
Executable file
@ -0,0 +1,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class FishActivator(ViaTemplateActivator):
    """Activation-script generator for the fish shell."""

    def templates(self):
        """Yield the single fish template."""
        yield "activate.fish"


__all__ = [
    "FishActivator",
]
103
site-packages/virtualenv/activation/fish/activate.fish
Executable file
103
site-packages/virtualenv/activation/fish/activate.fish
Executable file
@ -0,0 +1,103 @@
|
||||
# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
# Do not run it directly.
# The __DOUBLE_UNDERSCORE__ tokens are placeholders filled in by virtualenv.

function _bashify_path -d "Converts a fish path to something bash can recognize"
    set fishy_path $argv
    set bashy_path $fishy_path[1]
    for path_part in $fishy_path[2..-1]
        set bashy_path "$bashy_path:$path_part"
    end
    echo $bashy_path
end

function _fishify_path -d "Converts a bash path to something fish can recognize"
    echo $argv | tr ':' '\n'
end

function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
        if test (echo $FISH_VERSION | head -c 1) -lt 3
            set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
        else
            set -gx PATH $_OLD_VIRTUAL_PATH
        end
        set -e _OLD_VIRTUAL_PATH
    end

    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME"
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        and functions -q _old_fish_prompt
        # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
        set -l fish_function_path

        # Erase virtualenv's `fish_prompt` and restore the original.
        functions -e fish_prompt
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT

    if test "$argv[1]" != 'nondestructive'
        # Self-destruct!
        functions -e pydoc
        functions -e deactivate
        functions -e _bashify_path
        functions -e _fishify_path
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV __VIRTUAL_ENV__

# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
if test (echo $FISH_VERSION | head -c 1) -lt 3
    set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
else
    set -gx _OLD_VIRTUAL_PATH $PATH
end
set -gx PATH "$VIRTUAL_ENV"'/'__BIN_NAME__ $PATH

# Prompt override provided?
# If not, just use the environment name.
if test -n __VIRTUAL_PROMPT__
    set -gx VIRTUAL_ENV_PROMPT __VIRTUAL_PROMPT__
else
    set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV")
end

# Unset `$PYTHONHOME` if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

function pydoc
    python -m pydoc $argv
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # Copy the current `fish_prompt` function as `_old_fish_prompt`.
    functions -c fish_prompt _old_fish_prompt

    function fish_prompt
        # Run the user's prompt first; it might depend on (pipe)status.
        set -l prompt (_old_fish_prompt)

        printf '(%s) ' $VIRTUAL_ENV_PROMPT

        string join -- \n $prompt # handle multi-line prompts
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
40
site-packages/virtualenv/activation/nushell/__init__.py
Executable file
40
site-packages/virtualenv/activation/nushell/__init__.py
Executable file
@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class NushellActivator(ViaTemplateActivator):
    """Activation-overlay generator for nushell."""

    def templates(self):
        """Yield the single nushell template."""
        yield "activate.nu"

    @staticmethod
    def quote(string):
        """
        Nushell supports raw strings like: r###'this is a string'###.

        Find the longest run of consecutive ``#`` characters in *string* and
        fence the raw string with one more ``#`` than that, so the content can
        never terminate the literal early.
        """
        longest_run = 0
        run = 0
        for ch in string:
            if ch == "#":
                run += 1
                longest_run = max(longest_run, run)
            else:
                run = 0
        fence = "#" * (longest_run + 1)
        return f"r{fence}'{string}'{fence}"

    def replacements(self, creator, dest_folder):  # noqa: ARG002
        """Build the placeholder mapping (nushell needs no ``__PATH_SEP__`` entry)."""
        prompt = "" if self.flag_prompt is None else self.flag_prompt
        return {
            "__VIRTUAL_PROMPT__": prompt,
            "__VIRTUAL_ENV__": str(creator.dest),
            "__VIRTUAL_NAME__": creator.env_name,
            "__BIN_NAME__": str(creator.bin_dir.relative_to(creator.dest)),
        }


__all__ = [
    "NushellActivator",
]
96
site-packages/virtualenv/activation/nushell/activate.nu
Executable file
96
site-packages/virtualenv/activation/nushell/activate.nu
Executable file
@ -0,0 +1,96 @@
|
||||
# virtualenv activation module
# Activate with `overlay use activate.nu`
# Deactivate with `deactivate`, as usual
#
# To customize the overlay name, you can call `overlay use activate.nu as foo`,
# but then simply `deactivate` won't work because it is just an alias to hide
# the "activate" overlay. You'd need to call `overlay hide foo` manually.
#
# The __DOUBLE_UNDERSCORE__ tokens are placeholders filled in by virtualenv.

export-env {
    def is-string [x] {
        ($x | describe) == 'string'
    }

    def has-env [...names] {
        $names | each {|n|
            $n in $env
        } | all {|i| $i == true}
    }

    # Emulates a `test -z`, but better as it handles e.g 'false'
    def is-env-true [name: string] {
        if (has-env $name) {
            # Try to parse 'true', '0', '1', and fail if not convertible
            let parsed = (do -i { $env | get $name | into bool })
            if ($parsed | describe) == 'bool' {
                $parsed
            } else {
                not ($env | get -i $name | is-empty)
            }
        } else {
            false
        }
    }

    let virtual_env = __VIRTUAL_ENV__
    let bin = __BIN_NAME__

    let is_windows = ($nu.os-info.family) == 'windows'
    let path_name = (if (has-env 'Path') {
            'Path'
        } else {
            'PATH'
        }
    )

    let venv_path = ([$virtual_env $bin] | path join)
    let new_path = ($env | get $path_name | prepend $venv_path)

    # If there is no default prompt, then use the env name instead
    let virtual_env_prompt = (if (__VIRTUAL_PROMPT__ | is-empty) {
        ($virtual_env | path basename)
    } else {
        __VIRTUAL_PROMPT__
    })

    let new_env = {
        $path_name : $new_path
        VIRTUAL_ENV : $virtual_env
        VIRTUAL_ENV_PROMPT : $virtual_env_prompt
    }

    let new_env = (if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') {
        $new_env
    } else {
        # Creating the new prompt for the session
        let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) '

        # Back up the old prompt builder
        let old_prompt_command = (if (has-env 'PROMPT_COMMAND') {
            $env.PROMPT_COMMAND
        } else {
            ''
        })

        let new_prompt = (if (has-env 'PROMPT_COMMAND') {
            if 'closure' in ($old_prompt_command | describe) {
                {|| $'($virtual_prefix)(do $old_prompt_command)' }
            } else {
                {|| $'($virtual_prefix)($old_prompt_command)' }
            }
        } else {
            {|| $'($virtual_prefix)' }
        })

        $new_env | merge {
            PROMPT_COMMAND : $new_prompt
            VIRTUAL_PREFIX : $virtual_prefix
        }
    })

    # Environment variables that will be loaded as the virtual env
    load-env $new_env
}

export alias pydoc = python -m pydoc
export alias deactivate = overlay hide activate
25
site-packages/virtualenv/activation/powershell/__init__.py
Executable file
25
site-packages/virtualenv/activation/powershell/__init__.py
Executable file
@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class PowerShellActivator(ViaTemplateActivator):
    """Activation-script generator for PowerShell."""

    def templates(self):
        """Yield the single PowerShell template."""
        yield "activate.ps1"

    @staticmethod
    def quote(string):
        """
        This should satisfy PowerShell quoting rules [1], unless the quoted
        string is passed directly to Windows native commands [2].

        [1]: https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_quoting_rules
        [2]: https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_parsing#passing-arguments-that-contain-quote-characters
        """  # noqa: D205
        # a single-quoted PowerShell literal escapes ' by doubling it
        escaped = string.replace("'", "''")
        return "'" + escaped + "'"


__all__ = [
    "PowerShellActivator",
]
61
site-packages/virtualenv/activation/powershell/activate.ps1
Executable file
61
site-packages/virtualenv/activation/powershell/activate.ps1
Executable file
@ -0,0 +1,61 @@
|
||||
# Activation script for PowerShell; the __DOUBLE_UNDERSCORE__ tokens are
# placeholders filled in by virtualenv at environment-creation time.
$script:THIS_PATH = $myinvocation.mycommand.path
$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent

function global:deactivate([switch] $NonDestructive) {
    if (Test-Path variable:_OLD_VIRTUAL_PATH) {
        $env:PATH = $variable:_OLD_VIRTUAL_PATH
        Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global
    }

    if (Test-Path function:_old_virtual_prompt) {
        $function:prompt = $function:_old_virtual_prompt
        Remove-Item function:\_old_virtual_prompt
    }

    if ($env:VIRTUAL_ENV) {
        Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue
    }

    if ($env:VIRTUAL_ENV_PROMPT) {
        Remove-Item env:VIRTUAL_ENV_PROMPT -ErrorAction SilentlyContinue
    }

    if (!$NonDestructive) {
        # Self destruct!
        Remove-Item function:deactivate
        Remove-Item function:pydoc
    }
}

function global:pydoc {
    python -m pydoc $args
}

# unset irrelevant variables
deactivate -nondestructive

$VIRTUAL_ENV = $BASE_DIR
$env:VIRTUAL_ENV = $VIRTUAL_ENV

if (__VIRTUAL_PROMPT__ -ne "") {
    $env:VIRTUAL_ENV_PROMPT = __VIRTUAL_PROMPT__
}
else {
    $env:VIRTUAL_ENV_PROMPT = $( Split-Path $env:VIRTUAL_ENV -Leaf )
}

New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH

$env:PATH = "$env:VIRTUAL_ENV/" + __BIN_NAME__ + __PATH_SEP__ + $env:PATH
if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) {
    function global:_old_virtual_prompt {
        ""
    }
    $function:_old_virtual_prompt = $function:prompt

    function global:prompt {
        # Add the custom prefix to the existing prompt
        $previous_prompt_value = & $function:_old_virtual_prompt
        ("(" + $env:VIRTUAL_ENV_PROMPT + ") " + $previous_prompt_value)
    }
}
32
site-packages/virtualenv/activation/python/__init__.py
Executable file
32
site-packages/virtualenv/activation/python/__init__.py
Executable file
@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections import OrderedDict
|
||||
|
||||
from virtualenv.activation.via_template import ViaTemplateActivator
|
||||
|
||||
|
||||
class PythonActivator(ViaTemplateActivator):
    """Writes ``activate_this.py`` so an already-running interpreter can activate the env."""

    def templates(self):
        """Yield the single Python template."""
        yield "activate_this.py"

    @staticmethod
    def quote(string):
        """Quote via ``repr`` - the rendered template is Python source, so repr yields a valid literal."""
        return repr(string)

    def replacements(self, creator, dest_folder):
        """Extend the base replacements with the env's library folders (order-preserving, de-duplicated)."""
        base = super().replacements(creator, dest_folder)
        relative_libs = (os.path.relpath(str(lib), str(dest_folder)) for lib in creator.libs)
        unique_libs = OrderedDict((rel, None) for rel in relative_libs)
        base.update(
            {
                "__LIB_FOLDERS__": os.pathsep.join(unique_libs),
                "__DECODE_PATH__": "",
            },
        )
        return base


__all__ = [
    "PythonActivator",
]
38
site-packages/virtualenv/activation/python/activate_this.py
Executable file
38
site-packages/virtualenv/activation/python/activate_this.py
Executable file
@ -0,0 +1,38 @@
|
||||
"""
|
||||
Activate virtualenv for current interpreter:
|
||||
|
||||
import runpy
|
||||
runpy.run_path(this_file)
|
||||
|
||||
This can be used when you must use an existing Python interpreter, not the virtualenv bin/python.
|
||||
""" # noqa: D415
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import site
|
||||
import sys
|
||||
|
||||
try:
|
||||
abs_file = os.path.abspath(__file__)
|
||||
except NameError as exc:
|
||||
msg = "You must use import runpy; runpy.run_path(this_file)"
|
||||
raise AssertionError(msg) from exc
|
||||
|
||||
bin_dir = os.path.dirname(abs_file)
|
||||
base = bin_dir[: -len(__BIN_NAME__) - 1] # strip away the bin part from the __file__, plus the path separator
|
||||
|
||||
# prepend bin to PATH (this file is inside the bin directory)
|
||||
os.environ["PATH"] = os.pathsep.join([bin_dir, *os.environ.get("PATH", "").split(os.pathsep)])
|
||||
os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory
|
||||
os.environ["VIRTUAL_ENV_PROMPT"] = __VIRTUAL_PROMPT__ or os.path.basename(base)
|
||||
|
||||
# add the virtual environments libraries to the host python import mechanism
|
||||
prev_length = len(sys.path)
|
||||
for lib in __LIB_FOLDERS__.split(os.pathsep):
|
||||
path = os.path.realpath(os.path.join(bin_dir, lib))
|
||||
site.addsitedir(path.decode("utf-8") if __DECODE_PATH__ else path)
|
||||
sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length]
|
||||
|
||||
sys.real_prefix = sys.prefix
|
||||
sys.prefix = base
|
87
site-packages/virtualenv/activation/via_template.py
Executable file
87
site-packages/virtualenv/activation/via_template.py
Executable file
@ -0,0 +1,87 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from .activator import Activator
|
||||
|
||||
if sys.version_info >= (3, 10):
    from importlib.resources import files

    # importlib.resources.read_binary is deprecated on newer Pythons, so
    # provide an equivalent shim on top of the files() traversable API.
    def read_binary(module_name: str, filename: str) -> bytes:
        return (files(module_name) / filename).read_bytes()

else:
    from importlib.resources import read_binary
||||
|
||||
|
||||
class ViaTemplateActivator(Activator, ABC):
    """Base for activators that render packaged text templates into the env's bin directory."""

    @abstractmethod
    def templates(self):
        """Yield the template file names this activator renders."""
        raise NotImplementedError

    @staticmethod
    def quote(string):
        """
        Quote strings in the activation script.

        :param string: the string to quote
        :return: quoted string that works in the activation script
        """
        return shlex.quote(string)

    def generate(self, creator):
        """Render every template into the creator's bin dir and record the prompt in pyvenv.cfg."""
        target_folder = creator.bin_dir
        mapping = self.replacements(creator, target_folder)
        written = self._generate(mapping, self.templates(), target_folder, creator)
        if self.flag_prompt is not None:
            creator.pyenv_cfg["prompt"] = self.flag_prompt
        return written

    def replacements(self, creator, dest_folder):  # noqa: ARG002
        """Return the placeholder -> value mapping applied to every template."""
        prompt = "" if self.flag_prompt is None else self.flag_prompt
        return {
            "__VIRTUAL_PROMPT__": prompt,
            "__VIRTUAL_ENV__": str(creator.dest),
            "__VIRTUAL_NAME__": creator.env_name,
            "__BIN_NAME__": str(creator.bin_dir.relative_to(creator.dest)),
            "__PATH_SEP__": os.pathsep,
        }

    def _generate(self, replacements, templates, to_folder, creator):
        """Render each template into *to_folder*; return the list of paths written."""
        written = []
        for template_name in templates:
            rendered = self.instantiate_template(replacements, template_name, creator)
            destination = to_folder / self.as_name(template_name)
            # remove the file if it already exists - this prevents permission
            # errors when the dest is not writable
            if destination.exists():
                destination.unlink()
            # Powershell assumes Windows 1252 encoding when reading files without BOM
            codec = "utf-8-sig" if str(template_name).endswith(".ps1") else "utf-8"
            # use write_bytes to avoid platform specific line normalization (\n -> \r\n)
            destination.write_bytes(rendered.encode(codec))
            written.append(destination)
        return written

    def as_name(self, template):
        """Name of the generated file; by default the template's own name."""
        return template

    def instantiate_template(self, replacements, template, creator):
        """Load *template* from this activator's package and substitute every placeholder."""
        # read content as binary to avoid platform specific line normalization (\n -> \r\n)
        raw = read_binary(self.__module__, template)
        rendered = raw.decode("utf-8", errors="strict")
        for placeholder, value in replacements.items():
            rendered = rendered.replace(placeholder, self.quote(self._repr_unicode(creator, value)))
        return rendered

    @staticmethod
    def _repr_unicode(creator, value):  # noqa: ARG004
        """Hook for subclasses to transform a value before quoting; identity by default."""
        return value


__all__ = [
    "ViaTemplateActivator",
]
|
56
site-packages/virtualenv/app_data/__init__.py
Executable file
56
site-packages/virtualenv/app_data/__init__.py
Executable file
@ -0,0 +1,56 @@
|
||||
"""Application data stored by virtualenv."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from platformdirs import user_data_dir
|
||||
|
||||
from .na import AppDataDisabled
|
||||
from .read_only import ReadOnlyAppData
|
||||
from .via_disk_folder import AppDataDiskFolder
|
||||
from .via_tempdir import TempAppData
|
||||
|
||||
|
||||
def _default_app_data_dir(env):
|
||||
key = "VIRTUALENV_OVERRIDE_APP_DATA"
|
||||
if key in env:
|
||||
return env[key]
|
||||
return user_data_dir(appname="virtualenv", appauthor="pypa")
|
||||
|
||||
|
||||
def make_app_data(folder, **kwargs):
    """
    Create the application data store.

    :param folder: the destination folder; ``None`` selects the default
        (or the ``VIRTUALENV_OVERRIDE_APP_DATA`` override from *env*)
    :param read_only: required keyword - open the app data read-only
    :param env: required keyword - the environment variable mapping consulted for overrides
    :return: a read-only, disk-backed, or temporary app-data store depending on
        the folder's accessibility
    :raises TypeError: when unknown keyword arguments are passed
    """
    is_read_only = kwargs.pop("read_only")
    env = kwargs.pop("env")
    if kwargs:  # py3+ kwonly
        # BUGFIX: the message previously contained a bare "{}" that was never
        # formatted, so the offending keywords were not reported to the caller.
        msg = f"unexpected keywords: {kwargs}"
        raise TypeError(msg)

    if folder is None:
        folder = _default_app_data_dir(env)
    folder = os.path.abspath(folder)

    if is_read_only:
        return ReadOnlyAppData(folder)

    if not os.path.isdir(folder):
        try:
            os.makedirs(folder)
            logging.debug("created app data folder %s", folder)
        except OSError as exception:
            # best effort - fall through to the writability check below
            logging.info("could not create app data folder %s due to %r", folder, exception)

    if os.access(folder, os.W_OK):
        return AppDataDiskFolder(folder)
    logging.debug("app data folder %s has no write access", folder)
    return TempAppData()
||||
|
||||
|
||||
# Public API of the app_data package.
__all__ = (
    "AppDataDisabled",
    "AppDataDiskFolder",
    "ReadOnlyAppData",
    "TempAppData",
    "make_app_data",
)
|
96
site-packages/virtualenv/app_data/base.py
Executable file
96
site-packages/virtualenv/app_data/base.py
Executable file
@ -0,0 +1,96 @@
|
||||
"""Application data stored by virtualenv."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from contextlib import contextmanager
|
||||
|
||||
from virtualenv.info import IS_ZIPAPP
|
||||
|
||||
|
||||
class AppData(ABC):
    """Abstract storage interface for the virtualenv application."""

    @abstractmethod
    def close(self):
        """Called before virtualenv exits."""

    @abstractmethod
    def reset(self):
        """Called when the user passes in the reset app data."""

    @abstractmethod
    def py_info(self, path):
        # returns a ContentStore for the interpreter info of *path*
        # (the disabled implementation returns a no-op store)
        raise NotImplementedError

    @abstractmethod
    def py_info_clear(self):
        # drop all cached interpreter information
        raise NotImplementedError

    @property
    def can_update(self):
        # True when the store accepts updates; concrete subclasses override
        raise NotImplementedError

    @abstractmethod
    def embed_update_log(self, distribution, for_py_version):
        # returns a ContentStore keyed by distribution/python version
        # (presumably the embedded-wheel update log - confirm in subclasses)
        raise NotImplementedError

    @property
    def house(self):
        # a folder owned by the store; semantics defined by subclasses
        raise NotImplementedError

    @property
    def transient(self):
        # True when the data does not outlive the current run
        raise NotImplementedError

    @abstractmethod
    def wheel_image(self, for_py_version, name):
        # path-like for the wheel *name* under the given python version
        # (see concrete implementations - not defined here)
        raise NotImplementedError

    @contextmanager
    def ensure_extracted(self, path, to_folder=None):
        """Some paths might be within the zipapp, unzip these to a path on the disk."""
        if IS_ZIPAPP:
            with self.extract(path, to_folder) as result:
                yield result
        else:
            # running from a regular install - the path is already on disk
            yield path

    @abstractmethod
    @contextmanager
    def extract(self, path, to_folder):
        # context manager yielding an on-disk location for *path* (zipapp support)
        raise NotImplementedError

    @abstractmethod
    @contextmanager
    def locked(self, path):
        # context manager holding an exclusive lock on *path* while inside the block
        raise NotImplementedError
||||
|
||||
|
||||
class ContentStore(ABC):
    """Interface for a single piece of cached content that can be read, written, and removed."""

    @abstractmethod
    def exists(self):
        # True when the content is present in the store
        raise NotImplementedError

    @abstractmethod
    def read(self):
        # return the stored content (the disabled implementation returns None)
        raise NotImplementedError

    @abstractmethod
    def write(self, content):
        # persist *content* into the store
        raise NotImplementedError

    @abstractmethod
    def remove(self):
        # delete the stored content
        raise NotImplementedError

    @abstractmethod
    @contextmanager
    def locked(self):
        pass


__all__ = [
    "AppData",
    "ContentStore",
]
|
72
site-packages/virtualenv/app_data/na.py
Executable file
72
site-packages/virtualenv/app_data/na.py
Executable file
@ -0,0 +1,72 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
from .base import AppData, ContentStore
|
||||
|
||||
|
||||
class AppDataDisabled(AppData):
    """No application cache available (most likely as we don't have write permissions)."""

    transient = True
    can_update = False

    # raised by every operation that genuinely requires a writable folder
    error = RuntimeError("no app data folder available, probably no write access to the folder")

    def __init__(self) -> None:
        pass

    def close(self):
        """Do nothing."""

    def reset(self):
        """Do nothing."""

    def py_info(self, path):  # noqa: ARG002
        """Hand back a no-op content store."""
        return ContentStoreNA()

    def embed_update_log(self, distribution, for_py_version):  # noqa: ARG002
        """Hand back a no-op content store."""
        return ContentStoreNA()

    def extract(self, path, to_folder):  # noqa: ARG002
        """Extraction is impossible without a folder."""
        raise self.error

    @contextmanager
    def locked(self, path):  # noqa: ARG002
        """No locking needed - yield immediately."""
        yield

    @property
    def house(self):
        """No folder exists to house anything."""
        raise self.error

    def wheel_image(self, for_py_version, name):  # noqa: ARG002
        """No cached wheels are available."""
        raise self.error

    def py_info_clear(self):
        """Nothing to clear."""
||||
|
||||
|
||||
class ContentStoreNA(ContentStore):
    """Content store stand-in used when no application data folder is available."""

    def exists(self):
        """The content is never present."""
        return False

    def read(self):
        """There is nothing stored, so nothing to hand back."""
        return None

    def write(self, content):
        """Writes are silently discarded."""

    def remove(self):
        """There is nothing stored, so nothing to delete."""

    @contextmanager
    def locked(self):
        """No locking is required - nothing is ever stored."""
        yield
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["AppDataDisabled", "ContentStoreNA"]
|
42
site-packages/virtualenv/app_data/read_only.py
Executable file
42
site-packages/virtualenv/app_data/read_only.py
Executable file
@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os.path
|
||||
|
||||
from virtualenv.util.lock import NoOpFileLock
|
||||
|
||||
from .via_disk_folder import AppDataDiskFolder, PyInfoStoreDisk
|
||||
|
||||
|
||||
class ReadOnlyAppData(AppDataDiskFolder):
    """Disk-backed app data that refuses any mutation (the folder must already exist)."""

    can_update = False

    def __init__(self, folder: str) -> None:
        if not os.path.isdir(folder):
            msg = f"read-only app data directory {folder} does not exist"
            raise RuntimeError(msg)
        super().__init__(folder)
        # the data is never written, so a no-op lock suffices
        self.lock = NoOpFileLock(folder)

    def reset(self) -> None:
        msg = "read-only app data does not support reset"
        raise RuntimeError(msg)

    def py_info_clear(self) -> None:
        raise NotImplementedError

    def py_info(self, path):
        # serve cached python info through a store whose write() is disabled
        return _PyInfoStoreDiskReadOnly(self.py_info_at, path)

    def embed_update_log(self, distribution, for_py_version):
        raise NotImplementedError
|
||||
|
||||
|
||||
class _PyInfoStoreDiskReadOnly(PyInfoStoreDisk):
    """Python-info store variant that rejects writes (backing folder is read-only)."""

    def write(self, content):  # noqa: ARG002
        msg = "read-only app data python info cannot be updated"
        raise RuntimeError(msg)
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["ReadOnlyAppData"]
|
174
site-packages/virtualenv/app_data/via_disk_folder.py
Executable file
174
site-packages/virtualenv/app_data/via_disk_folder.py
Executable file
@ -0,0 +1,174 @@
|
||||
"""
|
||||
A rough layout of the current storage goes as:
|
||||
|
||||
virtualenv-app-data
|
||||
├── py - <version> <cache information about python interpreters>
|
||||
│ └── *.json/lock
|
||||
├── wheel <cache wheels used for seeding>
|
||||
│ ├── house
|
||||
│ │ └── *.whl <wheels downloaded go here>
|
||||
│ └── <python major.minor> -> 3.9
|
||||
│ ├── img-<version>
|
||||
│ │ └── image
|
||||
│ │ └── <install class> -> CopyPipInstall / SymlinkPipInstall
|
||||
│ │ └── <wheel name> -> pip-20.1.1-py2.py3-none-any
|
||||
│ └── embed
|
||||
│ └── 3 -> json format versioning
|
||||
│ └── *.json -> for every distribution contains data about newer embed versions and releases
|
||||
└─── unzip <in zip app we cannot refer to some internal files, so first extract them>
|
||||
└── <virtualenv version>
|
||||
├── py_info.py
|
||||
├── debug.py
|
||||
└── _virtualenv.py
|
||||
""" # noqa: D415
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from abc import ABC
|
||||
from contextlib import contextmanager, suppress
|
||||
from hashlib import sha256
|
||||
|
||||
from virtualenv.util.lock import ReentrantFileLock
|
||||
from virtualenv.util.path import safe_delete
|
||||
from virtualenv.util.zipapp import extract
|
||||
from virtualenv.version import __version__
|
||||
|
||||
from .base import AppData, ContentStore
|
||||
|
||||
|
||||
class AppDataDiskFolder(AppData):
    """Store the application data on the disk within a folder layout."""

    transient = False
    can_update = True

    def __init__(self, folder) -> None:
        # the lock object doubles as the folder handle (lock.path is the root folder)
        self.lock = ReentrantFileLock(folder)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.lock.path})"

    def __str__(self) -> str:
        return str(self.lock.path)

    def reset(self):
        """Remove the entire app data folder from disk."""
        logging.debug("reset app data folder %s", self.lock.path)
        safe_delete(self.lock.path)

    def close(self):
        """Do nothing."""

    @contextmanager
    def locked(self, path):
        """Hold the file lock of the sub-folder *path* and yield its on-disk location."""
        path_lock = self.lock / path
        with path_lock:
            yield path_lock.path

    @contextmanager
    def extract(self, path, to_folder):
        """Extract *path* (from the zipapp) under a locked destination folder and yield it."""
        # extraction root is either caller-provided or versioned under <root>/unzip
        root = ReentrantFileLock(to_folder()) if to_folder is not None else self.lock / "unzip" / __version__
        with root.lock_for_key(path.name):
            dest = root.path / path.name
            if not dest.exists():
                # note: this calls the module-level zipapp ``extract`` helper, not this method
                extract(path, dest)
            yield dest

    @property
    def py_info_at(self):
        # "1" is the on-disk format version of the python info cache
        return self.lock / "py_info" / "1"

    def py_info(self, path):
        return PyInfoStoreDisk(self.py_info_at, path)

    def py_info_clear(self):
        """clear py info."""
        py_info_folder = self.py_info_at
        with py_info_folder:
            for filename in py_info_folder.path.iterdir():
                if filename.suffix == ".json":
                    with py_info_folder.lock_for_key(filename.stem):
                        # re-check: another process may have removed it while we waited for the lock
                        if filename.exists():
                            filename.unlink()

    def embed_update_log(self, distribution, for_py_version):
        # "3" is the JSON format version of the embed update log
        return EmbedDistributionUpdateStoreDisk(self.lock / "wheel" / for_py_version / "embed" / "3", distribution)

    @property
    def house(self):
        path = self.lock.path / "wheel" / "house"
        path.mkdir(parents=True, exist_ok=True)
        return path

    def wheel_image(self, for_py_version, name):
        return self.lock.path / "wheel" / for_py_version / "image" / "1" / name
|
||||
|
||||
|
||||
class JSONStoreDisk(ContentStore, ABC):
    """Content store backed by a JSON file living inside a lockable folder."""

    def __init__(self, in_folder, key, msg, msg_args) -> None:
        self.in_folder = in_folder  # folder lock the JSON file lives in
        self.key = key  # file stem within the folder
        self.msg = msg  # logging template describing the stored content
        self.msg_args = (*msg_args, self.file)

    @property
    def file(self):
        return self.in_folder.path / f"{self.key}.json"

    def exists(self):
        return self.file.exists()

    def read(self):
        """Return the parsed JSON content, or None when missing/unreadable/corrupt."""
        data, bad_format = None, False
        try:
            data = json.loads(self.file.read_text(encoding="utf-8"))
        except ValueError:
            # the file exists but is not valid JSON - schedule removal below
            bad_format = True
        except Exception:  # noqa: BLE001, S110
            # missing file / permission problems simply mean "no cached value"
            pass
        else:
            logging.debug("got %s from %s", self.msg, self.msg_args)
            return data
        if bad_format:
            with suppress(OSError):  # reading and writing on the same file may cause race on multiple processes
                self.remove()
        return None

    def remove(self):
        self.file.unlink()
        logging.debug("removed %s at %s", self.msg, self.msg_args)

    @contextmanager
    def locked(self):
        with self.in_folder.lock_for_key(self.key):
            yield

    def write(self, content):
        """Serialize *content* as pretty-printed JSON, creating parent folders as needed."""
        folder = self.file.parent
        folder.mkdir(parents=True, exist_ok=True)
        self.file.write_text(json.dumps(content, sort_keys=True, indent=2), encoding="utf-8")
        logging.debug("wrote %s at %s", self.msg, self.msg_args)
|
||||
|
||||
|
||||
class PyInfoStoreDisk(JSONStoreDisk):
    """JSON store caching interpreter information, keyed by a hash of the interpreter path."""

    def __init__(self, in_folder, path) -> None:
        digest = sha256(str(path).encode("utf-8")).hexdigest()
        super().__init__(in_folder, digest, "python info of %s", (path,))
|
||||
|
||||
|
||||
class EmbedDistributionUpdateStoreDisk(JSONStoreDisk):
    """JSON store holding the embed-wheel update log of a single distribution."""

    def __init__(self, in_folder, distribution) -> None:
        super().__init__(in_folder, distribution, "embed update of distribution %s", (distribution,))
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["AppDataDiskFolder", "JSONStoreDisk", "PyInfoStoreDisk"]
|
32
site-packages/virtualenv/app_data/via_tempdir.py
Executable file
32
site-packages/virtualenv/app_data/via_tempdir.py
Executable file
@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from virtualenv.util.path import safe_delete
|
||||
|
||||
from .via_disk_folder import AppDataDiskFolder
|
||||
|
||||
|
||||
class TempAppData(AppDataDiskFolder):
    """App data stored in a freshly created temporary directory, deleted on close."""

    transient = True
    can_update = False

    def __init__(self) -> None:
        super().__init__(folder=mkdtemp())
        logging.debug("created temporary app data folder %s", self.lock.path)

    def reset(self):
        """This is a temporary folder, is already empty to start with."""

    def close(self):
        # transient store: tear the whole folder down when done
        logging.debug("remove temporary app data folder %s", self.lock.path)
        safe_delete(self.lock.path)

    def embed_update_log(self, distribution, for_py_version):
        # a throw-away folder has no use for persisted update logs
        raise NotImplementedError
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["TempAppData"]
|
0
site-packages/virtualenv/config/__init__.py
Executable file
0
site-packages/virtualenv/config/__init__.py
Executable file
0
site-packages/virtualenv/config/cli/__init__.py
Executable file
0
site-packages/virtualenv/config/cli/__init__.py
Executable file
126
site-packages/virtualenv/config/cli/parser.py
Executable file
126
site-packages/virtualenv/config/cli/parser.py
Executable file
@ -0,0 +1,126 @@
|
||||
from __future__ import annotations # noqa: A005
|
||||
|
||||
import os
|
||||
from argparse import SUPPRESS, ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
|
||||
from collections import OrderedDict
|
||||
|
||||
from virtualenv.config.convert import get_type
|
||||
from virtualenv.config.env_var import get_env_var
|
||||
from virtualenv.config.ini import IniConfig
|
||||
|
||||
|
||||
class VirtualEnvOptions(Namespace):
    """Namespace that remembers, per option, where its value came from (cli / env var / file / default)."""

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self._src = None  # when set, every attribute assignment is attributed to this source
        self._sources = {}  # option name -> source label

    def set_src(self, key, value, src):
        """Set option *key* to *value*, recording *src* (env var sources are collapsed to one label)."""
        setattr(self, key, value)
        self._sources[key] = "env var" if src.startswith("env var") else src

    def __setattr__(self, key, value) -> None:
        current = getattr(self, "_src", None)
        if current is not None:
            self._sources[key] = current
        super().__setattr__(key, value)

    def get_source(self, key):
        """Return the recorded source label for *key*, or None when unknown."""
        return self._sources.get(key)

    @property
    def verbosity(self):
        """Effective verbosity: verbose minus quiet, clamped at zero (None when neither is set)."""
        if not hasattr(self, "verbose") and not hasattr(self, "quiet"):
            return None
        return max(self.verbose - self.quiet, 0)

    def __repr__(self) -> str:
        public = ", ".join(f"{k}={v}" for k, v in vars(self).items() if not k.startswith("_"))
        return f"{type(self).__name__}({public})"
|
||||
|
||||
|
||||
class VirtualEnvConfigParser(ArgumentParser):
    """Custom option parser which updates its defaults by checking the configuration files and environmental vars."""

    def __init__(self, options=None, env=None, *args, **kwargs) -> None:
        env = os.environ if env is None else env
        self.file_config = IniConfig(env)
        self.epilog_list = []
        self.env = env
        kwargs["epilog"] = self.file_config.epilog
        # help is added later via enable_help(), after defaults have been fixed up
        kwargs["add_help"] = False
        kwargs["formatter_class"] = HelpFormatter
        kwargs["prog"] = "virtualenv"
        super().__init__(*args, **kwargs)
        self._fixed = set()  # ids of actions whose defaults were already adjusted
        if options is not None and not isinstance(options, VirtualEnvOptions):
            msg = "options must be of type VirtualEnvOptions"
            raise TypeError(msg)
        self.options = VirtualEnvOptions() if options is None else options
        self._interpreter = None
        self._app_data = None

    def _fix_defaults(self):
        # adjust each action's default exactly once (the parser may be re-entered)
        for action in self._actions:
            action_id = id(action)
            if action_id not in self._fixed:
                self._fix_default(action)
                self._fixed.add(action_id)

    def _fix_default(self, action):
        """Override an action's default from the environment or config file, tracking the value's source."""
        if hasattr(action, "default") and hasattr(action, "dest") and action.default != SUPPRESS:
            as_type = get_type(action)
            names = OrderedDict((i.lstrip("-").replace("-", "_"), None) for i in action.option_strings)
            outcome = None
            # precedence: environment variable, then config file, then the hard-coded default
            for name in names:
                outcome = get_env_var(name, as_type, self.env)
                if outcome is not None:
                    break
            if outcome is None and self.file_config:
                for name in names:
                    outcome = self.file_config.get(name, as_type)
                    if outcome is not None:
                        break
            if outcome is not None:
                action.default, action.default_source = outcome
            else:
                outcome = action.default, "default"
            self.options.set_src(action.dest, *outcome)

    def enable_help(self):
        """Register -h/--help once all defaults have been fixed up."""
        self._fix_defaults()
        self.add_argument("-h", "--help", action="help", default=SUPPRESS, help="show this help message and exit")

    def parse_known_args(self, args=None, namespace=None):
        if namespace is None:
            namespace = self.options
        elif namespace is not self.options:
            msg = "can only pass in parser.options"
            raise ValueError(msg)
        self._fix_defaults()
        # attribute anything assigned during parsing to the CLI
        self.options._src = "cli"  # noqa: SLF001
        try:
            namespace.env = self.env
            return super().parse_known_args(args, namespace=namespace)
        finally:
            self.options._src = None  # noqa: SLF001
|
||||
|
||||
|
||||
class HelpFormatter(ArgumentDefaultsHelpFormatter):
    """Wide help formatter that also reports where a default value came from."""

    def __init__(self, prog) -> None:
        super().__init__(prog, max_help_position=32, width=240)

    def _get_help_string(self, action):
        text = super()._get_help_string(action)
        if not hasattr(action, "default_source"):
            return text
        suffix = " (default: %(default)s)"
        if text.endswith(suffix):
            # augment the standard default display with its source
            text = text[: -len(suffix)] + " (default: %(default)s -> from %(default_source)s)"
        return text
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["HelpFormatter", "VirtualEnvConfigParser", "VirtualEnvOptions"]
|
100
site-packages/virtualenv/config/convert.py
Executable file
100
site-packages/virtualenv/config/convert.py
Executable file
@ -0,0 +1,100 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import ClassVar
|
||||
|
||||
|
||||
class TypeData:
    """Couples the type of an option's default value with the type its raw value should be converted to."""

    def __init__(self, default_type, as_type) -> None:
        self.default_type = default_type
        self.as_type = as_type

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(base={self.default_type}, as={self.as_type})"

    def convert(self, value):
        """Coerce the raw value using the default type's constructor."""
        return self.default_type(value)
|
||||
|
||||
|
||||
class BoolType(TypeData):
    """Converter for boolean options accepting the usual INI-style spellings."""

    BOOLEAN_STATES: ClassVar[dict[str, bool]] = {
        "1": True,
        "yes": True,
        "true": True,
        "on": True,
        "0": False,
        "no": False,
        "false": False,
        "off": False,
    }

    def convert(self, value):
        normalized = value.lower()
        if normalized not in self.BOOLEAN_STATES:
            msg = f"Not a boolean: {value}"
            raise ValueError(msg)
        return self.BOOLEAN_STATES[normalized]
|
||||
|
||||
|
||||
class NoneType(TypeData):
    """Converter used when the default is None: empty input stays None, anything else becomes a string."""

    def convert(self, value):
        return str(value) if value else None
|
||||
|
||||
|
||||
class ListType(TypeData):
    """Converter for list-valued options (newline/comma separated, os.pathsep aware)."""

    def _validate(self):
        """no op."""

    def convert(self, value, flatten=True):  # noqa: ARG002, FBT002
        # every entry may itself bundle several values joined by the platform path separator
        parts = []
        for chunk in self.split_values(value):
            parts.extend(chunk.split(os.pathsep))
        return [self.as_type(item) for item in parts]

    def split_values(self, value):
        """
        Split the provided value into a list.

        First this is done by newlines. If there were no newlines in the text,
        then we next try to split by comma.
        """
        if not isinstance(value, (str, bytes)):
            return list(value)
        entries = value.splitlines()
        if len(entries) <= 1:
            entries = value.split(",")
        return filter(None, [entry.strip() for entry in entries])
|
||||
|
||||
|
||||
def convert(value, as_type, source):
    """Convert the value as a given type where the value comes from the given source."""
    try:
        return as_type.convert(value)
    except Exception as error:
        # surface the failure before propagating so the caller's suppress() still leaves a trace
        logging.warning("%s failed to convert %r as %r because %r", source, value, as_type, error)
        raise
|
||||
|
||||
|
||||
# maps a default value's type to the TypeData subclass that knows how to convert its raw value
_CONVERT = {bool: BoolType, type(None): NoneType, list: ListType}


def get_type(action):
    """Build the converter for an argparse action from its default's type (or the explicit ``action.type``)."""
    default_type = type(action.default)
    as_type = default_type if action.type is None else action.type
    return _CONVERT.get(default_type, TypeData)(default_type, as_type)
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["convert", "get_type"]
|
30
site-packages/virtualenv/config/env_var.py
Executable file
30
site-packages/virtualenv/config/env_var.py
Executable file
@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
|
||||
from .convert import convert
|
||||
|
||||
|
||||
def get_env_var(key, as_type, env):
    """
    Get the environment variable option.

    :param key: the config key requested
    :param as_type: the type we would like to convert it to
    :param env: environment variables to use
    :return: a ``(value, source)`` tuple, or ``None`` when unset or conversion failed
    """
    environ_key = f"VIRTUALENV_{key.upper()}"
    if not env.get(environ_key):
        return None
    value = env[environ_key]
    with suppress(Exception):  # note the converter already logs a warning when failures happen
        source = f"env var {environ_key}"
        return convert(value, as_type, source), source
    return None
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["get_env_var"]
|
75
site-packages/virtualenv/config/ini.py
Executable file
75
site-packages/virtualenv/config/ini.py
Executable file
@ -0,0 +1,75 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from configparser import ConfigParser
|
||||
from pathlib import Path
|
||||
from typing import ClassVar
|
||||
|
||||
from platformdirs import user_config_dir
|
||||
|
||||
from .convert import convert
|
||||
|
||||
|
||||
class IniConfig:
    """Loads virtualenv configuration from an INI file (location overridable via an env var)."""

    VIRTUALENV_CONFIG_FILE_ENV_VAR: ClassVar[str] = "VIRTUALENV_CONFIG_FILE"
    # human readable file state, used in the CLI epilog
    STATE: ClassVar[dict[bool | None, str]] = {None: "failed to parse", True: "active", False: "missing"}

    section = "virtualenv"

    def __init__(self, env=None) -> None:
        env = os.environ if env is None else env
        config_file = env.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None)
        self.is_env_var = config_file is not None
        if config_file is None:
            config_file = Path(user_config_dir(appname="virtualenv", appauthor="pypa")) / "virtualenv.ini"
        else:
            config_file = Path(config_file)
        self.config_file = config_file
        self._cache = {}  # memoizes get() lookups per (key, as_type)

        exception = None
        self.has_config_file = None  # None means we could not even stat the file
        try:
            self.has_config_file = self.config_file.exists()
        except OSError as exc:
            exception = exc
        else:
            if self.has_config_file:
                self.config_file = self.config_file.resolve()
                self.config_parser = ConfigParser()
                try:
                    self._load()
                    self.has_virtualenv_section = self.config_parser.has_section(self.section)
                except Exception as exc:  # noqa: BLE001
                    exception = exc
        if exception is not None:
            logging.error("failed to read config file %s because %r", config_file, exception)

    def _load(self):
        with self.config_file.open("rt", encoding="utf-8") as file_handler:
            return self.config_parser.read_file(file_handler)

    def get(self, key, as_type):
        """Return ``(value, "file")`` for *key* converted via *as_type*, or None; results are cached."""
        cache_key = key, as_type
        if cache_key in self._cache:
            return self._cache[cache_key]
        try:
            source = "file"
            raw_value = self.config_parser.get(self.section, key.lower())
            value = convert(raw_value, as_type, source)
            result = value, source
        except Exception:  # noqa: BLE001
            # missing section/option or conversion failure - remember the miss too
            result = None
        self._cache[cache_key] = result
        return result

    def __bool__(self) -> bool:
        # truthy only when the file exists AND has a [virtualenv] section
        return bool(self.has_config_file) and bool(self.has_virtualenv_section)

    @property
    def epilog(self):
        return (
            f"\nconfig file {self.config_file} {self.STATE[self.has_config_file]} "
            f"(change{'d' if self.is_env_var else ''} via env var {self.VIRTUALENV_CONFIG_FILE_ENV_VAR})"
        )
|
0
site-packages/virtualenv/create/__init__.py
Executable file
0
site-packages/virtualenv/create/__init__.py
Executable file
226
site-packages/virtualenv/create/creator.py
Executable file
226
site-packages/virtualenv/create/creator.py
Executable file
@ -0,0 +1,226 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
from argparse import ArgumentTypeError
|
||||
from ast import literal_eval
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
|
||||
from virtualenv.discovery.cached_py_info import LogCmd
|
||||
from virtualenv.util.path import safe_delete
|
||||
from virtualenv.util.subprocess import run_cmd
|
||||
from virtualenv.version import __version__
|
||||
|
||||
from .pyenv_cfg import PyEnvCfg
|
||||
|
||||
HERE = Path(os.path.abspath(__file__)).parent
|
||||
DEBUG_SCRIPT = HERE / "debug.py"
|
||||
|
||||
|
||||
class CreatorMeta:
    """Mutable holder for creator capability metadata."""

    def __init__(self) -> None:
        self.error = None  # populated when the creator cannot be used for the interpreter
|
||||
|
||||
|
||||
class Creator(ABC):
    """A class that given a python Interpreter creates a virtual environment."""

    def __init__(self, options, interpreter) -> None:
        """
        Construct a new virtual environment creator.

        :param options: the CLI option as parsed from :meth:`add_parser_arguments`
        :param interpreter: the interpreter to create virtual environment from
        """
        self.interpreter = interpreter
        self._debug = None  # lazily populated via the :attr:`debug` property
        self.dest = Path(options.dest)
        self.clear = options.clear
        self.no_vcs_ignore = options.no_vcs_ignore
        self.pyenv_cfg = PyEnvCfg.from_folder(self.dest)
        self.app_data = options.app_data
        self.env = options.env

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({', '.join(f'{k}={v}' for k, v in self._args())})"

    def _args(self):
        # key/value pairs shown in __repr__
        return [
            ("dest", str(self.dest)),
            ("clear", self.clear),
            ("no_vcs_ignore", self.no_vcs_ignore),
        ]

    @classmethod
    def can_create(cls, interpreter):  # noqa: ARG003
        """
        Determine if we can create a virtual environment.

        :param interpreter: the interpreter in question
        :return: ``None`` if we can't create, any other object otherwise that will be forwarded to \
                 :meth:`add_parser_arguments`
        """
        return True

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, meta, app_data):  # noqa: ARG003
        """
        Add CLI arguments for the creator.

        :param parser: the CLI parser
        :param app_data: the application data folder
        :param interpreter: the interpreter we're asked to create virtual environment for
        :param meta: value as returned by :meth:`can_create`
        """
        parser.add_argument(
            "dest",
            help="directory to create virtualenv at",
            type=cls.validate_dest,
        )
        parser.add_argument(
            "--clear",
            dest="clear",
            action="store_true",
            help="remove the destination directory if exist before starting (will overwrite files otherwise)",
            default=False,
        )
        parser.add_argument(
            "--no-vcs-ignore",
            dest="no_vcs_ignore",
            action="store_true",
            help="don't create VCS ignore directive in the destination directory",
            default=False,
        )

    @abstractmethod
    def create(self):
        """Perform the virtual environment creation."""
        raise NotImplementedError

    @classmethod
    def validate_dest(cls, raw_value):  # noqa: C901
        """No path separator in the path, valid chars and must be write-able."""

        def non_write_able(dest, value):
            common = Path(*os.path.commonprefix([value.parts, dest.parts]))
            msg = f"the destination {dest.relative_to(common)} is not write-able at {common}"
            raise ArgumentTypeError(msg)

        # the file system must be able to encode
        # note in newer CPython this is always utf-8 https://www.python.org/dev/peps/pep-0529/
        encoding = sys.getfilesystemencoding()
        refused = OrderedDict()
        kwargs = {"errors": "ignore"} if encoding != "mbcs" else {}
        for char in str(raw_value):
            try:
                # round-trip each character through the fs encoding to detect un-encodable ones
                trip = char.encode(encoding, **kwargs).decode(encoding)
                if trip == char:
                    continue
                raise ValueError(trip)  # noqa: TRY301
            except ValueError:
                refused[char] = None
        if refused:
            bad = "".join(refused.keys())
            msg = f"the file system codec ({encoding}) cannot handle characters {bad!r} within {raw_value!r}"
            raise ArgumentTypeError(msg)
        if os.pathsep in raw_value:
            msg = (
                f"destination {raw_value!r} must not contain the path separator ({os.pathsep})"
                f" as this would break the activation scripts"
            )
            raise ArgumentTypeError(msg)

        value = Path(raw_value)
        if value.exists() and value.is_file():
            msg = f"the destination {value} already exists and is a file"
            raise ArgumentTypeError(msg)
        dest = Path(os.path.abspath(str(value))).resolve()  # on Windows absolute does not imply resolve so use both
        value = dest
        # walk up until an existing ancestor is found and verify it is writable
        while dest:
            if dest.exists():
                if os.access(str(dest), os.W_OK):
                    break
                non_write_able(dest, value)
            base, _ = dest.parent, dest.name
            if base == dest:
                non_write_able(dest, value)  # pragma: no cover
            dest = base
        return str(value)

    def run(self):
        """Run the creation: optional cleanup, creation, pyvenv.cfg write, VCS ignore setup."""
        if self.dest.exists() and self.clear:
            logging.debug("delete %s", self.dest)
            safe_delete(self.dest)
        self.create()
        self.set_pyenv_cfg()
        if not self.no_vcs_ignore:
            self.setup_ignore_vcs()

    def set_pyenv_cfg(self):
        """Record the provenance of the environment inside pyvenv.cfg."""
        self.pyenv_cfg.content = OrderedDict()
        self.pyenv_cfg["home"] = os.path.dirname(os.path.abspath(self.interpreter.system_executable))
        self.pyenv_cfg["implementation"] = self.interpreter.implementation
        self.pyenv_cfg["version_info"] = ".".join(str(i) for i in self.interpreter.version_info)
        self.pyenv_cfg["virtualenv"] = __version__

    def setup_ignore_vcs(self):
        """Generate ignore instructions for version control systems."""
        # mark this folder to be ignored by VCS, handle https://www.python.org/dev/peps/pep-0610/#registered-vcs
        git_ignore = self.dest / ".gitignore"
        if not git_ignore.exists():
            git_ignore.write_text("# created by virtualenv automatically\n*\n", encoding="utf-8")
        # Mercurial - does not support the .hgignore file inside a subdirectory directly, but only if included via the
        # subinclude directive from root, at which point on might as well ignore the directory itself, see
        # https://www.selenic.com/mercurial/hgignore.5.html for more details
        # Bazaar - does not support ignore files in sub-directories, only at root level via .bzrignore
        # Subversion - does not support ignore files, requires direct manipulation with the svn tool

    @property
    def debug(self):
        """:return: debug information about the virtual environment (only valid after :meth:`create` has run)"""
        if self._debug is None and self.exe is not None:
            self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data, self.env)
        return self._debug

    @staticmethod
    def debug_script():
        return DEBUG_SCRIPT
|
||||
|
||||
|
||||
def get_env_debug_info(env_exe, debug_script, app_data, env):
    """Run *debug_script* with the environment's interpreter and return the parsed debug report."""
    env = env.copy()
    # avoid the parent process' PYTHONPATH leaking into the probed interpreter
    env.pop("PYTHONPATH", None)

    with app_data.ensure_extracted(debug_script) as debug_script_extracted:
        cmd = [str(env_exe), str(debug_script_extracted)]
        logging.debug("debug via %r", LogCmd(cmd))
        code, out, err = run_cmd(cmd)

    try:
        if code != 0:
            if out:
                result = literal_eval(out)
            else:
                if code == 2 and "file" in err:  # noqa: PLR2004
                    # Re-raise FileNotFoundError from `run_cmd()`
                    raise OSError(err)  # noqa: TRY301
                raise Exception(err)  # noqa: TRY002, TRY301
        else:
            result = json.loads(out)
        if err:
            result["err"] = err
    except Exception as exception:  # noqa: BLE001
        # any parse/probe failure is reported inline instead of propagating
        return {"out": out, "err": err, "returncode": code, "exception": repr(exception)}
    if "sys" in result and "path" in result["sys"]:
        # drop the first sys.path entry - it is the folder of the debug script itself
        del result["sys"]["path"][0]
    return result
|
||||
|
||||
|
||||
# public interface of this module
__all__ = ["Creator", "CreatorMeta"]
|
102
site-packages/virtualenv/create/debug.py
Executable file
102
site-packages/virtualenv/create/debug.py
Executable file
@ -0,0 +1,102 @@
|
||||
"""Inspect a target Python interpreter virtual environment wise."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys # built-in
|
||||
|
||||
|
||||
def encode_path(value):
    """Normalize a sys attribute for JSON output: bytes become str, non-path objects become their type repr."""
    if value is None:
        return None
    if not isinstance(value, (str, bytes)):
        # not path-like: report the type (or the class itself when given one)
        value = repr(value) if isinstance(value, type) else repr(type(value))
    if isinstance(value, bytes):
        value = value.decode(sys.getfilesystemencoding())
    return value
|
||||
|
||||
|
||||
def encode_list_path(value):
    """Apply :func:`encode_path` to every element of *value*."""
    return [encode_path(entry) for entry in value]
|
||||
|
||||
|
||||
def run():
    """Print debug data about the virtual environment."""
    try:
        from collections import OrderedDict  # noqa: PLC0415
    except ImportError:  # pragma: no cover
        # this is possible if the standard library cannot be accessed

        OrderedDict = dict  # pragma: no cover # noqa: N806
    result = OrderedDict([("sys", OrderedDict())])
    # sys attributes that hold paths (or lists of paths)
    path_keys = (
        "executable",
        "_base_executable",
        "prefix",
        "base_prefix",
        "real_prefix",
        "exec_prefix",
        "base_exec_prefix",
        "path",
        "meta_path",
    )
    for key in path_keys:
        value = getattr(sys, key, None)
        value = encode_list_path(value) if isinstance(value, list) else encode_path(value)
        result["sys"][key] = value
    result["sys"]["fs_encoding"] = sys.getfilesystemencoding()
    result["sys"]["io_encoding"] = getattr(sys.stdout, "encoding", None)
    result["version"] = sys.version

    try:
        import sysconfig  # noqa: PLC0415

        # https://bugs.python.org/issue22199
        makefile = getattr(sysconfig, "get_makefile_filename", getattr(sysconfig, "_get_makefile_filename", None))
        result["makefile_filename"] = encode_path(makefile())
    except ImportError:
        pass

    import os  # landmark # noqa: PLC0415

    result["os"] = repr(os)

    # each of the following imports doubles as an availability probe of a core module
    try:
        import site  # site # noqa: PLC0415

        result["site"] = repr(site)
    except ImportError as exception:  # pragma: no cover
        result["site"] = repr(exception)  # pragma: no cover

    try:
        import datetime  # site # noqa: PLC0415

        result["datetime"] = repr(datetime)
    except ImportError as exception:  # pragma: no cover
        result["datetime"] = repr(exception)  # pragma: no cover

    try:
        import math  # site # noqa: PLC0415

        result["math"] = repr(math)
    except ImportError as exception:  # pragma: no cover
        result["math"] = repr(exception)  # pragma: no cover

    # try to print out, this will validate if other core modules are available (json in this case)
    try:
        import json  # noqa: PLC0415

        result["json"] = repr(json)
    except ImportError as exception:
        result["json"] = repr(exception)
    else:
        try:
            content = json.dumps(result, indent=2)
            sys.stdout.write(content)
        except (ValueError, TypeError) as exception:  # pragma: no cover
            sys.stderr.write(repr(exception))
            sys.stdout.write(repr(result))  # pragma: no cover
            raise SystemExit(1)  # noqa: B904 # pragma: no cover
|
||||
|
||||
|
||||
# Allow this module to be executed directly against a target interpreter.
if __name__ == "__main__":
    run()
|
110
site-packages/virtualenv/create/describe.py
Executable file
110
site-packages/virtualenv/create/describe.py
Executable file
@ -0,0 +1,110 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
|
||||
from virtualenv.info import IS_WIN
|
||||
|
||||
|
||||
class Describe:
    """Predict, for a host interpreter, what the created environment will look like at ``dest``."""

    # Windows executables carry an extension, POSIX ones do not
    suffix = ".exe" if IS_WIN else ""

    def __init__(self, dest, interpreter) -> None:
        self.interpreter = interpreter
        self.dest = dest
        # lazily-computed caches, filled on first property access
        self._stdlib = None
        self._stdlib_platform = None
        self._system_stdlib = None
        self._conf_vars = None

    @property
    def bin_dir(self):
        """Alias of :attr:`script_dir`."""
        return self.script_dir

    @property
    def script_dir(self):
        """Directory that will hold console scripts."""
        return self.dest / self.interpreter.install_path("scripts")

    @property
    def purelib(self):
        """Site-packages directory for pure-Python distributions."""
        return self.dest / self.interpreter.install_path("purelib")

    @property
    def platlib(self):
        """Site-packages directory for platform-specific distributions."""
        return self.dest / self.interpreter.install_path("platlib")

    @property
    def libs(self):
        """Unique library directories (platlib first), duplicates removed, order kept."""
        return list(OrderedDict.fromkeys((self.platlib, self.purelib)))

    @property
    def stdlib(self):
        """Standard-library path, resolved once against the remapped config vars."""
        if self._stdlib is None:
            location = self.interpreter.sysconfig_path("stdlib", config_var=self._config_vars)
            self._stdlib = Path(location)
        return self._stdlib

    @property
    def stdlib_platform(self):
        """Platform standard-library path, resolved once against the remapped config vars."""
        if self._stdlib_platform is None:
            location = self.interpreter.sysconfig_path("platstdlib", config_var=self._config_vars)
            self._stdlib_platform = Path(location)
        return self._stdlib_platform

    @property
    def _config_vars(self):
        if self._conf_vars is None:
            self._conf_vars = self._calc_config_vars(self.dest)
        return self._conf_vars

    def _calc_config_vars(self, to):
        """Return sysconfig vars with every prefix-rooted value remapped onto *to*."""
        prefix = self.interpreter.prefix
        remapped = {}
        for key, value in self.interpreter.sysconfig_vars.items():
            if value is not None and value.startswith(prefix):
                remapped[key] = to
            else:
                remapped[key] = value
        return remapped

    @classmethod
    def can_describe(cls, interpreter):  # noqa: ARG003
        """Return True when this describer knows how the output will look."""
        return True

    @property
    def env_name(self):
        """Name of the environment, i.e. the last component of ``dest``."""
        return self.dest.parts[-1]

    @property
    def exe(self):
        """Full path of the environment's Python executable."""
        return self.bin_dir / (self.exe_stem() + self.suffix)

    @classmethod
    def exe_stem(cls):
        """Executable name without suffix - there seems to be no standard way to get this without creating it."""
        raise NotImplementedError

    def script(self, name):
        """Full path of the console script *name* inside the environment."""
        return self.script_dir / (name + self.suffix)
|
||||
|
||||
|
||||
class Python3Supports(Describe, ABC):
    """Mixin restricting a describer to Python 3 target interpreters."""

    @classmethod
    def can_describe(cls, interpreter):
        if interpreter.version_info.major != 3:  # noqa: PLR2004
            return False
        return super().can_describe(interpreter)
|
||||
|
||||
|
||||
class PosixSupports(Describe, ABC):
    """Mixin restricting a describer to POSIX target interpreters."""

    @classmethod
    def can_describe(cls, interpreter):
        if interpreter.os != "posix":
            return False
        return super().can_describe(interpreter)
|
||||
|
||||
|
||||
class WindowsSupports(Describe, ABC):
    """Mixin restricting a describer to Windows target interpreters."""

    @classmethod
    def can_describe(cls, interpreter):
        if interpreter.os != "nt":
            return False
        return super().can_describe(interpreter)
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "Describe",
    "PosixSupports",
    "Python3Supports",
    "WindowsSupports",
]
|
66
site-packages/virtualenv/create/pyenv_cfg.py
Executable file
66
site-packages/virtualenv/create/pyenv_cfg.py
Executable file
@ -0,0 +1,66 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class PyEnvCfg:
    """In-memory view of a ``pyvenv.cfg`` file with load/save helpers."""

    def __init__(self, content, path) -> None:
        # mapping of key -> value as parsed from (or to be written to) the file
        self.content = content
        # filesystem path of the backing pyvenv.cfg
        self.path = path

    @classmethod
    def from_folder(cls, folder):
        """Load the ``pyvenv.cfg`` located directly inside *folder*."""
        return cls.from_file(folder / "pyvenv.cfg")

    @classmethod
    def from_file(cls, path):
        """Load *path* if it exists, otherwise start with empty content."""
        content = cls._read_values(path) if path.exists() else OrderedDict()
        # use cls (not the hard-coded class name) so subclasses get instances of themselves
        return cls(content, path)

    @staticmethod
    def _read_values(path):
        """Parse ``key = value`` lines from *path*; blank/malformed lines are skipped."""
        content = OrderedDict()
        for line in path.read_text(encoding="utf-8").splitlines():
            # partition never raises - lines without "=" (e.g. blank lines) are ignored
            key, sep, value = line.partition("=")
            if not sep:
                continue
            content[key.strip()] = value.strip()
        return content

    def write(self):
        """Serialize the content back to :attr:`path`, resolving values that are existing paths."""
        logging.debug("write %s", self.path)
        lines = []
        for key, value in self.content.items():
            # normalize values that point at real filesystem entries (resolve symlinks)
            normalized_value = os.path.realpath(value) if value and os.path.exists(value) else value
            line = f"{key} = {normalized_value}"
            logging.debug("\t%s", line)
            lines.append(line)
        # each entry on its own newline-terminated line; empty content writes an empty file
        self.path.write_text("".join(f"{line}\n" for line in lines), encoding="utf-8")

    def refresh(self):
        """Re-read the content from disk and return it."""
        self.content = self._read_values(self.path)
        return self.content

    def __setitem__(self, key, value) -> None:
        self.content[key] = value

    def __getitem__(self, key):
        return self.content[key]

    def __contains__(self, item) -> bool:
        return item in self.content

    def update(self, other):
        """Merge *other* into the content; returns self to allow chaining."""
        self.content.update(other)
        return self

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(path={self.path})"
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    "PyEnvCfg",
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user