from belgium

master
Frans Masereel Centrum 2 years ago
parent 0676cdd9ca
commit b45ab8e359

File diff suppressed because one or more lines are too long

@@ -96,11 +96,6 @@ function global:deactivate ([switch]$NonDestructive) {
 Remove-Item -Path env:VIRTUAL_ENV
 }
-# Just remove VIRTUAL_ENV_PROMPT altogether.
-if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
-Remove-Item -Path env:VIRTUAL_ENV_PROMPT
-}
 # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
 if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
 Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
@@ -202,7 +197,7 @@ else {
 $Prompt = $pyvenvCfg['prompt'];
 }
 else {
-Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)"
 Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
 $Prompt = Split-Path -Path $venvDir -Leaf
 }
@@ -233,7 +228,6 @@ if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
 Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
 _OLD_VIRTUAL_PROMPT
 }
-$env:VIRTUAL_ENV_PROMPT = $Prompt
 }
 # Clear PYTHONHOME

@@ -28,7 +28,6 @@ deactivate () {
 fi
 unset VIRTUAL_ENV
-unset VIRTUAL_ENV_PROMPT
 if [ ! "${1:-}" = "nondestructive" ] ; then
 # Self destruct!
 unset -f deactivate
@@ -38,7 +37,7 @@ deactivate () {
 # unset irrelevant variables
 deactivate nondestructive
-VIRTUAL_ENV="/Users/poni/_harvesting/axios-example"
+VIRTUAL_ENV="/Users/fmc_mac4/harvesting_the_net/axios-example"
 export VIRTUAL_ENV
 _OLD_VIRTUAL_PATH="$PATH"
@@ -57,8 +56,6 @@ if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
 _OLD_VIRTUAL_PS1="${PS1:-}"
 PS1="(axios-example) ${PS1:-}"
 export PS1
-VIRTUAL_ENV_PROMPT="(axios-example) "
-export VIRTUAL_ENV_PROMPT
 fi
 # This should detect bash and zsh, which have a hash command that must

@@ -3,12 +3,12 @@
 # Created by Davide Di Blasi <davidedb@gmail.com>.
 # Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
-alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
 # Unset irrelevant variables.
 deactivate nondestructive
-setenv VIRTUAL_ENV "/Users/poni/_harvesting/axios-example"
+setenv VIRTUAL_ENV "/Users/fmc_mac4/harvesting_the_net/axios-example"
 set _OLD_VIRTUAL_PATH="$PATH"
 setenv PATH "$VIRTUAL_ENV/bin:$PATH"
@@ -18,7 +18,6 @@ set _OLD_VIRTUAL_PROMPT="$prompt"
 if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
 set prompt = "(axios-example) $prompt"
-setenv VIRTUAL_ENV_PROMPT "(axios-example) "
 endif
 alias pydoc python -m pydoc

@@ -20,7 +20,6 @@ function deactivate -d "Exit virtual environment and return to normal shell env
 end
 set -e VIRTUAL_ENV
-set -e VIRTUAL_ENV_PROMPT
 if test "$argv[1]" != "nondestructive"
 # Self-destruct!
 functions -e deactivate
@@ -30,7 +29,7 @@ end
 # Unset irrelevant variables.
 deactivate nondestructive
-set -gx VIRTUAL_ENV "/Users/poni/_harvesting/axios-example"
+set -gx VIRTUAL_ENV "/Users/fmc_mac4/harvesting_the_net/axios-example"
 set -gx _OLD_VIRTUAL_PATH $PATH
 set -gx PATH "$VIRTUAL_ENV/bin" $PATH
@@ -62,5 +61,4 @@ if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
 end
 set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
-set -gx VIRTUAL_ENV_PROMPT "(axios-example) "
 end

@@ -1,4 +1,4 @@
-#!/Users/poni/_harvesting/axios-example/bin/python3.10
+#!/Users/fmc_mac4/harvesting_the_net/axios-example/bin/python3
 # -*- coding: utf-8 -*-
 import re
 import sys

@@ -1,8 +0,0 @@
#!/Users/poni/_harvesting/axios-example/bin/python3.10
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli_detect())

@@ -1,4 +1,4 @@
-#!/Users/poni/_harvesting/axios-example/bin/python3.10
+#!/Users/fmc_mac4/harvesting_the_net/axios-example/bin/python3
 # -*- coding: utf-8 -*-
 import re
 import sys

@@ -1,4 +1,4 @@
-#!/Users/poni/_harvesting/axios-example/bin/python3.10
+#!/Users/fmc_mac4/harvesting_the_net/axios-example/bin/python3
 # -*- coding: utf-8 -*-
 import re
 import sys

@@ -1,8 +0,0 @@
#!/Users/poni/_harvesting/axios-example/bin/python3.10
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@@ -1 +1 @@
-python3.10
+python3

@@ -1 +1 @@
-python3.10
+/Library/Developer/CommandLineTools/usr/bin/python3

@@ -1 +0,0 @@
/usr/local/opt/python@3.10/bin/python3.10
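Taken together, the hunks above show why a committed virtualenv churns: the activate scripts, console-script shebangs and bin/python* symlinks all embed absolute paths of the machine and interpreter that created them. A minimal sketch of rebuilding the environment with the standard-library venv module instead of hand-editing those paths (only the directory name axios-example comes from this diff; everything else is an assumption, not part of the commit):

    # Run from the repository root with the interpreter the venv should use.
    import venv

    venv.create("axios-example", clear=True, with_pip=True)
    # This writes fresh bin/activate, Activate.ps1, activate.csh, activate.fish and
    # python symlinks for the local interpreter; packages (flask, charset-normalizer,
    # pip's console scripts, ...) must then be reinstalled, which regenerates the
    # shebang lines seen in the hunks above.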

@@ -1,28 +0,0 @@
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -1,126 +0,0 @@
Metadata-Version: 2.1
Name: Flask
Version: 2.1.2
Summary: A simple framework for building complex web applications.
Home-page: https://palletsprojects.com/p/flask
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://flask.palletsprojects.com/
Project-URL: Changes, https://flask.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/flask/
Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/
Project-URL: Twitter, https://twitter.com/PalletsTeam
Project-URL: Chat, https://discord.gg/pallets
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Framework :: Flask
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
Requires-Dist: Werkzeug (>=2.0)
Requires-Dist: Jinja2 (>=3.0)
Requires-Dist: itsdangerous (>=2.0)
Requires-Dist: click (>=8.0)
Requires-Dist: importlib-metadata (>=3.6.0) ; python_version < "3.10"
Provides-Extra: async
Requires-Dist: asgiref (>=3.2) ; extra == 'async'
Provides-Extra: dotenv
Requires-Dist: python-dotenv ; extra == 'dotenv'
Flask
=====
Flask is a lightweight `WSGI`_ web application framework. It is designed
to make getting started quick and easy, with the ability to scale up to
complex applications. It began as a simple wrapper around `Werkzeug`_
and `Jinja`_ and has become one of the most popular Python web
application frameworks.
Flask offers suggestions, but doesn't enforce any dependencies or
project layout. It is up to the developer to choose the tools and
libraries they want to use. There are many extensions provided by the
community that make adding new functionality easy.
.. _WSGI: https://wsgi.readthedocs.io/
.. _Werkzeug: https://werkzeug.palletsprojects.com/
.. _Jinja: https://jinja.palletsprojects.com/
Installing
----------
Install and update using `pip`_:
.. code-block:: text
$ pip install -U Flask
.. _pip: https://pip.pypa.io/en/stable/getting-started/
A Simple Example
----------------
.. code-block:: python
# save this as app.py
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, World!"
.. code-block:: text
$ flask run
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
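A minimal sketch of running the same app.py without the flask command, using the built-in development server directly (the debug flag here is an illustrative assumption, not part of this README):
.. code-block:: python

    # Appended to app.py: start Flask's development server when run directly.
    if __name__ == "__main__":
        app.run(debug=True)  # serves on http://127.0.0.1:5000/ by default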
Contributing
------------
For guidance on setting up a development environment and how to make a
contribution to Flask, see the `contributing guidelines`_.
.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst
Donate
------
The Pallets organization develops and supports Flask and the libraries
it uses. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://flask.palletsprojects.com/
- Changes: https://flask.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/Flask/
- Source Code: https://github.com/pallets/flask/
- Issue Tracker: https://github.com/pallets/flask/issues/
- Website: https://palletsprojects.com/p/flask/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets

@@ -1,52 +0,0 @@
../../../bin/flask,sha256=D0MxglDrmpyo8MR3iVaFP7dO-8Nur-9-7oFGFLMmUWc,244
Flask-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Flask-2.1.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
Flask-2.1.2.dist-info/METADATA,sha256=V3wRBN5pAKumPcu41b-ePUWUQkdyka_WVPXwMKGvLGQ,3907
Flask-2.1.2.dist-info/RECORD,,
Flask-2.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
Flask-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
Flask-2.1.2.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41
Flask-2.1.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6
flask/__init__.py,sha256=hutpl3wZCIHR-FckBdDZMe_pw89k7llUTj7c7t77cWc,2207
flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
flask/__pycache__/__init__.cpython-310.pyc,,
flask/__pycache__/__main__.cpython-310.pyc,,
flask/__pycache__/app.cpython-310.pyc,,
flask/__pycache__/blueprints.cpython-310.pyc,,
flask/__pycache__/cli.cpython-310.pyc,,
flask/__pycache__/config.cpython-310.pyc,,
flask/__pycache__/ctx.cpython-310.pyc,,
flask/__pycache__/debughelpers.cpython-310.pyc,,
flask/__pycache__/globals.cpython-310.pyc,,
flask/__pycache__/helpers.cpython-310.pyc,,
flask/__pycache__/logging.cpython-310.pyc,,
flask/__pycache__/scaffold.cpython-310.pyc,,
flask/__pycache__/sessions.cpython-310.pyc,,
flask/__pycache__/signals.cpython-310.pyc,,
flask/__pycache__/templating.cpython-310.pyc,,
flask/__pycache__/testing.cpython-310.pyc,,
flask/__pycache__/typing.cpython-310.pyc,,
flask/__pycache__/views.cpython-310.pyc,,
flask/__pycache__/wrappers.cpython-310.pyc,,
flask/app.py,sha256=b6_j0OtlrssZ05fMReHNwzSLN3M7mkI9LR8UWuhcm0g,82070
flask/blueprints.py,sha256=W2C5eFciX2Zq8zJSKQHiQd488Ytcsrt6wcXSZ-rSU7c,23362
flask/cli.py,sha256=eCZMiAFr6quTZo2pZ5RH2NBS1cQEZqLke9MCloOVIMA,31185
flask/config.py,sha256=IWqHecH4poDxNEUg4U_ZA1CTlL5BKZDX3ofG4UGYyi0,12584
flask/ctx.py,sha256=smANecq_Tr3FFi2UMDs3Nn-xYi9GhKrzZvFpkDqXtLM,18336
flask/debughelpers.py,sha256=parkDxfxxGZZAOGSumyrGLCTKcws86lJ2X3RqikiwUE,6036
flask/globals.py,sha256=cWd-R2hUH3VqPhnmQNww892tQS6Yjqg_wg8UvW1M7NM,1723
flask/helpers.py,sha256=qkbHG_6-Ox_uDmmD8ZwA9k4nTI8Q8U5S_6QtXpzZCRg,29266
flask/json/__init__.py,sha256=jZHbiHKOICmnMttYIhHYrCj2LeHjZY-J7KiYoorjc4I,10394
flask/json/__pycache__/__init__.cpython-310.pyc,,
flask/json/__pycache__/tag.cpython-310.pyc,,
flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857
flask/logging.py,sha256=1o_hirVGqdj7SBdETnhX7IAjklG89RXlrwz_2CjzQQE,2273
flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
flask/scaffold.py,sha256=vFn4R_sDhaWwxH77UGmRUWgE2p9tzj9VqRHG5dvE7wM,32545
flask/sessions.py,sha256=y0f1WBTQqjebkjtiT6d0G_ejIvllsjzch5sq_NUoXec,15834
flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136
flask/templating.py,sha256=6rcyoV-Z57uBGMB6_xl4LhQJHbF456naf-GU8pjQSPM,5659
flask/testing.py,sha256=mfyDupACHNQinATGAcrqqDst9Ik4CnNg3rP9gvpUjks,10385
flask/typing.py,sha256=P1x3WCUYE7ddMNCUVqr04V5EjJy4M0Osz1MfVM0xgMQ,2116
flask/views.py,sha256=nhq31TRB5Z-z2mjFGZACaaB2Et5XPCmWhWxJxOvLWww,5948
flask/wrappers.py,sha256=Vgs2HlC8WNUOELQasV-Xad8DFTFwJe3eUltZG9z4Cu8,5675

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1,28 +0,0 @@
Copyright 2007 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -1,113 +0,0 @@
Metadata-Version: 2.1
Name: Jinja2
Version: 3.1.2
Summary: A very fast and expressive template engine.
Home-page: https://palletsprojects.com/p/jinja/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://jinja.palletsprojects.com/
Project-URL: Changes, https://jinja.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/jinja/
Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
Project-URL: Twitter, https://twitter.com/PalletsTeam
Project-URL: Chat, https://discord.gg/pallets
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
Requires-Dist: MarkupSafe (>=2.0)
Provides-Extra: i18n
Requires-Dist: Babel (>=2.7) ; extra == 'i18n'
Jinja
=====
Jinja is a fast, expressive, extensible templating engine. Special
placeholders in the template allow writing code similar to Python
syntax. Then the template is passed data to render the final document.
It includes:
- Template inheritance and inclusion.
- Define and import macros within templates.
- HTML templates can use autoescaping to prevent XSS from untrusted
user input.
- A sandboxed environment can safely render untrusted templates.
- AsyncIO support for generating templates and calling async
functions.
- I18N support with Babel.
- Templates are compiled to optimized Python code just-in-time and
cached, or can be compiled ahead-of-time.
- Exceptions point to the correct line in templates to make debugging
easier.
- Extensible filters, tests, functions, and even syntax.
Jinja's philosophy is that while application logic belongs in Python if
possible, it shouldn't make the template designer's job difficult by
restricting functionality too much.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
$ pip install -U Jinja2
.. _pip: https://pip.pypa.io/en/stable/getting-started/
In A Nutshell
-------------
.. code-block:: jinja
{% extends "base.html" %}
{% block title %}Members{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
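Rendering such a template from Python takes only a few lines; a minimal sketch using the jinja2 API (the template string and variable are illustrative assumptions):
.. code-block:: python

    # Render an inline template string; larger projects usually load templates
    # through an Environment with a loader instead.
    from jinja2 import Template

    template = Template("Hello {{ name }}!")
    print(template.render(name="World"))  # -> Hello World!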
Donate
------
The Pallets organization develops and supports Jinja and other popular
packages. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://jinja.palletsprojects.com/
- Changes: https://jinja.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/Jinja2/
- Source Code: https://github.com/pallets/jinja/
- Issue Tracker: https://github.com/pallets/jinja/issues/
- Website: https://palletsprojects.com/p/jinja/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets

@@ -1,59 +0,0 @@
Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539
Jinja2-3.1.2.dist-info/RECORD,,
Jinja2-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927
jinja2/__pycache__/__init__.cpython-310.pyc,,
jinja2/__pycache__/_identifier.cpython-310.pyc,,
jinja2/__pycache__/async_utils.cpython-310.pyc,,
jinja2/__pycache__/bccache.cpython-310.pyc,,
jinja2/__pycache__/compiler.cpython-310.pyc,,
jinja2/__pycache__/constants.cpython-310.pyc,,
jinja2/__pycache__/debug.cpython-310.pyc,,
jinja2/__pycache__/defaults.cpython-310.pyc,,
jinja2/__pycache__/environment.cpython-310.pyc,,
jinja2/__pycache__/exceptions.cpython-310.pyc,,
jinja2/__pycache__/ext.cpython-310.pyc,,
jinja2/__pycache__/filters.cpython-310.pyc,,
jinja2/__pycache__/idtracking.cpython-310.pyc,,
jinja2/__pycache__/lexer.cpython-310.pyc,,
jinja2/__pycache__/loaders.cpython-310.pyc,,
jinja2/__pycache__/meta.cpython-310.pyc,,
jinja2/__pycache__/nativetypes.cpython-310.pyc,,
jinja2/__pycache__/nodes.cpython-310.pyc,,
jinja2/__pycache__/optimizer.cpython-310.pyc,,
jinja2/__pycache__/parser.cpython-310.pyc,,
jinja2/__pycache__/runtime.cpython-310.pyc,,
jinja2/__pycache__/sandbox.cpython-310.pyc,,
jinja2/__pycache__/tests.cpython-310.pyc,,
jinja2/__pycache__/utils.cpython-310.pyc,,
jinja2/__pycache__/visitor.cpython-310.pyc,,
jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472
jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172
jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349
jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502
jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509
jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207
jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226
jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595
jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476
jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965
jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1,2 +0,0 @@
[babel.extractors]
jinja2 = jinja2.ext:babel_extract[i18n]

@@ -1,28 +0,0 @@
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -1,101 +0,0 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 2.1.1
Summary: Safely add untrusted strings to HTML/XML markup.
Home-page: https://palletsprojects.com/p/markupsafe/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/markupsafe/
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
Project-URL: Twitter, https://twitter.com/PalletsTeam
Project-URL: Chat, https://discord.gg/pallets
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
MarkupSafe
==========
MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U MarkupSafe
.. _pip: https://pip.pypa.io/en/stable/getting-started/
Examples
--------
.. code-block:: pycon
>>> from markupsafe import Markup, escape
>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>hello</strong>')
>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>hello</strong>')
>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
Donate
------
The Pallets organization develops and supports MarkupSafe and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
`please donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://markupsafe.palletsprojects.com/
- Changes: https://markupsafe.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/MarkupSafe/
- Source Code: https://github.com/pallets/markupsafe/
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
- Website: https://palletsprojects.com/p/markupsafe/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets

@@ -1,15 +0,0 @@
MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242
MarkupSafe-2.1.1.dist-info/RECORD,,
MarkupSafe-2.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
MarkupSafe-2.1.1.dist-info/WHEEL,sha256=QN3MNOFrRIeE9ccPTe5mH4Q_A2DBDdD7_qUigvuJeQU,111
MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=xfaUQkKNRTdYWe6HnnJ2HjguFmS-C_0H6g8-Q9VAfkQ,9284
markupsafe/__pycache__/__init__.cpython-310.pyc,,
markupsafe/__pycache__/_native.cpython-310.pyc,,
markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
markupsafe/_speedups.cpython-310-darwin.so,sha256=RrUcL36CQAvdE7Xf4OcvU3CVuQ4qkgDwylh52HXTyHM,35136
markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.0)
Root-Is-Purelib: false
Tag: cp310-cp310-macosx_10_9_x86_64

@@ -1,22 +0,0 @@
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.

@@ -1,321 +0,0 @@
Metadata-Version: 2.1
Name: PySocks
Version: 1.7.1
Summary: A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.
Home-page: https://github.com/Anorov/PySocks
Author: Anorov
Author-email: anorov.vorona@gmail.com
License: BSD
Keywords: socks,proxy
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Description-Content-Type: text/markdown
PySocks
=======
PySocks lets you send traffic through SOCKS and HTTP proxy servers. It is a modern fork of [SocksiPy](http://socksipy.sourceforge.net/) with bug fixes and extra features.
Acts as a drop-in replacement to the socket module. Seamlessly configure SOCKS proxies for any socket object by calling `socket_object.set_proxy()`.
----------------
Features
========
* SOCKS proxy client for Python 2.7 and 3.4+
* TCP supported
* UDP mostly supported (issues may occur in some edge cases)
* HTTP proxy client included but not supported or recommended (you should use urllib2's or requests' own HTTP proxy interface)
* urllib2 handler included. `pip install` / `setup.py install` will automatically install the `sockshandler` module.
Installation
============
pip install PySocks
Or download the tarball / `git clone` and...
python setup.py install
These will install both the `socks` and `sockshandler` modules.
Alternatively, include just `socks.py` in your project.
--------------------------------------------
*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP (not SOCKS) proxies, it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead.
--------------------------------------------
Usage
=====
## socks.socksocket ##
import socks
s = socks.socksocket() # Same API as socket.socket in the standard lib
s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default
# Or
s.set_proxy(socks.SOCKS4, "localhost", 4444)
# Or
s.set_proxy(socks.HTTP, "5.5.5.5", 8888)
# Can be treated identical to a regular socket object
s.connect(("www.somesite.com", 80))
s.sendall("GET / HTTP/1.1 ...")
print s.recv(4096)
## Monkeypatching ##
To monkeypatch the entire standard library with a single default proxy:
import urllib2
import socket
import socks
socks.set_default_proxy(socks.SOCKS5, "localhost")
socket.socket = socks.socksocket
urllib2.urlopen("http://www.somesite.com/") # All requests will pass through the SOCKS proxy
Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended. Monkeypatching is usually an anti-pattern in Python.
## urllib2 Handler ##
Example use case with the `sockshandler` urllib2 handler. Note that you must import both `socks` and `sockshandler`, as the handler is its own module separate from PySocks. The module is included in the PyPI package.
import urllib2
import socks
from sockshandler import SocksiPyHandler
opener = urllib2.build_opener(SocksiPyHandler(socks.SOCKS5, "127.0.0.1", 9050))
print opener.open("http://www.somesite.com/") # All requests made by the opener will pass through the SOCKS proxy
--------------------------------------------
Original SocksiPy README attached below, amended to reflect API changes.
--------------------------------------------
SocksiPy
A Python SOCKS module.
(C) 2006 Dan-Haim. All rights reserved.
See LICENSE file for details.
*WHAT IS A SOCKS PROXY?*
A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as
a tunnel, relaying all traffic going through it without modifying it.
SOCKS proxies can be used to relay traffic using any network protocol that
uses TCP.
*WHAT IS SOCKSIPY?*
This Python module allows you to create TCP connections through a SOCKS
proxy without any special effort.
It also supports relaying UDP packets with a SOCKS5 proxy.
*PROXY COMPATIBILITY*
SocksiPy is compatible with three different types of proxies:
1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension.
2. SOCKS Version 5 (SOCKS5).
3. HTTP Proxies which support tunneling using the CONNECT method.
*SYSTEM REQUIREMENTS*
Being written in Python, SocksiPy can run on any platform that has a Python
interpreter and TCP/IP support.
This module has been tested with Python 2.3 and should work with greater versions
just as well.
INSTALLATION
-------------
Simply copy the file "socks.py" to your Python's `lib/site-packages` directory,
and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks]
USAGE
------
First load the socks module with the command:
>>> import socks
>>>
The socks module provides a class called `socksocket`, which is the base to all of the module's functionality.
The `socksocket` object has the same initialization parameters as the normal socket
object to ensure maximal compatibility, however it should be noted that `socksocket` will only function with family being `AF_INET` and
type being either `SOCK_STREAM` or `SOCK_DGRAM`.
Generally, it is best to initialize the `socksocket` object with no parameters
>>> s = socks.socksocket()
>>>
The `socksocket` object has an interface which is very similiar to socket's (in fact
the `socksocket` class is derived from socket) with a few extra methods.
To select the proxy server you would like to use, use the `set_proxy` method, whose
syntax is:
set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]])
Explanation of the parameters:
`proxy_type` - The type of the proxy server. This can be one of three possible
choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4,
SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are all aliases, respectively.
`addr` - The IP address or DNS name of the proxy server.
`port` - The port of the proxy server. Defaults to 1080 for socks and 8080 for http.
`rdns` - This is a boolean flag than modifies the behavior regarding DNS resolving.
If it is set to True, DNS resolving will be preformed remotely, on the server.
If it is set to False, DNS resolving will be preformed locally. Please note that
setting this to True with SOCKS4 servers actually use an extension to the protocol,
called SOCKS4a, which may not be supported on all servers (SOCKS5 and http servers
always support DNS). The default is True.
`username` - For SOCKS5 servers, this allows simple username / password authentication
with the server. For SOCKS4 servers, this parameter will be sent as the userid.
This parameter is ignored if an HTTP server is being used. If it is not provided,
authentication will not be used (servers may accept unauthenticated requests).
`password` - This parameter is valid only for SOCKS5 servers and specifies the
respective password for the username provided.
Example of usage:
>>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080
>>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081)
After the set_proxy method has been called, simply call the connect method with the
traditional parameters to establish a connection through the proxy:
>>> s.connect(("www.sourceforge.net", 80))
>>>
Connection will take a bit longer to allow negotiation with the proxy server.
Please note that calling connect without calling `set_proxy` earlier will connect
without a proxy (just like a regular socket).
Errors: Any errors in the connection process will trigger exceptions. The exception
may either be generated by the underlying socket layer or may be custom module
exceptions, whose details follow:
class `ProxyError` - This is a base exception class. It is not raised directly but
rather all other exception classes raised by this module are derived from it.
This allows an easy way to catch all proxy-related errors. It descends from `IOError`.
All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a
caught `socket.error` exception, or `None` if there wasn't any.
class `GeneralProxyError` - When thrown, it indicates a problem which does not fall
into another category.
* `Sent invalid data` - This error means that unexpected data has been received from
the server. The most common reason is that the server specified as the proxy is
not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong.
* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection.
This may indicate that the proxy server is experiencing network or software problems.
* `Bad proxy type` - This will be raised if the type of the proxy supplied to the
set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`.
* `Bad input` - This will be raised if the `connect()` method is called with bad input
parameters.
class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server
failed due to an authentication problem.
* `Authentication is required` - This will happen if you use a SOCKS5 server which
requires authentication without providing a username / password at all.
* `All offered authentication methods were rejected` - This will happen if the proxy
requires a special authentication method which is not supported by this module.
* `Unknown username or invalid password` - Self descriptive.
class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to
authentication.
The parameter is a tuple containing a code, as given by the server,
and a description of the
error. The possible errors, according to the RFC, are:
* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to
fulfill your request (internal server error).
* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to
is blacklisted on the server or requires authentication.
* `0x03` - Network unreachable - The target could not be contacted. A router on the network
had replied with a destination net unreachable error.
* `0x04` - Host unreachable - The target could not be contacted. A router on the network
had replied with a destination host unreachable error.
* `0x05` - Connection refused - The target server has actively refused the connection
(the requested port is closed).
* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server
has expired. This usually means that there are network problems causing the packet
to be caught in a router-to-router "ping-pong".
* `0x07` - Command not supported - For instance if the server does not support UDP.
* `0x08` - Address type not supported - The client has provided an invalid address type.
When using this module, this error should not occur.
class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple
containing a code and a description of the error, as given by the server. The
possible error, according to the specification are:
* `0x5B` - Request rejected or failed - Will be raised in the event of an failure for any
reason other then the two mentioned next.
* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client -
The Socks server had tried an ident lookup on your computer and has failed. In this
case you should run an identd server and/or configure your firewall to allow incoming
connections to local port 113 from the remote server.
* `0x5D` - request rejected because the client program and identd report different user-ids -
The Socks server had performed an ident lookup on your computer and has received a
different userid than the one you have provided. Change your userid (through the
username parameter of the set_proxy method) to match and try again.
class `HTTPError` - This will be raised for HTTP errors. The message will contain
the HTTP status code and provided error message.
After establishing the connection, the object behaves like a standard socket.
Methods like `makefile()` and `settimeout()` should behave just like regular sockets.
Call the `close()` method to close the connection.
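A short sketch of catching these exception classes around a proxied connection (the proxy address and target host below are placeholders, not taken from this document):

    import socks

    s = socks.socksocket()
    s.set_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    try:
        s.connect(("www.example.com", 80))
    except socks.ProxyError as exc:
        # Base class of GeneralProxyError, SOCKS5AuthError, SOCKS5Error,
        # SOCKS4Error and HTTPError, as described above.
        print("proxy failure:", exc)
    finally:
        s.close()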
In addition to the `socksocket` class, an additional function worth mentioning is the
`set_default_proxy` function. The parameters are the same as the `set_proxy` method.
This function will set default proxy settings for newly created `socksocket` objects,
in which the proxy settings haven't been changed via the `set_proxy` method.
This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy,
by overriding the socket object.
For example:
>>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com")
>>> socket.socket = socks.socksocket
>>> urllib.urlopen("http://www.sourceforge.net/")
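The interpreter session above is Python 2 (`urllib`/`urllib2`); a Python 3 equivalent sketch, keeping the same placeholder proxy host:

    import socket
    import socks
    import urllib.request

    socks.set_default_proxy(socks.SOCKS5, "socks.example.com")
    socket.socket = socks.socksocket  # every new socket now goes through the default proxy
    print(urllib.request.urlopen("http://www.sourceforge.net/").status)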
PROBLEMS
---------
Please open a GitHub issue at https://github.com/Anorov/PySocks

@@ -1,10 +0,0 @@
PySocks-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PySocks-1.7.1.dist-info/LICENSE,sha256=cCfiFOAU63i3rcwc7aWspxOnn8T2oMUsnaWz5wfm_-k,1401
PySocks-1.7.1.dist-info/METADATA,sha256=zbQMizjPOOP4DhEiEX24XXjNrYuIxF9UGUpN0uFDB6Y,13235
PySocks-1.7.1.dist-info/RECORD,,
PySocks-1.7.1.dist-info/WHEEL,sha256=t_MpApv386-8PVts2R6wsTifdIn0vbUDTVv61IbqFC8,92
PySocks-1.7.1.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19
__pycache__/socks.cpython-310.pyc,,
__pycache__/sockshandler.cpython-310.pyc,,
socks.py,sha256=xOYn27t9IGrbTBzWsUUuPa0YBuplgiUykzkOB5V5iFY,31086
sockshandler.py,sha256=2SYGj-pwt1kjgLoZAmyeaEXCeZDWRmfVS_QG6kErGtY,3966

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.3)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1,28 +0,0 @@
Copyright 2007 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -1,128 +0,0 @@
Metadata-Version: 2.1
Name: Werkzeug
Version: 2.1.2
Summary: The comprehensive WSGI web application library.
Home-page: https://palletsprojects.com/p/werkzeug/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://werkzeug.palletsprojects.com/
Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/werkzeug/
Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/
Project-URL: Twitter, https://twitter.com/PalletsTeam
Project-URL: Chat, https://discord.gg/pallets
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
Provides-Extra: watchdog
Requires-Dist: watchdog ; extra == 'watchdog'
Werkzeug
========
*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
Werkzeug is a comprehensive `WSGI`_ web application library. It began as
a simple collection of various utilities for WSGI applications and has
become one of the most advanced WSGI utility libraries.
It includes:
- An interactive debugger that allows inspecting stack traces and
source code in the browser with an interactive interpreter for any
frame in the stack.
- A full-featured request object with objects to interact with
headers, query args, form data, files, and cookies.
- A response object that can wrap other WSGI applications and handle
streaming data.
- A routing system for matching URLs to endpoints and generating URLs
for endpoints, with an extensible system for capturing variables
from URLs.
- HTTP utilities to handle entity tags, cache control, dates, user
agents, cookies, files, and more.
- A threaded WSGI server for use while developing applications
locally.
- A test client for simulating HTTP requests during testing without
requiring running a server.
Werkzeug doesn't enforce any dependencies. It is up to the developer to
choose a template engine, database adapter, and even how to handle
requests. It can be used to build all sorts of end user applications
such as blogs, wikis, or bulletin boards.
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
providing more structure and patterns for defining powerful
applications.
.. _WSGI: https://wsgi.readthedocs.io/en/latest/
.. _Flask: https://www.palletsprojects.com/p/flask/
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U Werkzeug
.. _pip: https://pip.pypa.io/en/stable/getting-started/
A Simple Example
----------------
.. code-block:: python
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
return Response('Hello, World!')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 4000, application)
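The test client mentioned above can exercise this application without starting a server; a minimal sketch (the response handling shown is an assumption, not taken from this README, and `application` refers to the WSGI app defined in the example above):
.. code-block:: python

    # Drive the WSGI application defined above through werkzeug's test client.
    from werkzeug.test import Client

    client = Client(application)
    response = client.get("/")
    print(response.status_code, response.get_data(as_text=True))  # expected: 200 Hello, World!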
Donate
------
The Pallets organization develops and supports Werkzeug and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
`please donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://werkzeug.palletsprojects.com/
- Changes: https://werkzeug.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/Werkzeug/
- Source Code: https://github.com/pallets/werkzeug/
- Issue Tracker: https://github.com/pallets/werkzeug/issues/
- Website: https://palletsprojects.com/p/werkzeug/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets

@@ -1,87 +0,0 @@
Werkzeug-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Werkzeug-2.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
Werkzeug-2.1.2.dist-info/METADATA,sha256=vWBYPD9d_Qzl4WAupfJ5Fy_ep7pqMPnGvkSLYiCi4B0,4400
Werkzeug-2.1.2.dist-info/RECORD,,
Werkzeug-2.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
Werkzeug-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
Werkzeug-2.1.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
werkzeug/__init__.py,sha256=BtM-3LM8iob4OwPY693-LqZ5RcnDS4iftOqBK28uZ2k,188
werkzeug/__pycache__/__init__.cpython-310.pyc,,
werkzeug/__pycache__/_internal.cpython-310.pyc,,
werkzeug/__pycache__/_reloader.cpython-310.pyc,,
werkzeug/__pycache__/datastructures.cpython-310.pyc,,
werkzeug/__pycache__/exceptions.cpython-310.pyc,,
werkzeug/__pycache__/formparser.cpython-310.pyc,,
werkzeug/__pycache__/http.cpython-310.pyc,,
werkzeug/__pycache__/local.cpython-310.pyc,,
werkzeug/__pycache__/routing.cpython-310.pyc,,
werkzeug/__pycache__/security.cpython-310.pyc,,
werkzeug/__pycache__/serving.cpython-310.pyc,,
werkzeug/__pycache__/test.cpython-310.pyc,,
werkzeug/__pycache__/testapp.cpython-310.pyc,,
werkzeug/__pycache__/urls.cpython-310.pyc,,
werkzeug/__pycache__/user_agent.cpython-310.pyc,,
werkzeug/__pycache__/utils.cpython-310.pyc,,
werkzeug/__pycache__/wsgi.cpython-310.pyc,,
werkzeug/_internal.py,sha256=g8PHJz2z39I3x0vwTvTKbXIg0eUQqGF9UtUzDMWT0Qw,16222
werkzeug/_reloader.py,sha256=lYStlIDduTxBOB8BSozy_44HQ7YT5fup-x3uuac1-2o,14331
werkzeug/datastructures.py,sha256=Sk5gYGJbgvwpM-5IursyEWwo815RB5NAs2wFcTjHG0M,97018
werkzeug/datastructures.pyi,sha256=L7MfJjHrEjKuAZ57w5d2eaiUIWYya52crapklFnKUz0,34493
werkzeug/debug/__init__.py,sha256=Qds7CmReDr13XUaKYvcwnGNBQp6d86ooGV_to2Uw0C0,17730
werkzeug/debug/__pycache__/__init__.cpython-310.pyc,,
werkzeug/debug/__pycache__/console.cpython-310.pyc,,
werkzeug/debug/__pycache__/repr.cpython-310.pyc,,
werkzeug/debug/__pycache__/tbtools.cpython-310.pyc,,
werkzeug/debug/console.py,sha256=08mKGZLMsrd2E-0qD82J5knUbI2DomHXUQ5z0550a_o,6082
werkzeug/debug/repr.py,sha256=Mp911LMRzZUoNvrCLQfKKpQZbNKdIM8VbjzJQjBkdsM,9481
werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078
werkzeug/debug/tbtools.py,sha256=cBsPKCrB0FRT8i5EUdGo0A8MStWSj7O3Jk40r7Ll3ok,12633
werkzeug/exceptions.py,sha256=5nzjr4AN_J-jtkT2FgDIm8SUiC0tjzWcROXse06H6a8,26498
werkzeug/formparser.py,sha256=rLEu_ZwVpvqshZg6E4Qiv36QsmzmCytTijBeGX3dDGk,16056
werkzeug/http.py,sha256=RUwj0JM1Em3LHyqyXSJOkdtBOT24mJlGFbklqo3PWDY,44602
werkzeug/local.py,sha256=cj0M4BzMGdg_CD-H3osv9Zf9by4qY-BzAD68bxp979Q,18343
werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500
werkzeug/middleware/__pycache__/__init__.cpython-310.pyc,,
werkzeug/middleware/__pycache__/dispatcher.cpython-310.pyc,,
werkzeug/middleware/__pycache__/http_proxy.cpython-310.pyc,,
werkzeug/middleware/__pycache__/lint.cpython-310.pyc,,
werkzeug/middleware/__pycache__/profiler.cpython-310.pyc,,
werkzeug/middleware/__pycache__/proxy_fix.cpython-310.pyc,,
werkzeug/middleware/__pycache__/shared_data.cpython-310.pyc,,
werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580
werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558
werkzeug/middleware/lint.py,sha256=L4ISeRPhFbrMWt8CFHHExyvuWxE3CyqbfD5hTQKkVjA,13966
werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899
werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974
werkzeug/middleware/shared_data.py,sha256=fXjrEkuqxUVLG1DLrOdQLc96QQdjftCBZ1oM5oK89h4,9528
werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
werkzeug/routing.py,sha256=zopf1P3MG-atd33YdBwIO49AnJ7nem5SKQig5FIhKEI,84346
werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
werkzeug/sansio/__pycache__/__init__.cpython-310.pyc,,
werkzeug/sansio/__pycache__/multipart.cpython-310.pyc,,
werkzeug/sansio/__pycache__/request.cpython-310.pyc,,
werkzeug/sansio/__pycache__/response.cpython-310.pyc,,
werkzeug/sansio/__pycache__/utils.cpython-310.pyc,,
werkzeug/sansio/multipart.py,sha256=BRjBk_mCPjSJzwNVvBgmrJGk3QxA9pYfsgzFki28bxc,8751
werkzeug/sansio/request.py,sha256=6xhrNJAqScdbBF5i7HN-Y_1XjJ04wQtBKOsZuCy0AYw,20176
werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098
werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164
werkzeug/security.py,sha256=vrBofh4WZZoUo1eAdJ6F1DrzVRlYauGS2CUDYpbQKj8,4658
werkzeug/serving.py,sha256=aL-dIwzwO_-UuUs0cKwYFOynUWVmYcaDjz713Wy_BHE,38337
werkzeug/test.py,sha256=7Ur4IinGCk9k5WCNk6x-mr2JrnupvKRXt6n-qNfo9oE,47841
werkzeug/testapp.py,sha256=p-2lMyvaHXzP1lau0tUAJTbW4STogoMpXFyCkeRBkAI,9397
werkzeug/urls.py,sha256=Q9Si-eVh7yxk3rwkzrwGRm146FXVXgg9lBP3k0HUfVM,36600
werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420
werkzeug/utils.py,sha256=5HGm_5WSKBTVVl8IgvA-b-jL7gjT-LHWXH0ZKzCCI0I,24932
werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120
werkzeug/wrappers/__pycache__/__init__.cpython-310.pyc,,
werkzeug/wrappers/__pycache__/request.cpython-310.pyc,,
werkzeug/wrappers/__pycache__/response.cpython-310.pyc,,
werkzeug/wrappers/request.py,sha256=UQ559KkGS0Po6HTBgvKMlk1_AsNw5zstzm8o_dRrfdQ,23415
werkzeug/wrappers/response.py,sha256=c2HUXrrt5Sf8-XEB1fUXxm6jp7Lu80KR0A_tbQFvw1Q,34750
werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any

@ -1,222 +0,0 @@
# don't import any costly modules
import sys
import os
is_pypy = '__pypy__' in sys.builtin_module_names
def warn_distutils_present():
if 'distutils' not in sys.modules:
return
if is_pypy and sys.version_info < (3, 7):
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
import warnings
warnings.warn(
"Distutils was imported before Setuptools, but importing Setuptools "
"also replaces the `distutils` module in `sys.modules`. This may lead "
"to undesirable behaviors or errors. To avoid these issues, avoid "
"using distutils directly, ensure that setuptools is installed in the "
"traditional way (e.g. not an editable install), and/or make sure "
"that setuptools is always imported before distutils."
)
def clear_distutils():
if 'distutils' not in sys.modules:
return
import warnings
warnings.warn("Setuptools is replacing distutils.")
mods = [
name
for name in sys.modules
if name == "distutils" or name.startswith("distutils.")
]
for name in mods:
del sys.modules[name]
def enabled():
"""
Allow selection of distutils by environment variable.
"""
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
return which == 'local'
def ensure_local_distutils():
import importlib
clear_distutils()
# With the DistutilsMetaFinder in place,
# perform an import to cause distutils to be
# loaded from setuptools._distutils. Ref #2906.
with shim():
importlib.import_module('distutils')
# check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
assert 'setuptools._distutils.log' not in sys.modules
def do_override():
"""
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
if enabled():
warn_distutils_present()
ensure_local_distutils()
class _TrivialRe:
def __init__(self, *patterns):
self._patterns = patterns
def match(self, string):
return all(pat in string for pat in self._patterns)
class DistutilsMetaFinder:
def find_spec(self, fullname, path, target=None):
# optimization: only consider top level modules and those
# found in the CPython test suite.
if path is not None and not fullname.startswith('test.'):
return
method_name = 'spec_for_{fullname}'.format(**locals())
method = getattr(self, method_name, lambda: None)
return method()
def spec_for_distutils(self):
if self.is_cpython():
return
import importlib
import importlib.abc
import importlib.util
try:
mod = importlib.import_module('setuptools._distutils')
except Exception:
# There are a couple of cases where setuptools._distutils
# may not be present:
# - An older Setuptools without a local distutils is
# taking precedence. Ref #2957.
# - Path manipulation during sitecustomize removes
# setuptools from the path but only after the hook
# has been loaded. Ref #2980.
# In either case, fall back to stdlib behavior.
return
class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
mod.__name__ = 'distutils'
return mod
def exec_module(self, module):
pass
return importlib.util.spec_from_loader(
'distutils', DistutilsLoader(), origin=mod.__file__
)
@staticmethod
def is_cpython():
"""
Suppress supplying distutils for CPython (build and tests).
Ref #2965 and #3007.
"""
return os.path.isfile('pybuilddir.txt')
def spec_for_pip(self):
"""
Ensure stdlib distutils when running under pip.
See pypa/pip#8761 for rationale.
"""
if self.pip_imported_during_build():
return
clear_distutils()
self.spec_for_distutils = lambda: None
@classmethod
def pip_imported_during_build(cls):
"""
Detect if pip is being imported in a build script. Ref #2355.
"""
import traceback
return any(
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
)
@staticmethod
def frame_file_is_setup(frame):
"""
Return True if the indicated frame suggests a setup.py file.
"""
# some frames may not have __file__ (#2940)
return frame.f_globals.get('__file__', '').endswith('setup.py')
def spec_for_sensitive_tests(self):
"""
Ensure stdlib distutils when running select tests under CPython.
python/cpython#91169
"""
clear_distutils()
self.spec_for_distutils = lambda: None
sensitive_tests = (
[
'test.test_distutils',
'test.test_peg_generator',
'test.test_importlib',
]
if sys.version_info < (3, 10)
else [
'test.test_distutils',
]
)
for name in DistutilsMetaFinder.sensitive_tests:
setattr(
DistutilsMetaFinder,
f'spec_for_{name}',
DistutilsMetaFinder.spec_for_sensitive_tests,
)
DISTUTILS_FINDER = DistutilsMetaFinder()
def add_shim():
DISTUTILS_FINDER in sys.meta_path or insert_shim()
class shim:
def __enter__(self):
insert_shim()
def __exit__(self, exc, value, tb):
remove_shim()
def insert_shim():
sys.meta_path.insert(0, DISTUTILS_FINDER)
def remove_shim():
try:
sys.meta_path.remove(DISTUTILS_FINDER)
except ValueError:
pass
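# Editor's illustrative sketch, not part of the original module: the shim
# above works because a sys.meta_path finder may return a spec whose loader
# hands back an already-imported module under another name. A minimal version
# of that pattern follows; "json_alias" and AliasFinder are made-up names and
# this helper is never called by the module itself.
def _alias_finder_demo():
    import importlib
    import importlib.abc
    import importlib.util

    class AliasFinder(importlib.abc.MetaPathFinder):
        def __init__(self, alias, real_name):
            self.alias = alias
            self.real_name = real_name

        def find_spec(self, fullname, path=None, target=None):
            if fullname != self.alias:
                return None  # defer to the normal import machinery
            real = importlib.import_module(self.real_name)

            class AliasLoader(importlib.abc.Loader):
                def create_module(self, spec):
                    return real  # reuse the already-imported module object

                def exec_module(self, module):
                    pass  # nothing to execute; module is already initialized

            return importlib.util.spec_from_loader(
                self.alias, AliasLoader(), origin=getattr(real, "__file__", None)
            )

    sys.meta_path.insert(0, AliasFinder("json_alias", "json"))
    try:
        json_alias = importlib.import_module("json_alias")
        assert json_alias.loads("[1, 2]") == [1, 2]
    finally:
        sys.meta_path.pop(0)  # tidy up after the demonstration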

@ -1 +0,0 @@
__import__('_distutils_hack').do_override()

@ -1,145 +0,0 @@
Metadata-Version: 2.1
Name: async-generator
Version: 1.10
Summary: Async generators and context managers for Python 3.5+
Home-page: https://github.com/python-trio/async_generator
Author: Nathaniel J. Smith
Author-email: njs@pobox.com
License: MIT -or- Apache License 2.0
Keywords: async
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5
.. image:: https://img.shields.io/badge/chat-join%20now-blue.svg
:target: https://gitter.im/python-trio/general
:alt: Join chatroom
.. image:: https://img.shields.io/badge/docs-read%20now-blue.svg
:target: https://async-generator.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://travis-ci.org/python-trio/async_generator.svg?branch=master
:target: https://travis-ci.org/python-trio/async_generator
:alt: Automated test status
.. image:: https://ci.appveyor.com/api/projects/status/af4eyed8o8tc3t0r/branch/master?svg=true
:target: https://ci.appveyor.com/project/python-trio/trio/history
:alt: Automated test status (Windows)
.. image:: https://codecov.io/gh/python-trio/async_generator/branch/master/graph/badge.svg
:target: https://codecov.io/gh/python-trio/async_generator
:alt: Test coverage
The async_generator library
===========================
Python 3.6 added `async generators
<https://www.python.org/dev/peps/pep-0525/>`__. (What's an async
generator? `Check out my 5-minute lightning talk demo from PyCon 2016
<https://youtu.be/PulzIT8KYLk?t=24m30s>`__.) Python 3.7 adds some more
tools to make them usable, like ``contextlib.asynccontextmanager``.
This library gives you all that back to Python 3.5.
For example, this code only works in Python 3.6+:
.. code-block:: python3
   async def load_json_lines(stream_reader):
       async for line in stream_reader:
           yield json.loads(line)
But this code does the same thing, and works on Python 3.5+:
.. code-block:: python3
   from async_generator import async_generator, yield_

   @async_generator
   async def load_json_lines(stream_reader):
       async for line in stream_reader:
           await yield_(json.loads(line))
Or in Python 3.7, you can write:
.. code-block:: python3
   from contextlib import asynccontextmanager

   @asynccontextmanager
   async def background_server():
       async with trio.open_nursery() as nursery:
           value = await nursery.start(my_server)
           try:
               yield value
           finally:
               # Kill the server when the scope exits
               nursery.cancel_scope.cancel()
This is the same, but works back to Python 3.5:
.. code-block:: python3
   from async_generator import async_generator, yield_, asynccontextmanager

   @asynccontextmanager
   @async_generator
   async def background_server():
       async with trio.open_nursery() as nursery:
           value = await nursery.start(my_server)
           try:
               await yield_(value)
           finally:
               # Kill the server when the scope exits
               nursery.cancel_scope.cancel()
(And if you're on 3.6, you can use ``@asynccontextmanager`` with
native generators.)
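As an editorial aside (not part of the upstream README): consuming one of
these generators needs nothing special; you iterate it with a plain
``async for``, exactly as you would a native async generator. A minimal
sketch, assuming ``asyncio`` as the event loop and a made-up ``countdown``
generator:

.. code-block:: python3

   import asyncio

   from async_generator import async_generator, yield_

   @async_generator
   async def countdown(n):
       while n > 0:
           await yield_(n)
           n -= 1

   async def main():
       async for value in countdown(3):
           print(value)  # prints 3, 2, 1

   asyncio.get_event_loop().run_until_complete(main())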
Let's do this
=============
* Install: ``python3 -m pip install -U async_generator`` (or on Windows,
maybe ``py -3 -m pip install -U async_generator``)
* Manual: https://async-generator.readthedocs.io/
* Bug tracker and source code: https://github.com/python-trio/async_generator
* Real-time chat: https://gitter.im/python-trio/general
* License: MIT or Apache 2, your choice
* Contributor guide: https://trio.readthedocs.io/en/latest/contributing.html
* Code of conduct: Contributors are requested to follow our `code of
conduct
<https://trio.readthedocs.io/en/latest/code-of-conduct.html>`__ in
all project spaces.
How come some of those links talk about "trio"?
===============================================
`Trio <https://trio.readthedocs.io>`__ is a new async concurrency
library for Python that's obsessed with usability and correctness: we
want to make it *easy* to get things *right*. The ``async_generator``
library is maintained by the Trio project as part of that mission, and
because Trio uses ``async_generator`` internally.
You can use ``async_generator`` with any async library. It works great
with ``asyncio``, or Twisted, or whatever you like. (But we think Trio
is pretty sweet.)

@ -1,21 +0,0 @@
async_generator-1.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
async_generator-1.10.dist-info/METADATA,sha256=FZoDYGYfSdJkSUSR5T_YMqq2TnwYa-RFOm6SbhWFzGA,4870
async_generator-1.10.dist-info/RECORD,,
async_generator-1.10.dist-info/WHEEL,sha256=NzFAKnL7g-U64xnS1s5e3mJnxKpOTeOtlXdFwS9yNXI,92
async_generator-1.10.dist-info/top_level.txt,sha256=Qc2NF6EJciFqrZ6gAdWuQIveMaqWJw4jqv1anjEHT_U,16
async_generator/__init__.py,sha256=6eYc-CD3B5kQx8LzMhTEqJyKQH5UTsy8IZhR3AvcVb8,454
async_generator/__pycache__/__init__.cpython-310.pyc,,
async_generator/__pycache__/_impl.cpython-310.pyc,,
async_generator/__pycache__/_util.cpython-310.pyc,,
async_generator/__pycache__/_version.cpython-310.pyc,,
async_generator/_impl.py,sha256=t1p5goS6TrQmpxLL4vOFa9zpl0SorICI8WZc8e81lxU,16106
async_generator/_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
async_generator/_tests/__pycache__/__init__.cpython-310.pyc,,
async_generator/_tests/__pycache__/conftest.cpython-310.pyc,,
async_generator/_tests/__pycache__/test_async_generator.cpython-310.pyc,,
async_generator/_tests/__pycache__/test_util.cpython-310.pyc,,
async_generator/_tests/conftest.py,sha256=eL5uA75o6d9feDTeEXu8vYDg-kgbnfuaGILJKGyWOFw,1211
async_generator/_tests/test_async_generator.py,sha256=HBXQAlZdt68hzSQ6eMoSYsoO--Bic3Ojv1Z71hCQb7U,27936
async_generator/_tests/test_util.py,sha256=-vLPOW_V2etk3Bf2M3cqEOGjxeRPFnSGfftsIYBaCCQ,6373
async_generator/_util.py,sha256=jtBz2-fn6ec0JmaKKY-sC0TAq6zqGAKL3qs7LYQ4uFw,4358
async_generator/_version.py,sha256=Nas37COFU-AbvlukCSCZDPrvCzgtKiG5QGZXnYenLC8,21

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.1)
Root-Is-Purelib: true
Tag: py3-none-any

@ -1,23 +0,0 @@
from ._version import __version__
from ._impl import (
async_generator,
yield_,
yield_from_,
isasyncgen,
isasyncgenfunction,
get_asyncgen_hooks,
set_asyncgen_hooks,
)
from ._util import aclosing, asynccontextmanager
__all__ = [
"async_generator",
"yield_",
"yield_from_",
"aclosing",
"isasyncgen",
"isasyncgenfunction",
"asynccontextmanager",
"get_asyncgen_hooks",
"set_asyncgen_hooks",
]

@ -1,455 +0,0 @@
import sys
from functools import wraps
from types import coroutine
import inspect
from inspect import (
getcoroutinestate, CORO_CREATED, CORO_CLOSED, CORO_SUSPENDED
)
import collections.abc
class YieldWrapper:
def __init__(self, payload):
self.payload = payload
def _wrap(value):
return YieldWrapper(value)
def _is_wrapped(box):
return isinstance(box, YieldWrapper)
def _unwrap(box):
return box.payload
# This is the magic code that lets you use yield_ and yield_from_ with native
# generators.
#
# The old version worked great on Linux and MacOS, but not on Windows, because
# it depended on _PyAsyncGenValueWrapperNew. The new version segfaults
# everywhere, and I'm not sure why -- probably my lack of understanding
# of ctypes and refcounts.
#
# There are also some commented out tests that should be re-enabled if this is
# fixed:
#
# if sys.version_info >= (3, 6):
# # Use the same box type that the interpreter uses internally. This allows
# # yield_ and (more importantly!) yield_from_ to work in built-in
# # generators.
# import ctypes # mua ha ha.
#
# # We used to call _PyAsyncGenValueWrapperNew to create and set up new
# # wrapper objects, but that symbol isn't available on Windows:
# #
# # https://github.com/python-trio/async_generator/issues/5
# #
# # Fortunately, the type object is available, but it means we have to do
# # this the hard way.
#
# # We don't actually need to access this, but we need to make a ctypes
# # structure so we can call addressof.
# class _ctypes_PyTypeObject(ctypes.Structure):
# pass
# _PyAsyncGenWrappedValue_Type_ptr = ctypes.addressof(
# _ctypes_PyTypeObject.in_dll(
# ctypes.pythonapi, "_PyAsyncGenWrappedValue_Type"))
# _PyObject_GC_New = ctypes.pythonapi._PyObject_GC_New
# _PyObject_GC_New.restype = ctypes.py_object
# _PyObject_GC_New.argtypes = (ctypes.c_void_p,)
#
# _Py_IncRef = ctypes.pythonapi.Py_IncRef
# _Py_IncRef.restype = None
# _Py_IncRef.argtypes = (ctypes.py_object,)
#
# class _ctypes_PyAsyncGenWrappedValue(ctypes.Structure):
# _fields_ = [
# ('PyObject_HEAD', ctypes.c_byte * object().__sizeof__()),
# ('agw_val', ctypes.py_object),
# ]
# def _wrap(value):
# box = _PyObject_GC_New(_PyAsyncGenWrappedValue_Type_ptr)
# raw = ctypes.cast(ctypes.c_void_p(id(box)),
# ctypes.POINTER(_ctypes_PyAsyncGenWrappedValue))
# raw.contents.agw_val = value
# _Py_IncRef(value)
# return box
#
# def _unwrap(box):
# assert _is_wrapped(box)
# raw = ctypes.cast(ctypes.c_void_p(id(box)),
# ctypes.POINTER(_ctypes_PyAsyncGenWrappedValue))
# value = raw.contents.agw_val
# _Py_IncRef(value)
# return value
#
# _PyAsyncGenWrappedValue_Type = type(_wrap(1))
# def _is_wrapped(box):
# return isinstance(box, _PyAsyncGenWrappedValue_Type)
# The magic @coroutine decorator is how you write the bottom level of
# coroutine stacks -- 'async def' can only use 'await' = yield from; but
# eventually we must bottom out in a @coroutine that calls plain 'yield'.
@coroutine
def _yield_(value):
return (yield _wrap(value))
# But we wrap the bare @coroutine version in an async def, because async def
# has the magic feature that users can get warning messages if they forget to
# use 'await'.
async def yield_(value=None):
return await _yield_(value)
async def yield_from_(delegate):
# Transcribed with adaptations from:
#
# https://www.python.org/dev/peps/pep-0380/#formal-semantics
#
# This takes advantage of a sneaky trick: if an @async_generator-wrapped
# function calls another async function (like yield_from_), and that
# second async function calls yield_, then because of the hack we use to
# implement yield_, the yield_ will actually propagate through yield_from_
# back to the @async_generator wrapper. So even though we're a regular
# function, we can directly yield values out of the calling async
# generator.
def unpack_StopAsyncIteration(e):
if e.args:
return e.args[0]
else:
return None
_i = type(delegate).__aiter__(delegate)
if hasattr(_i, "__await__"):
_i = await _i
try:
_y = await type(_i).__anext__(_i)
except StopAsyncIteration as _e:
_r = unpack_StopAsyncIteration(_e)
else:
while 1:
try:
_s = await yield_(_y)
except GeneratorExit as _e:
try:
_m = _i.aclose
except AttributeError:
pass
else:
await _m()
raise _e
except BaseException as _e:
_x = sys.exc_info()
try:
_m = _i.athrow
except AttributeError:
raise _e
else:
try:
_y = await _m(*_x)
except StopAsyncIteration as _e:
_r = unpack_StopAsyncIteration(_e)
break
else:
try:
if _s is None:
_y = await type(_i).__anext__(_i)
else:
_y = await _i.asend(_s)
except StopAsyncIteration as _e:
_r = unpack_StopAsyncIteration(_e)
break
return _r
# This is the awaitable / iterator that implements asynciter.__anext__() and
# friends.
#
# Note: we can be sloppy about the distinction between
#
# type(self._it).__next__(self._it)
#
# and
#
# self._it.__next__()
#
# because we happen to know that self._it is not a general iterator object,
# but specifically a coroutine iterator object where these are equivalent.
class ANextIter:
def __init__(self, it, first_fn, *first_args):
self._it = it
self._first_fn = first_fn
self._first_args = first_args
def __await__(self):
return self
def __next__(self):
if self._first_fn is not None:
first_fn = self._first_fn
first_args = self._first_args
self._first_fn = self._first_args = None
return self._invoke(first_fn, *first_args)
else:
return self._invoke(self._it.__next__)
def send(self, value):
return self._invoke(self._it.send, value)
def throw(self, type, value=None, traceback=None):
return self._invoke(self._it.throw, type, value, traceback)
def _invoke(self, fn, *args):
try:
result = fn(*args)
except StopIteration as e:
# The underlying generator returned, so we should signal the end
# of iteration.
raise StopAsyncIteration(e.value)
except StopAsyncIteration as e:
# PEP 479 says: if a generator raises Stop(Async)Iteration, then
# it should be wrapped into a RuntimeError. Python automatically
# enforces this for StopIteration; for StopAsyncIteration we need
# to do it ourselves.
raise RuntimeError(
"async_generator raise StopAsyncIteration"
) from e
if _is_wrapped(result):
raise StopIteration(_unwrap(result))
else:
return result
UNSPECIFIED = object()
try:
from sys import get_asyncgen_hooks, set_asyncgen_hooks
except ImportError:
import threading
asyncgen_hooks = collections.namedtuple(
"asyncgen_hooks", ("firstiter", "finalizer")
)
class _hooks_storage(threading.local):
def __init__(self):
self.firstiter = None
self.finalizer = None
_hooks = _hooks_storage()
def get_asyncgen_hooks():
return asyncgen_hooks(
firstiter=_hooks.firstiter, finalizer=_hooks.finalizer
)
def set_asyncgen_hooks(firstiter=UNSPECIFIED, finalizer=UNSPECIFIED):
if firstiter is not UNSPECIFIED:
if firstiter is None or callable(firstiter):
_hooks.firstiter = firstiter
else:
raise TypeError(
"callable firstiter expected, got {}".format(
type(firstiter).__name__
)
)
if finalizer is not UNSPECIFIED:
if finalizer is None or callable(finalizer):
_hooks.finalizer = finalizer
else:
raise TypeError(
"callable finalizer expected, got {}".format(
type(finalizer).__name__
)
)
class AsyncGenerator:
# https://bitbucket.org/pypy/pypy/issues/2786:
# PyPy implements 'await' in a way that requires the frame object
# used to execute a coroutine to keep a weakref to that coroutine.
# During a GC pass, weakrefs to all doomed objects are broken
# before any of the doomed objects' finalizers are invoked.
# If an AsyncGenerator is unreachable, its _coroutine probably
# is too, and the weakref from ag._coroutine.cr_frame to
# ag._coroutine will be broken before ag.__del__ can do its
# one-turn close attempt or can schedule a full aclose() using
# the registered finalization hook. It doesn't look like the
# underlying issue is likely to be fully fixed anytime soon,
# so we work around it by preventing an AsyncGenerator and
# its _coroutine from being considered newly unreachable at
# the same time if the AsyncGenerator's finalizer might want
# to iterate the coroutine some more.
_pypy_issue2786_workaround = set()
def __init__(self, coroutine):
self._coroutine = coroutine
self._it = coroutine.__await__()
self.ag_running = False
self._finalizer = None
self._closed = False
self._hooks_inited = False
# On python 3.5.0 and 3.5.1, __aiter__ must be awaitable.
# Starting in 3.5.2, it should not be awaitable, and if it is, then it
# raises a PendingDeprecationWarning.
# See:
# https://www.python.org/dev/peps/pep-0492/#api-design-and-implementation-revisions
# https://docs.python.org/3/reference/datamodel.html#async-iterators
# https://bugs.python.org/issue27243
if sys.version_info < (3, 5, 2):
async def __aiter__(self):
return self
else:
def __aiter__(self):
return self
################################################################
# Introspection attributes
################################################################
@property
def ag_code(self):
return self._coroutine.cr_code
@property
def ag_frame(self):
return self._coroutine.cr_frame
################################################################
# Core functionality
################################################################
# These need to return awaitables, rather than being async functions,
# to match the native behavior where the firstiter hook is called
# immediately on asend()/etc, even if the coroutine that asend()
# produces isn't awaited for a bit.
def __anext__(self):
return self._do_it(self._it.__next__)
def asend(self, value):
return self._do_it(self._it.send, value)
def athrow(self, type, value=None, traceback=None):
return self._do_it(self._it.throw, type, value, traceback)
def _do_it(self, start_fn, *args):
if not self._hooks_inited:
self._hooks_inited = True
(firstiter, self._finalizer) = get_asyncgen_hooks()
if firstiter is not None:
firstiter(self)
if sys.implementation.name == "pypy":
self._pypy_issue2786_workaround.add(self._coroutine)
# On CPython 3.5.2 (but not 3.5.0), coroutines get cranky if you try
# to iterate them after they're exhausted. Generators OTOH just raise
# StopIteration. We want to convert the one into the other, so we need
# to avoid iterating stopped coroutines.
if getcoroutinestate(self._coroutine) is CORO_CLOSED:
raise StopAsyncIteration()
async def step():
if self.ag_running:
raise ValueError("async generator already executing")
try:
self.ag_running = True
return await ANextIter(self._it, start_fn, *args)
except StopAsyncIteration:
self._pypy_issue2786_workaround.discard(self._coroutine)
raise
finally:
self.ag_running = False
return step()
################################################################
# Cleanup
################################################################
async def aclose(self):
state = getcoroutinestate(self._coroutine)
if state is CORO_CLOSED or self._closed:
return
# Make sure that even if we raise "async_generator ignored
# GeneratorExit", and thus fail to exhaust the coroutine,
# __del__ doesn't complain again.
self._closed = True
if state is CORO_CREATED:
# Make sure that aclose() on an unstarted generator returns
# successfully and prevents future iteration.
self._it.close()
return
try:
await self.athrow(GeneratorExit)
except (GeneratorExit, StopAsyncIteration):
self._pypy_issue2786_workaround.discard(self._coroutine)
else:
raise RuntimeError("async_generator ignored GeneratorExit")
def __del__(self):
self._pypy_issue2786_workaround.discard(self._coroutine)
if getcoroutinestate(self._coroutine) is CORO_CREATED:
# Never started, nothing to clean up, just suppress the "coroutine
# never awaited" message.
self._coroutine.close()
if getcoroutinestate(self._coroutine) is CORO_SUSPENDED and not self._closed:
if self._finalizer is not None:
self._finalizer(self)
else:
# Mimic the behavior of native generators on GC with no finalizer:
# throw in GeneratorExit, run for one turn, and complain if it didn't
# finish.
thrower = self.athrow(GeneratorExit)
try:
thrower.send(None)
except (GeneratorExit, StopAsyncIteration):
pass
except StopIteration:
raise RuntimeError("async_generator ignored GeneratorExit")
else:
raise RuntimeError(
"async_generator {!r} awaited during finalization; install "
"a finalization hook to support this, or wrap it in "
"'async with aclosing(...):'"
.format(self.ag_code.co_name)
)
finally:
thrower.close()
if hasattr(collections.abc, "AsyncGenerator"):
collections.abc.AsyncGenerator.register(AsyncGenerator)
def async_generator(coroutine_maker):
@wraps(coroutine_maker)
def async_generator_maker(*args, **kwargs):
return AsyncGenerator(coroutine_maker(*args, **kwargs))
async_generator_maker._async_gen_function = id(async_generator_maker)
return async_generator_maker
def isasyncgen(obj):
if hasattr(inspect, "isasyncgen"):
if inspect.isasyncgen(obj):
return True
return isinstance(obj, AsyncGenerator)
def isasyncgenfunction(obj):
if hasattr(inspect, "isasyncgenfunction"):
if inspect.isasyncgenfunction(obj):
return True
return getattr(obj, "_async_gen_function", -1) == id(obj)

@ -1,36 +0,0 @@
import pytest
from functools import wraps, partial
import inspect
import types
@types.coroutine
def mock_sleep():
yield "mock_sleep"
# Wrap any 'async def' tests so that they get automatically iterated.
# We used to use pytest-asyncio as a convenient way to do this, but nowadays
# pytest-asyncio uses us! In addition to it being generally bad for our test
# infrastructure to depend on the code-under-test, this totally messes up
# coverage info because depending on pytest's plugin load order, we might get
# imported before pytest-cov can be loaded and start gathering coverage.
@pytest.hookimpl(tryfirst=True)
def pytest_pyfunc_call(pyfuncitem):
if inspect.iscoroutinefunction(pyfuncitem.obj):
fn = pyfuncitem.obj
@wraps(fn)
def wrapper(**kwargs):
coro = fn(**kwargs)
try:
while True:
value = coro.send(None)
if value != "mock_sleep": # pragma: no cover
raise RuntimeError(
"coroutine runner confused: {!r}".format(value)
)
except StopIteration:
pass
pyfuncitem.obj = wrapper

@ -1,227 +0,0 @@
import pytest
from .. import aclosing, async_generator, yield_, asynccontextmanager
@async_generator
async def async_range(count, closed_slot):
try:
for i in range(count): # pragma: no branch
await yield_(i)
except GeneratorExit:
closed_slot[0] = True
async def test_aclosing():
closed_slot = [False]
async with aclosing(async_range(10, closed_slot)) as gen:
it = iter(range(10))
async for item in gen: # pragma: no branch
assert item == next(it)
if item == 4:
break
assert closed_slot[0]
closed_slot = [False]
try:
async with aclosing(async_range(10, closed_slot)) as gen:
it = iter(range(10))
async for item in gen: # pragma: no branch
assert item == next(it)
if item == 4:
raise ValueError()
except ValueError:
pass
assert closed_slot[0]
async def test_contextmanager_do_not_unchain_non_stopiteration_exceptions():
@asynccontextmanager
@async_generator
async def manager_issue29692():
try:
await yield_()
except Exception as exc:
raise RuntimeError('issue29692:Chained') from exc
with pytest.raises(RuntimeError) as excinfo:
async with manager_issue29692():
raise ZeroDivisionError
assert excinfo.value.args[0] == 'issue29692:Chained'
assert isinstance(excinfo.value.__cause__, ZeroDivisionError)
# This is a little funky because of implementation details in
# async_generator. It can all go away once we stop supporting Python 3.5.
with pytest.raises(RuntimeError) as excinfo:
async with manager_issue29692():
exc = StopIteration('issue29692:Unchained')
raise exc
assert excinfo.value.args[0] == 'issue29692:Chained'
cause = excinfo.value.__cause__
assert cause.args[0] == 'generator raised StopIteration'
assert cause.__cause__ is exc
with pytest.raises(StopAsyncIteration) as excinfo:
async with manager_issue29692():
raise StopAsyncIteration('issue29692:Unchained')
assert excinfo.value.args[0] == 'issue29692:Unchained'
assert excinfo.value.__cause__ is None
@asynccontextmanager
@async_generator
async def noop_async_context_manager():
await yield_()
with pytest.raises(StopIteration):
async with noop_async_context_manager():
raise StopIteration
# Native async generators are only available from Python 3.6 and onwards
nativeasyncgenerators = True
try:
exec(
"""
@asynccontextmanager
async def manager_issue29692_2():
try:
yield
except Exception as exc:
raise RuntimeError('issue29692:Chained') from exc
"""
)
except SyntaxError:
nativeasyncgenerators = False
@pytest.mark.skipif(
not nativeasyncgenerators,
reason="Python < 3.6 doesn't have native async generators"
)
async def test_native_contextmanager_do_not_unchain_non_stopiteration_exceptions(
):
with pytest.raises(RuntimeError) as excinfo:
async with manager_issue29692_2():
raise ZeroDivisionError
assert excinfo.value.args[0] == 'issue29692:Chained'
assert isinstance(excinfo.value.__cause__, ZeroDivisionError)
for cls in [StopIteration, StopAsyncIteration]:
with pytest.raises(cls) as excinfo:
async with manager_issue29692_2():
raise cls('issue29692:Unchained')
assert excinfo.value.args[0] == 'issue29692:Unchained'
assert excinfo.value.__cause__ is None
async def test_asynccontextmanager_exception_passthrough():
# This was the cause of annoying coverage flapping, see gh-140
@asynccontextmanager
@async_generator
async def noop_async_context_manager():
await yield_()
for exc_type in [StopAsyncIteration, RuntimeError, ValueError]:
with pytest.raises(exc_type):
async with noop_async_context_manager():
raise exc_type
# And let's also check a boring nothing pass-through while we're at it
async with noop_async_context_manager():
pass
async def test_asynccontextmanager_catches_exception():
@asynccontextmanager
@async_generator
async def catch_it():
with pytest.raises(ValueError):
await yield_()
async with catch_it():
raise ValueError
async def test_asynccontextmanager_different_exception():
@asynccontextmanager
@async_generator
async def switch_it():
try:
await yield_()
except KeyError:
raise ValueError
with pytest.raises(ValueError):
async with switch_it():
raise KeyError
async def test_asynccontextmanager_nice_message_on_sync_enter():
@asynccontextmanager
@async_generator
async def xxx(): # pragma: no cover
await yield_()
cm = xxx()
with pytest.raises(RuntimeError) as excinfo:
with cm:
pass # pragma: no cover
assert "async with" in str(excinfo.value)
async with cm:
pass
async def test_asynccontextmanager_no_yield():
@asynccontextmanager
@async_generator
async def yeehaw():
pass
with pytest.raises(RuntimeError) as excinfo:
async with yeehaw():
assert False # pragma: no cover
assert "didn't yield" in str(excinfo.value)
async def test_asynccontextmanager_too_many_yields():
closed_count = 0
@asynccontextmanager
@async_generator
async def doubleyield():
try:
await yield_()
except Exception:
pass
try:
await yield_()
finally:
nonlocal closed_count
closed_count += 1
with pytest.raises(RuntimeError) as excinfo:
async with doubleyield():
pass
assert "didn't stop" in str(excinfo.value)
assert closed_count == 1
with pytest.raises(RuntimeError) as excinfo:
async with doubleyield():
raise ValueError
assert "didn't stop after athrow" in str(excinfo.value)
assert closed_count == 2
async def test_asynccontextmanager_requires_asyncgenfunction():
with pytest.raises(TypeError):
@asynccontextmanager
def syncgen(): # pragma: no cover
yield

@ -1,110 +0,0 @@
import sys
from functools import wraps
from ._impl import isasyncgenfunction
class aclosing:
def __init__(self, aiter):
self._aiter = aiter
async def __aenter__(self):
return self._aiter
async def __aexit__(self, *args):
await self._aiter.aclose()
# Very much derived from the one in contextlib, by copy/pasting and then
# asyncifying everything. (Also I dropped the obscure support for using
# context managers as function decorators. It could be re-added; I just
# couldn't be bothered.)
# So this is a derivative work licensed under the PSF License, which requires
# the following notice:
#
# Copyright © 2001-2017 Python Software Foundation; All Rights Reserved
class _AsyncGeneratorContextManager:
def __init__(self, func, args, kwds):
self._func_name = func.__name__
self._agen = func(*args, **kwds).__aiter__()
async def __aenter__(self):
if sys.version_info < (3, 5, 2):
self._agen = await self._agen
try:
return await self._agen.asend(None)
except StopAsyncIteration:
raise RuntimeError("async generator didn't yield") from None
async def __aexit__(self, type, value, traceback):
async with aclosing(self._agen):
if type is None:
try:
await self._agen.asend(None)
except StopAsyncIteration:
return False
else:
raise RuntimeError("async generator didn't stop")
else:
# It used to be possible to have type != None, value == None:
# https://bugs.python.org/issue1705170
# but AFAICT this can't happen anymore.
assert value is not None
try:
await self._agen.athrow(type, value, traceback)
raise RuntimeError(
"async generator didn't stop after athrow()"
)
except StopAsyncIteration as exc:
# Suppress StopIteration *unless* it's the same exception
# that was passed to throw(). This prevents a
# StopIteration raised inside the "with" statement from
# being suppressed.
return (exc is not value)
except RuntimeError as exc:
# Don't re-raise the passed in exception. (issue27112)
if exc is value:
return False
# Likewise, avoid suppressing if a StopIteration exception
# was passed to throw() and later wrapped into a
# RuntimeError (see PEP 479).
if (isinstance(value, (StopIteration, StopAsyncIteration))
and exc.__cause__ is value):
return False
raise
except:
# only re-raise if it's *not* the exception that was
# passed to throw(), because __exit__() must not raise an
# exception unless __exit__() itself failed. But throw()
# has to raise the exception to signal propagation, so
# this fixes the impedance mismatch between the throw()
# protocol and the __exit__() protocol.
#
if sys.exc_info()[1] is value:
return False
raise
def __enter__(self):
raise RuntimeError(
"use 'async with {func_name}(...)', not 'with {func_name}(...)'".
format(func_name=self._func_name)
)
def __exit__(self): # pragma: no cover
assert False, """Never called, but should be defined"""
def asynccontextmanager(func):
"""Like @contextmanager, but async."""
if not isasyncgenfunction(func):
raise TypeError(
"must be an async generator (native or from async_generator; "
"if using @async_generator then @acontextmanager must be on top."
)
@wraps(func)
def helper(*args, **kwds):
return _AsyncGeneratorContextManager(func, args, kwds)
# A hint for sphinxcontrib-trio:
helper.__returns_acontextmanager__ = True
return helper

@ -1,79 +0,0 @@
# SPDX-License-Identifier: MIT
import sys
from functools import partial
from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
NOTHING,
Attribute,
Factory,
attrib,
attrs,
fields,
fields_dict,
make_class,
validate,
)
from ._version_info import VersionInfo
__version__ = "22.1.0"
__version_info__ = VersionInfo._from_version_string(__version__)
__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"
__author__ = "Hynek Schlawack"
__email__ = "hs@ox.cx"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
__all__ = [
"Attribute",
"Factory",
"NOTHING",
"asdict",
"assoc",
"astuple",
"attr",
"attrib",
"attributes",
"attrs",
"cmp_using",
"converters",
"evolve",
"exceptions",
"fields",
"fields_dict",
"filters",
"get_run_validators",
"has",
"ib",
"make_class",
"resolve_types",
"s",
"set_run_validators",
"setters",
"validate",
"validators",
]
if sys.version_info[:2] >= (3, 6):
from ._next_gen import define, field, frozen, mutable # noqa: F401
__all__.extend(("define", "field", "frozen", "mutable"))
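# Editor's illustrative sketch, not part of the original file: the classic
# attr.s/attr.ib spelling and the define/field spelling exported above drive
# the same machinery. `Legacy` and `Modern` are made-up example classes, this
# helper is never called by the package, and it assumes the Python >= 3.6
# branch above ran so that `define` and `field` exist.
def _spelling_demo():
    @attrs
    class Legacy:
        x = attrib(default=0)

    @define
    class Modern:
        x: int = field(default=0)

    assert Legacy() == Legacy(0)
    assert Modern(1).x == 1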

@ -1,486 +0,0 @@
import sys
from typing import (
Any,
Callable,
ClassVar,
Dict,
Generic,
List,
Mapping,
Optional,
Protocol,
Sequence,
Tuple,
Type,
TypeVar,
Union,
overload,
)
# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._version_info import VersionInfo
__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str
_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)
_EqOrderType = Union[bool, Callable[[Any], Any]]
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_ConverterType = Callable[[Any], Any]
_FilterType = Callable[[Attribute[_T], _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrArgType = Union[
_OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
_FieldTransformer = Callable[
[type, List[Attribute[Any]]], List[Attribute[Any]]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
# A protocol to be able to statically accept an attrs class.
class AttrsInstance(Protocol):
__attrs_attrs__: ClassVar[Any]
# _make --
NOTHING: object
# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
if sys.version_info >= (3, 8):
from typing import Literal
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Callable[[Any], _T],
takes_self: Literal[True],
) -> _T: ...
@overload
def Factory(
factory: Callable[[], _T],
takes_self: Literal[False],
) -> _T: ...
else:
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Union[Callable[[Any], _T], Callable[[], _T]],
takes_self: bool = ...,
) -> _T: ...
# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...
class Attribute(Generic[_T]):
name: str
default: Optional[_T]
validator: Optional[_ValidatorType[_T]]
repr: _ReprArgType
cmp: _EqOrderType
eq: _EqOrderType
order: _EqOrderType
hash: Optional[bool]
init: bool
converter: Optional[_ConverterType]
metadata: Dict[Any, Any]
type: Optional[Type[_T]]
kw_only: bool
on_setattr: _OnSetAttrType
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
# - Pros: Handles simple cases correctly
# - Cons: Might produce less informative errors in the case of conflicting
# TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
# - Pros: Better error messages than #1 for conflicting TypeVars
# - Cons: Terrible error messages for validator checks.
# e.g. attr.ib(type=int, validator=validate_str)
# -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
# - Pros: Simple here, and we could customize the plugin with our own errors.
# - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.
# `attr` lies about its return type to make the following possible:
# attr() -> Any
# attr(8) -> int
# attr(validator=<some callable>) -> Whatever the callable expects.
# This makes this type of assignments possible:
# x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
default: None = ...,
validator: None = ...,
repr: _ReprArgType = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
type: None = ...,
converter: None = ...,
factory: None = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
default: None = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
type: Optional[Type[_T]] = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...
# This form catches an explicit default argument.
@overload
def attrib(
default: _T,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
type: Optional[Type[_T]] = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...
# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
default: Optional[_T] = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
type: object = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
def field(
*,
default: None = ...,
validator: None = ...,
repr: _ReprArgType = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
converter: None = ...,
factory: None = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def field(
*,
default: None = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...
# This form catches an explicit default argument.
@overload
def field(
*,
default: _T,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...
# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def field(
*,
default: Optional[_T] = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
maybe_cls: _C,
these: Optional[Dict[str, Any]] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
maybe_cls: None = ...,
these: Optional[Dict[str, Any]] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
maybe_cls: _C,
*,
these: Optional[Dict[str, Any]] = ...,
repr: bool = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
auto_detect: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
maybe_cls: None = ...,
*,
these: Optional[Dict[str, Any]] = ...,
repr: bool = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
auto_detect: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> Callable[[_C], _C]: ...
mutable = define
frozen = define # they differ only in their defaults
def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
cls: _C,
globalns: Optional[Dict[str, Any]] = ...,
localns: Optional[Dict[str, Any]] = ...,
attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...
# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
name: str,
attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
bases: Tuple[type, ...] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
collect_by_mro: bool = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
) -> type: ...
# _funcs --
# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
inst: AttrsInstance,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
dict_factory: Type[Mapping[Any, Any]] = ...,
retain_collection_types: bool = ...,
value_serializer: Optional[
Callable[[type, Attribute[Any], Any], Any]
] = ...,
tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...
# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
inst: AttrsInstance,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
tuple_factory: Type[Sequence[Any]] = ...,
retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...
# _config --
def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...
# aliases --
s = attributes = attrs
ib = attr = attrib
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)

@ -1,155 +0,0 @@
# SPDX-License-Identifier: MIT
import functools
import types
from ._make import _make_ne
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
def cmp_using(
eq=None,
lt=None,
le=None,
gt=None,
ge=None,
require_same_type=True,
class_name="Comparable",
):
"""
Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
``cmp`` arguments to customize field comparison.
The resulting class will have a full set of ordering methods if
at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
:param Optional[callable] eq: `callable` used to evaluate equality
of two objects.
:param Optional[callable] lt: `callable` used to evaluate whether
one object is less than another object.
:param Optional[callable] le: `callable` used to evaluate whether
one object is less than or equal to another object.
:param Optional[callable] gt: `callable` used to evaluate whether
one object is greater than another object.
:param Optional[callable] ge: `callable` used to evaluate whether
one object is greater than or equal to another object.
:param bool require_same_type: When `True`, equality and ordering methods
will return `NotImplemented` if objects are not of the same type.
:param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
See `comparison` for more details.
.. versionadded:: 21.1.0
"""
body = {
"__slots__": ["value"],
"__init__": _make_init(),
"_requirements": [],
"_is_comparable_to": _is_comparable_to,
}
# Add operations.
num_order_functions = 0
has_eq_function = False
if eq is not None:
has_eq_function = True
body["__eq__"] = _make_operator("eq", eq)
body["__ne__"] = _make_ne()
if lt is not None:
num_order_functions += 1
body["__lt__"] = _make_operator("lt", lt)
if le is not None:
num_order_functions += 1
body["__le__"] = _make_operator("le", le)
if gt is not None:
num_order_functions += 1
body["__gt__"] = _make_operator("gt", gt)
if ge is not None:
num_order_functions += 1
body["__ge__"] = _make_operator("ge", ge)
type_ = types.new_class(
class_name, (object,), {}, lambda ns: ns.update(body)
)
# Add same type requirement.
if require_same_type:
type_._requirements.append(_check_same_type)
# Add total ordering if at least one operation was defined.
if 0 < num_order_functions < 4:
if not has_eq_function:
# functools.total_ordering requires __eq__ to be defined,
# so raise early error here to keep a nice stack.
raise ValueError(
"eq must be define is order to complete ordering from "
"lt, le, gt, ge."
)
type_ = functools.total_ordering(type_)
return type_
def _make_init():
"""
Create __init__ method.
"""
def __init__(self, value):
"""
Initialize object with *value*.
"""
self.value = value
return __init__
def _make_operator(name, func):
"""
Create operator method.
"""
def method(self, other):
if not self._is_comparable_to(other):
return NotImplemented
result = func(self.value, other.value)
if result is NotImplemented:
return NotImplemented
return result
method.__name__ = "__%s__" % (name,)
method.__doc__ = "Return a %s b. Computed by attrs." % (
_operation_names[name],
)
return method
def _is_comparable_to(self, other):
"""
Check whether `other` is comparable to `self`.
"""
for func in self._requirements:
if not func(self, other):
return False
return True
def _check_same_type(self, other):
"""
Return True if *self* and *other* are of the same type, False otherwise.
"""
return other.value.__class__ is self.value.__class__
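# Editor's illustrative sketch, not part of the original file: cmp_using() is
# meant to be handed to a field's `eq` (and/or `order`) argument so that one
# field is compared with a custom predicate. `C` is a made-up example class
# and this helper is never called by the package.
def _cmp_using_demo():
    from ._make import attrib, attrs  # deferred to avoid import-cycle issues

    @attrs
    class C:
        x = attrib(eq=cmp_using(eq=lambda a, b: abs(a) == abs(b)))

    assert C(1) == C(-1)  # equal under the custom comparator
    assert C(1) != C(2)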

@ -1,13 +0,0 @@
from typing import Any, Callable, Optional, Type
_CompareWithType = Callable[[Any, Any], bool]
def cmp_using(
eq: Optional[_CompareWithType],
lt: Optional[_CompareWithType],
le: Optional[_CompareWithType],
gt: Optional[_CompareWithType],
ge: Optional[_CompareWithType],
require_same_type: bool,
class_name: str,
) -> Type: ...

@ -1,185 +0,0 @@
# SPDX-License-Identifier: MIT
import inspect
import platform
import sys
import threading
import types
import warnings
from collections.abc import Mapping, Sequence # noqa
PYPY = platform.python_implementation() == "PyPy"
PY36 = sys.version_info[:2] >= (3, 6)
HAS_F_STRINGS = PY36
PY310 = sys.version_info[:2] >= (3, 10)
if PYPY or PY36:
ordered_dict = dict
else:
from collections import OrderedDict
ordered_dict = OrderedDict
def just_warn(*args, **kw):
warnings.warn(
"Running interpreter doesn't sufficiently support code object "
"introspection. Some features like bare super() or accessing "
"__class__ will not work with slotted classes.",
RuntimeWarning,
stacklevel=2,
)
class _AnnotationExtractor:
"""
Extract type annotations from a callable, returning None whenever there
is none.
"""
__slots__ = ["sig"]
def __init__(self, callable):
try:
self.sig = inspect.signature(callable)
except (ValueError, TypeError): # inspect failed
self.sig = None
def get_first_param_type(self):
"""
Return the type annotation of the first argument if it's not empty.
"""
if not self.sig:
return None
params = list(self.sig.parameters.values())
if params and params[0].annotation is not inspect.Parameter.empty:
return params[0].annotation
return None
def get_return_type(self):
"""
Return the return type if it's not empty.
"""
if (
self.sig
and self.sig.return_annotation is not inspect.Signature.empty
):
return self.sig.return_annotation
return None
def make_set_closure_cell():
"""Return a function of two arguments (cell, value) which sets
the value stored in the closure cell `cell` to `value`.
"""
# pypy makes this easy. (It also supports the logic below, but
# why not do the easy/fast thing?)
if PYPY:
def set_closure_cell(cell, value):
cell.__setstate__((value,))
return set_closure_cell
# Otherwise gotta do it the hard way.
# Create a function that will set its first cellvar to `value`.
def set_first_cellvar_to(value):
x = value
return
# This function will be eliminated as dead code, but
# not before its reference to `x` forces `x` to be
# represented as a closure cell rather than a local.
def force_x_to_be_a_cell(): # pragma: no cover
return x
try:
# Extract the code object and make sure our assumptions about
# the closure behavior are correct.
co = set_first_cellvar_to.__code__
if co.co_cellvars != ("x",) or co.co_freevars != ():
raise AssertionError # pragma: no cover
# Convert this code object to a code object that sets the
# function's first _freevar_ (not cellvar) to the argument.
if sys.version_info >= (3, 8):
def set_closure_cell(cell, value):
cell.cell_contents = value
else:
args = [co.co_argcount]
args.append(co.co_kwonlyargcount)
args.extend(
[
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
# These two arguments are reversed:
co.co_cellvars,
co.co_freevars,
]
)
set_first_freevar_code = types.CodeType(*args)
def set_closure_cell(cell, value):
# Create a function using the set_first_freevar_code,
# whose first closure cell is `cell`. Calling it will
# change the value of that cell.
setter = types.FunctionType(
set_first_freevar_code, {}, "setter", (), (cell,)
)
# And call it to set the cell.
setter(value)
# Make sure it works on this interpreter:
def make_func_with_cell():
x = None
def func():
return x # pragma: no cover
return func
cell = make_func_with_cell().__closure__[0]
set_closure_cell(cell, 100)
if cell.cell_contents != 100:
raise AssertionError # pragma: no cover
except Exception:
return just_warn
else:
return set_closure_cell
set_closure_cell = make_set_closure_cell()
# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
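A small illustrative sketch (not part of the vendored file) of the closure-cell mechanics that ``make_set_closure_cell`` abstracts over; on Python 3.8+ a cell's contents can be assigned directly, which is the fast path taken above.
def make_counter():
    count = 0
    def counter():
        return count
    return counter

c = make_counter()
cell = c.__closure__[0]       # the cell holding `count`
cell.cell_contents = 41       # Python 3.8+: write straight to the cell
print(c())                    # -> 41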

@ -1,31 +0,0 @@
# SPDX-License-Identifier: MIT
__all__ = ["set_run_validators", "get_run_validators"]
_run_validators = True
def set_run_validators(run):
"""
Set whether or not validators are run. By default, they are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to the new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
instead.
"""
if not isinstance(run, bool):
raise TypeError("'run' must be bool.")
global _run_validators
_run_validators = run
def get_run_validators():
"""
Return whether or not validators are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to the new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
instead.
"""
return _run_validators
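A hedged sketch of how these deprecated toggles behave; the modern equivalents live in ``attrs.validators``.
import attr

attr.set_run_validators(False)           # legacy API from the file above
assert attr.get_run_validators() is False

attr.validators.set_disabled(False)      # modern spelling, re-enables them
assert not attr.validators.get_disabled()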

@ -1,420 +0,0 @@
# SPDX-License-Identifier: MIT
import copy
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
def asdict(
inst,
recurse=True,
filter=None,
dict_factory=dict,
retain_collection_types=False,
value_serializer=None,
):
"""
Return the ``attrs`` attribute values of *inst* as a dict.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return value determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable dict_factory: A callable to produce dictionaries from. For
example, to produce ordered dictionaries instead of normal Python
dictionaries, pass in ``collections.OrderedDict``.
:param bool retain_collection_types: Do not convert to ``list`` when
encountering an attribute whose type is ``tuple`` or ``set``. Only
meaningful if ``recurse`` is ``True``.
:param Optional[callable] value_serializer: A hook that is called for every
attribute or dict key/value. It receives the current instance, field
and value and must return the (updated) value. The hook is run *after*
the optional *filter* has been applied.
:rtype: return type of *dict_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
.. versionadded:: 20.3.0 *value_serializer*
.. versionadded:: 21.3.0 If a dict has a collection for a key, it is
serialized as a tuple.
"""
attrs = fields(inst.__class__)
rv = dict_factory()
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if value_serializer is not None:
v = value_serializer(inst, a, v)
if recurse is True:
if has(v.__class__):
rv[a.name] = asdict(
v,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
rv[a.name] = cf(
[
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in v
]
)
elif isinstance(v, dict):
df = dict_factory
rv[a.name] = df(
(
_asdict_anything(
kk,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in v.items()
)
else:
rv[a.name] = v
else:
rv[a.name] = v
return rv
def _asdict_anything(
val,
is_key,
filter,
dict_factory,
retain_collection_types,
value_serializer,
):
"""
``asdict`` only works on ``attrs`` instances; this works on anything.
"""
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
# Attrs class.
rv = asdict(
val,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(val, (tuple, list, set, frozenset)):
if retain_collection_types is True:
cf = val.__class__
elif is_key:
cf = tuple
else:
cf = list
rv = cf(
[
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in val
]
)
elif isinstance(val, dict):
df = dict_factory
rv = df(
(
_asdict_anything(
kk,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in val.items()
)
else:
rv = val
if value_serializer is not None:
rv = value_serializer(None, None, rv)
return rv
def astuple(
inst,
recurse=True,
filter=None,
tuple_factory=tuple,
retain_collection_types=False,
):
"""
Return the ``attrs`` attribute values of *inst* as a tuple.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return value determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable tuple_factory: A callable to produce tuples from. For
example, to produce lists instead of tuples.
:param bool retain_collection_types: Do not convert to ``list``
or ``dict`` when encountering an attribute which type is
``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
``True``.
:rtype: return type of *tuple_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.2.0
"""
attrs = fields(inst.__class__)
rv = []
retain = retain_collection_types # Very long. :/
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv.append(
astuple(
v,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
rv.append(
cf(
[
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(j.__class__)
else j
for j in v
]
)
)
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
df(
(
astuple(
kk,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(kk.__class__)
else kk,
astuple(
vv,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(vv.__class__)
else vv,
)
for kk, vv in v.items()
)
)
else:
rv.append(v)
else:
rv.append(v)
return rv if tuple_factory is list else tuple_factory(rv)
def has(cls):
"""
Check whether *cls* is a class with ``attrs`` attributes.
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:rtype: bool
"""
return getattr(cls, "__attrs_attrs__", None) is not None
def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. deprecated:: 17.1.0
Use `attrs.evolve` instead if you can.
This function will not be removed due to the slightly different approach
compared to `attrs.evolve`.
"""
import warnings
warnings.warn(
"assoc is deprecated and will be removed after 2018/01.",
DeprecationWarning,
stacklevel=2,
)
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in changes.items():
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
"{k} is not an attrs attribute on {cl}.".format(
k=k, cl=new.__class__
)
)
_obj_setattr(new, k, v)
return new
def evolve(inst, **changes):
"""
Create a new instance, based on *inst* with *changes* applied.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise TypeError: If *attr_name* couldn't be found in the class
``__init__``.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 17.1.0
"""
cls = inst.__class__
attrs = fields(cls)
for a in attrs:
if not a.init:
continue
attr_name = a.name # To deal with private attributes.
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
if init_name not in changes:
changes[init_name] = getattr(inst, attr_name)
return cls(**changes)
def resolve_types(cls, globalns=None, localns=None, attribs=None):
"""
Resolve any strings and forward annotations in type annotations.
This is only required if you need concrete types in `Attribute`'s *type*
field. In other words, you don't need to resolve your types if you only
use them for static type checking.
With no arguments, names will be looked up in the module in which the class
was created. If this is not what you want, e.g. if the name only exists
inside a method, you may pass *globalns* or *localns* to specify other
dictionaries in which to look up these names. See the docs of
`typing.get_type_hints` for more details.
:param type cls: Class to resolve.
:param Optional[dict] globalns: Dictionary containing global variables.
:param Optional[dict] localns: Dictionary containing local variables.
:param Optional[list] attribs: List of attribs for the given class.
This is necessary when calling from inside a ``field_transformer``
since *cls* is not an ``attrs`` class yet.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class and you didn't pass any attribs.
:raise NameError: If types cannot be resolved because of missing variables.
:returns: *cls* so you can use this function also as a class decorator.
Please note that you have to apply it **after** `attrs.define`. That
means the decorator has to come in the line **before** `attrs.define`.
.. versionadded:: 20.1.0
.. versionadded:: 21.1.0 *attribs*
"""
# Since calling get_type_hints is expensive we cache whether we've
# done it already.
if getattr(cls, "__attrs_types_resolved__", None) != cls:
import typing
hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
for field in fields(cls) if attribs is None else attribs:
if field.name in hints:
# Since fields have been frozen we must work around it.
_obj_setattr(field, "type", hints[field.name])
# We store the class we resolved so that subclasses know they haven't
# been resolved.
cls.__attrs_types_resolved__ = cls
# Return the class so you can use it as a decorator too.
return cls
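A minimal usage sketch (illustrative class name, not part of the vendored file) tying together ``asdict``, ``astuple``, ``evolve``, and ``has`` from above.
import attr

@attr.s
class Point:
    x = attr.ib()
    y = attr.ib()

p = attr.evolve(Point(1, 2), y=5)        # copy with a change applied
assert attr.asdict(p) == {"x": 1, "y": 5}
assert attr.astuple(p) == (1, 5)
assert attr.has(Point)                   # Point carries __attrs_attrs__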

File diff suppressed because it is too large

@ -1,220 +0,0 @@
# SPDX-License-Identifier: MIT
"""
These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
`attr.ib` with different default values.
"""
from functools import partial
from . import setters
from ._funcs import asdict as _asdict
from ._funcs import astuple as _astuple
from ._make import (
NOTHING,
_frozen_setattrs,
_ng_default_on_setattr,
attrib,
attrs,
)
from .exceptions import UnannotatedAttributeError
def define(
maybe_cls=None,
*,
these=None,
repr=None,
hash=None,
init=None,
slots=True,
frozen=False,
weakref_slot=True,
str=False,
auto_attribs=None,
kw_only=False,
cache_hash=False,
auto_exc=True,
eq=None,
order=False,
auto_detect=True,
getstate_setstate=None,
on_setattr=None,
field_transformer=None,
match_args=True,
):
r"""
Define an ``attrs`` class.
Differences to the classic `attr.s` that it uses underneath:
- Automatically detect whether or not *auto_attribs* should be `True` (c.f.
*auto_attribs* parameter).
- If *frozen* is `False`, run converters and validators when setting an
attribute by default.
- *slots=True*
.. caution::
Usually this has only upsides and few visible effects in everyday
programming. But it *can* lead to some surprising behaviors, so please
make sure to read :term:`slotted classes`.
- *auto_exc=True*
- *auto_detect=True*
- *order=False*
- Some options that were only relevant on Python 2 or were kept around for
backwards-compatibility have been removed.
Please note that these are all defaults and you can change them as you
wish.
:param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
1. If any attributes are annotated and no unannotated `attrs.fields`\ s
are found, it assumes *auto_attribs=True*.
2. Otherwise it assumes *auto_attribs=False* and tries to collect
`attrs.fields`\ s.
For now, please refer to `attr.s` for the rest of the parameters.
.. versionadded:: 20.1.0
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
"""
def do_it(cls, auto_attribs):
return attrs(
maybe_cls=cls,
these=these,
repr=repr,
hash=hash,
init=init,
slots=slots,
frozen=frozen,
weakref_slot=weakref_slot,
str=str,
auto_attribs=auto_attribs,
kw_only=kw_only,
cache_hash=cache_hash,
auto_exc=auto_exc,
eq=eq,
order=order,
auto_detect=auto_detect,
collect_by_mro=True,
getstate_setstate=getstate_setstate,
on_setattr=on_setattr,
field_transformer=field_transformer,
match_args=match_args,
)
def wrap(cls):
"""
Making this a wrapper ensures this code runs during class creation.
We also ensure that frozen-ness of classes is inherited.
"""
nonlocal frozen, on_setattr
had_on_setattr = on_setattr not in (None, setters.NO_OP)
# By default, mutable classes convert & validate on setattr.
if frozen is False and on_setattr is None:
on_setattr = _ng_default_on_setattr
# However, if we subclass a frozen class, we inherit the immutability
# and disable on_setattr.
for base_cls in cls.__bases__:
if base_cls.__setattr__ is _frozen_setattrs:
if had_on_setattr:
raise ValueError(
"Frozen classes can't use on_setattr "
"(frozen-ness was inherited)."
)
on_setattr = setters.NO_OP
break
if auto_attribs is not None:
return do_it(cls, auto_attribs)
try:
return do_it(cls, True)
except UnannotatedAttributeError:
return do_it(cls, False)
# maybe_cls's type depends on the usage of the decorator. It's a class
# if it's used as `@attrs` but ``None`` if used as `@attrs()`.
if maybe_cls is None:
return wrap
else:
return wrap(maybe_cls)
mutable = define
frozen = partial(define, frozen=True, on_setattr=None)
def field(
*,
default=NOTHING,
validator=None,
repr=True,
hash=None,
init=True,
metadata=None,
converter=None,
factory=None,
kw_only=False,
eq=None,
order=None,
on_setattr=None,
):
"""
Identical to `attr.ib`, except keyword-only and with some arguments
removed.
.. versionadded:: 20.1.0
"""
return attrib(
default=default,
validator=validator,
repr=repr,
hash=hash,
init=init,
metadata=metadata,
converter=converter,
factory=factory,
kw_only=kw_only,
eq=eq,
order=order,
on_setattr=on_setattr,
)
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
"""
Same as `attr.asdict`, except that collections types are always retained
and dict is always used as *dict_factory*.
.. versionadded:: 21.3.0
"""
return _asdict(
inst=inst,
recurse=recurse,
filter=filter,
value_serializer=value_serializer,
retain_collection_types=True,
)
def astuple(inst, *, recurse=True, filter=None):
"""
Same as `attr.astuple`, except that collections types are always retained
and `tuple` is always used as the *tuple_factory*.
.. versionadded:: 21.3.0
"""
return _astuple(
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
)
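A short sketch of the next-generation API defined above (the class names below are illustrative only).
from attr import define, field, frozen

@define
class User:
    name: str
    tags: list = field(factory=list)

@frozen
class Snapshot:
    taken_at: float

u = User("ada")
u.tags.append("math")            # mutable define classes run converters/validators on setattr by default
# Snapshot(0.0).taken_at = 1.0   # would raise FrozenInstanceError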

@ -1,86 +0,0 @@
# SPDX-License-Identifier: MIT
from functools import total_ordering
from ._funcs import astuple
from ._make import attrib, attrs
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo:
"""
A version object that can be compared to a tuple of length 1--4:
>>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
True
>>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
True
>>> vi = attr.VersionInfo(19, 2, 0, "final")
>>> vi < (19, 1, 1)
False
>>> vi < (19,)
False
>>> vi == (19, 2,)
True
>>> vi == (19, 2, 1)
False
.. versionadded:: 19.2
"""
year = attrib(type=int)
minor = attrib(type=int)
micro = attrib(type=int)
releaselevel = attrib(type=str)
@classmethod
def _from_version_string(cls, s):
"""
Parse *s* and return a _VersionInfo.
"""
v = s.split(".")
if len(v) == 3:
v.append("final")
return cls(
year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
)
def _ensure_tuple(self, other):
"""
Ensure *other* is a tuple of a valid length.
Returns a possibly transformed *other* and ourselves as a tuple of
the same length as *other*.
"""
if self.__class__ is other.__class__:
other = astuple(other)
if not isinstance(other, tuple):
raise NotImplementedError
if not (1 <= len(other) <= 4):
raise NotImplementedError
return astuple(self)[: len(other)], other
def __eq__(self, other):
try:
us, them = self._ensure_tuple(other)
except NotImplementedError:
return NotImplemented
return us == them
def __lt__(self, other):
try:
us, them = self._ensure_tuple(other)
except NotImplementedError:
return NotImplemented
# Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
# have to do anything special with releaselevel for now.
return us < them
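An illustrative use of the comparison behaviour above: feature-gating on ``attr.__version_info__``, which is an instance of this class.
import attr

if attr.__version_info__ >= (21, 3):
    # the separate ``attrs`` package namespace exists on this install
    from attrs import validators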

@ -1,9 +0,0 @@
class VersionInfo:
@property
def year(self) -> int: ...
@property
def minor(self) -> int: ...
@property
def micro(self) -> int: ...
@property
def releaselevel(self) -> str: ...

@ -1,144 +0,0 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful converters.
"""
import typing
from ._compat import _AnnotationExtractor
from ._make import NOTHING, Factory, pipe
__all__ = [
"default_if_none",
"optional",
"pipe",
"to_bool",
]
def optional(converter):
"""
A converter that allows an attribute to be optional. An optional attribute
is one which can be set to ``None``.
Type annotations will be inferred from the wrapped converter's, if it
has any.
:param callable converter: the converter that is used for non-``None``
values.
.. versionadded:: 17.1.0
"""
def optional_converter(val):
if val is None:
return None
return converter(val)
xtr = _AnnotationExtractor(converter)
t = xtr.get_first_param_type()
if t:
optional_converter.__annotations__["val"] = typing.Optional[t]
rt = xtr.get_return_type()
if rt:
optional_converter.__annotations__["return"] = typing.Optional[rt]
return optional_converter
def default_if_none(default=NOTHING, factory=None):
"""
A converter that allows replacing ``None`` values with *default* or the
result of *factory*.
:param default: Value to be used if ``None`` is passed. Passing an instance
of `attrs.Factory` is supported, however the ``takes_self`` option
is *not*.
:param callable factory: A callable that takes no parameters whose result
is used if ``None`` is passed.
:raises TypeError: If **neither** *default* or *factory* is passed.
:raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of `attrs.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
"""
if default is NOTHING and factory is None:
raise TypeError("Must pass either `default` or `factory`.")
if default is not NOTHING and factory is not None:
raise TypeError(
"Must pass either `default` or `factory` but not both."
)
if factory is not None:
default = Factory(factory)
if isinstance(default, Factory):
if default.takes_self:
raise ValueError(
"`takes_self` is not supported by default_if_none."
)
def default_if_none_converter(val):
if val is not None:
return val
return default.factory()
else:
def default_if_none_converter(val):
if val is not None:
return val
return default
return default_if_none_converter
def to_bool(val):
"""
Convert "boolean" strings (e.g., from env. vars.) to real booleans.
Values mapping to :code:`True`:
- :code:`True`
- :code:`"true"` / :code:`"t"`
- :code:`"yes"` / :code:`"y"`
- :code:`"on"`
- :code:`"1"`
- :code:`1`
Values mapping to :code:`False`:
- :code:`False`
- :code:`"false"` / :code:`"f"`
- :code:`"no"` / :code:`"n"`
- :code:`"off"`
- :code:`"0"`
- :code:`0`
:raises ValueError: for any other value.
.. versionadded:: 21.3.0
"""
if isinstance(val, str):
val = val.lower()
truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
falsy = {False, "false", "f", "no", "n", "off", "0", 0}
try:
if val in truthy:
return True
if val in falsy:
return False
except TypeError:
# Raised when "val" is not hashable (e.g., lists)
pass
raise ValueError("Cannot convert value to bool: {}".format(val))

@ -1,13 +0,0 @@
from typing import Callable, Optional, TypeVar, overload
from . import _ConverterType
_T = TypeVar("_T")
def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...

@ -1,92 +0,0 @@
# SPDX-License-Identifier: MIT
class FrozenError(AttributeError):
"""
A frozen/immutable instance or attribute has been attempted to be
modified.
It mirrors the behavior of ``namedtuples`` by using the same error message
and subclassing `AttributeError`.
.. versionadded:: 20.1.0
"""
msg = "can't set attribute"
args = [msg]
class FrozenInstanceError(FrozenError):
"""
A frozen instance has been attempted to be modified.
.. versionadded:: 16.1.0
"""
class FrozenAttributeError(FrozenError):
"""
A frozen attribute has been attempted to be modified.
.. versionadded:: 20.1.0
"""
class AttrsAttributeNotFoundError(ValueError):
"""
An ``attrs`` function couldn't find an attribute that the user asked for.
.. versionadded:: 16.2.0
"""
class NotAnAttrsClassError(ValueError):
"""
A non-``attrs`` class has been passed into an ``attrs`` function.
.. versionadded:: 16.2.0
"""
class DefaultAlreadySetError(RuntimeError):
"""
A default has been set using ``attr.ib()`` and is attempted to be reset
using the decorator.
.. versionadded:: 17.1.0
"""
class UnannotatedAttributeError(RuntimeError):
"""
A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
annotation.
.. versionadded:: 17.3.0
"""
class PythonTooOldError(RuntimeError):
"""
It was attempted to use an ``attrs`` feature that requires a newer Python
version.
.. versionadded:: 18.2.0
"""
class NotCallableError(TypeError):
"""
An ``attr.ib()`` requiring a callable has been set with a value
that is not callable.
.. versionadded:: 19.2.0
"""
def __init__(self, msg, value):
super(TypeError, self).__init__(msg, value)
self.msg = msg
self.value = value
def __str__(self):
return str(self.msg)
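A small sketch showing the most common of these exceptions in practice (the ``Frozen`` class is illustrative only).
import attr

@attr.s(frozen=True)
class Frozen:
    x = attr.ib()

try:
    Frozen(1).x = 2
except attr.exceptions.FrozenInstanceError:
    pass  # frozen instances reject attribute assignment, like namedtuples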

@ -1,17 +0,0 @@
from typing import Any
class FrozenError(AttributeError):
msg: str = ...
class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...
class NotCallableError(TypeError):
msg: str = ...
value: Any = ...
def __init__(self, msg: str, value: Any) -> None: ...

@ -1,51 +0,0 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful filters for `attr.asdict`.
"""
from ._make import Attribute
def _split_what(what):
"""
Returns a tuple of `frozenset`s of classes and attributes.
"""
return (
frozenset(cls for cls in what if isinstance(cls, type)),
frozenset(cls for cls in what if isinstance(cls, Attribute)),
)
def include(*what):
"""
Include *what*.
:param what: What to include.
:type what: `list` of `type` or `attrs.Attribute`\\ s
:rtype: `callable`
"""
cls, attrs = _split_what(what)
def include_(attribute, value):
return value.__class__ in cls or attribute in attrs
return include_
def exclude(*what):
"""
Exclude *what*.
:param what: What to exclude.
:type what: `list` of classes or `attrs.Attribute`\\ s.
:rtype: `callable`
"""
cls, attrs = _split_what(what)
def exclude_(attribute, value):
return value.__class__ not in cls and attribute not in attrs
return exclude_
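A minimal sketch of these filters fed into ``attr.asdict`` (the ``Account`` class is illustrative only).
import attr
from attr import asdict, filters

@attr.s
class Account:
    id = attr.ib()
    password = attr.ib()

public = asdict(
    Account(1, "hunter2"),
    filter=filters.exclude(attr.fields(Account).password),
)
assert public == {"id": 1}   # the password field is dropped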

@ -1,6 +0,0 @@
from typing import Any, Union
from . import Attribute, _FilterType
def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...

@ -1,73 +0,0 @@
# SPDX-License-Identifier: MIT
"""
Commonly used hooks for on_setattr.
"""
from . import _config
from .exceptions import FrozenAttributeError
def pipe(*setters):
"""
Run all *setters* and return the return value of the last one.
.. versionadded:: 20.1.0
"""
def wrapped_pipe(instance, attrib, new_value):
rv = new_value
for setter in setters:
rv = setter(instance, attrib, rv)
return rv
return wrapped_pipe
def frozen(_, __, ___):
"""
Prevent an attribute from being modified.
.. versionadded:: 20.1.0
"""
raise FrozenAttributeError()
def validate(instance, attrib, new_value):
"""
Run *attrib*'s validator on *new_value* if it has one.
.. versionadded:: 20.1.0
"""
if _config._run_validators is False:
return new_value
v = attrib.validator
if not v:
return new_value
v(instance, attrib, new_value)
return new_value
def convert(instance, attrib, new_value):
"""
Run *attrib*'s converter -- if it has one -- on *new_value* and return the
result.
.. versionadded:: 20.1.0
"""
c = attrib.converter
if c:
return c(new_value)
return new_value
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# autodata stopped working, so the docstring is inlined in the API docs.
NO_OP = object()
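A short sketch of wiring one of these hooks into a class via *on_setattr* (the ``Guarded`` class is illustrative only).
import attr
from attr import setters, validators

@attr.s(on_setattr=setters.validate)
class Guarded:
    count = attr.ib(validator=validators.instance_of(int))

g = Guarded(1)
g.count = 2        # the validator runs again on assignment
try:
    g.count = "x"
except TypeError:
    pass           # rejected by instance_of(int)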

@ -1,19 +0,0 @@
from typing import Any, NewType, NoReturn, TypeVar, cast
from . import Attribute, _OnSetAttrType
_T = TypeVar("_T")
def frozen(
instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
# convert is allowed to return Any, because they can be chained using pipe.
def convert(
instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...
_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType

@ -1,594 +0,0 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful validators.
"""
import operator
import re
from contextlib import contextmanager
from ._config import get_run_validators, set_run_validators
from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError
try:
Pattern = re.Pattern
except AttributeError: # Python <3.7 lacks a Pattern type.
Pattern = type(re.compile(""))
__all__ = [
"and_",
"deep_iterable",
"deep_mapping",
"disabled",
"ge",
"get_disabled",
"gt",
"in_",
"instance_of",
"is_callable",
"le",
"lt",
"matches_re",
"max_len",
"min_len",
"optional",
"provides",
"set_disabled",
]
def set_disabled(disabled):
"""
Globally disable or enable running validators.
By default, they are run.
:param disabled: If ``True``, disable running all validators.
:type disabled: bool
.. warning::
This function is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(not disabled)
def get_disabled():
"""
Return a bool indicating whether validators are currently disabled or not.
:return: ``True`` if validators are currently disabled.
:rtype: bool
.. versionadded:: 21.3.0
"""
return not get_run_validators()
@contextmanager
def disabled():
"""
Context manager that disables running validators within its context.
.. warning::
This context manager is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(False)
try:
yield
finally:
set_run_validators(True)
@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator:
type = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not isinstance(value, self.type):
raise TypeError(
"'{name}' must be {type!r} (got {value!r} that is a "
"{actual!r}).".format(
name=attr.name,
type=self.type,
actual=value.__class__,
value=value,
),
attr,
self.type,
value,
)
def __repr__(self):
return "<instance_of validator for type {type!r}>".format(
type=self.type
)
def instance_of(type):
"""
A validator that raises a `TypeError` if the initializer is called
with a wrong type for this particular attribute (checks are performed using
`isinstance`, therefore it's also valid to pass a tuple of types).
:param type: The type to check for.
:type type: type or tuple of types
:raises TypeError: With a human readable error message, the attribute
(of type `attrs.Attribute`), the expected type, and the value it
got.
"""
return _InstanceOfValidator(type)
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator:
pattern = attrib()
match_func = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.match_func(value):
raise ValueError(
"'{name}' must match regex {pattern!r}"
" ({value!r} doesn't)".format(
name=attr.name, pattern=self.pattern.pattern, value=value
),
attr,
self.pattern,
value,
)
def __repr__(self):
return "<matches_re validator for pattern {pattern!r}>".format(
pattern=self.pattern
)
def matches_re(regex, flags=0, func=None):
r"""
A validator that raises `ValueError` if the initializer is called
with a string that doesn't match *regex*.
:param regex: a regex string or precompiled pattern to match against
:param int flags: flags that will be passed to the underlying re function
(default 0)
:param callable func: which underlying `re` function to call. Valid options
are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
means `re.fullmatch`. For performance reasons, the pattern is always
precompiled using `re.compile`.
.. versionadded:: 19.2.0
.. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
"""
valid_funcs = (re.fullmatch, None, re.search, re.match)
if func not in valid_funcs:
raise ValueError(
"'func' must be one of {}.".format(
", ".join(
sorted(
e and e.__name__ or "None" for e in set(valid_funcs)
)
)
)
)
if isinstance(regex, Pattern):
if flags:
raise TypeError(
"'flags' can only be used with a string pattern; "
"pass flags to re.compile() instead"
)
pattern = regex
else:
pattern = re.compile(regex, flags)
if func is re.match:
match_func = pattern.match
elif func is re.search:
match_func = pattern.search
else:
match_func = pattern.fullmatch
return _MatchesReValidator(pattern, match_func)
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator:
interface = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.interface.providedBy(value):
raise TypeError(
"'{name}' must provide {interface!r} which {value!r} "
"doesn't.".format(
name=attr.name, interface=self.interface, value=value
),
attr,
self.interface,
value,
)
def __repr__(self):
return "<provides validator for interface {interface!r}>".format(
interface=self.interface
)
def provides(interface):
"""
A validator that raises a `TypeError` if the initializer is called
with an object that does not provide the requested *interface* (checks are
performed using ``interface.providedBy(value)`` (see `zope.interface
<https://zopeinterface.readthedocs.io/en/latest/>`_).
:param interface: The interface to check for.
:type interface: ``zope.interface.Interface``
:raises TypeError: With a human readable error message, the attribute
(of type `attrs.Attribute`), the expected interface, and the
value it got.
"""
return _ProvidesValidator(interface)
@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator:
validator = attrib()
def __call__(self, inst, attr, value):
if value is None:
return
self.validator(inst, attr, value)
def __repr__(self):
return "<optional validator for {what} or None>".format(
what=repr(self.validator)
)
def optional(validator):
"""
A validator that makes an attribute optional. An optional attribute is one
which can be set to ``None`` in addition to satisfying the requirements of
the sub-validator.
:param validator: A validator (or a list of validators) that is used for
non-``None`` values.
:type validator: callable or `list` of callables.
.. versionadded:: 15.1.0
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
"""
if isinstance(validator, list):
return _OptionalValidator(_AndValidator(validator))
return _OptionalValidator(validator)
@attrs(repr=False, slots=True, hash=True)
class _InValidator:
options = attrib()
def __call__(self, inst, attr, value):
try:
in_options = value in self.options
except TypeError: # e.g. `1 in "abc"`
in_options = False
if not in_options:
raise ValueError(
"'{name}' must be in {options!r} (got {value!r})".format(
name=attr.name, options=self.options, value=value
),
attr,
self.options,
value,
)
def __repr__(self):
return "<in_ validator with options {options!r}>".format(
options=self.options
)
def in_(options):
"""
A validator that raises a `ValueError` if the initializer is called
with a value that does not belong in the options provided. The check is
performed using ``value in options``.
:param options: Allowed options.
:type options: list, tuple, `enum.Enum`, ...
:raises ValueError: With a human readable error message, the attribute (of
type `attrs.Attribute`), the expected options, and the value it
got.
.. versionadded:: 17.1.0
.. versionchanged:: 22.1.0
The ValueError was incomplete until now and only contained the human
readable error message. Now it contains all the information that has
been promised since 17.1.0.
"""
return _InValidator(options)
@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator:
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not callable(value):
message = (
"'{name}' must be callable "
"(got {value!r} that is a {actual!r})."
)
raise NotCallableError(
msg=message.format(
name=attr.name, value=value, actual=value.__class__
),
value=value,
)
def __repr__(self):
return "<is_callable validator>"
def is_callable():
"""
A validator that raises an `attr.exceptions.NotCallableError` if the
initializer is called with a value for this particular attribute
that is not callable.
.. versionadded:: 19.1.0
:raises `attr.exceptions.NotCallableError`: With a human readable error
message containing the attribute (`attrs.Attribute`) name,
and the value it got.
"""
return _IsCallableValidator()
@attrs(repr=False, slots=True, hash=True)
class _DeepIterable:
member_validator = attrib(validator=is_callable())
iterable_validator = attrib(
default=None, validator=optional(is_callable())
)
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if self.iterable_validator is not None:
self.iterable_validator(inst, attr, value)
for member in value:
self.member_validator(inst, attr, member)
def __repr__(self):
iterable_identifier = (
""
if self.iterable_validator is None
else " {iterable!r}".format(iterable=self.iterable_validator)
)
return (
"<deep_iterable validator for{iterable_identifier}"
" iterables of {member!r}>"
).format(
iterable_identifier=iterable_identifier,
member=self.member_validator,
)
def deep_iterable(member_validator, iterable_validator=None):
"""
A validator that performs deep validation of an iterable.
:param member_validator: Validator(s) to apply to iterable members
:param iterable_validator: Validator to apply to iterable itself
(optional)
.. versionadded:: 19.1.0
:raises TypeError: if any sub-validators fail
"""
if isinstance(member_validator, (list, tuple)):
member_validator = and_(*member_validator)
return _DeepIterable(member_validator, iterable_validator)
@attrs(repr=False, slots=True, hash=True)
class _DeepMapping:
key_validator = attrib(validator=is_callable())
value_validator = attrib(validator=is_callable())
mapping_validator = attrib(default=None, validator=optional(is_callable()))
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if self.mapping_validator is not None:
self.mapping_validator(inst, attr, value)
for key in value:
self.key_validator(inst, attr, key)
self.value_validator(inst, attr, value[key])
def __repr__(self):
return (
"<deep_mapping validator for objects mapping {key!r} to {value!r}>"
).format(key=self.key_validator, value=self.value_validator)
def deep_mapping(key_validator, value_validator, mapping_validator=None):
"""
A validator that performs deep validation of a dictionary.
:param key_validator: Validator to apply to dictionary keys
:param value_validator: Validator to apply to dictionary values
:param mapping_validator: Validator to apply to top-level mapping
attribute (optional)
.. versionadded:: 19.1.0
:raises TypeError: if any sub-validators fail
"""
return _DeepMapping(key_validator, value_validator, mapping_validator)
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
bound = attrib()
compare_op = attrib()
compare_func = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.compare_func(value, self.bound):
raise ValueError(
"'{name}' must be {op} {bound}: {value}".format(
name=attr.name,
op=self.compare_op,
bound=self.bound,
value=value,
)
)
def __repr__(self):
return "<Validator for x {op} {bound}>".format(
op=self.compare_op, bound=self.bound
)
def lt(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number larger than or equal to *val*.
:param val: Exclusive upper bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<", operator.lt)
def le(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number greater than *val*.
:param val: Inclusive upper bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<=", operator.le)
def ge(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number smaller than *val*.
:param val: Inclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">=", operator.ge)
def gt(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number smaller than or equal to *val*.
:param val: Exclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">", operator.gt)
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
max_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) > self.max_length:
raise ValueError(
"Length of '{name}' must be <= {max}: {len}".format(
name=attr.name, max=self.max_length, len=len(value)
)
)
def __repr__(self):
return "<max_len validator for {max}>".format(max=self.max_length)
def max_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is longer than *length*.
:param int length: Maximum length of the string or iterable
.. versionadded:: 21.3.0
"""
return _MaxLengthValidator(length)
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
min_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) < self.min_length:
raise ValueError(
"Length of '{name}' must be => {min}: {len}".format(
name=attr.name, min=self.min_length, len=len(value)
)
)
def __repr__(self):
return "<min_len validator for {min}>".format(min=self.min_length)
def min_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is shorter than *length*.
:param int length: Minimum length of the string or iterable
.. versionadded:: 22.1.0
"""
return _MinLengthValidator(length)
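A minimal sketch combining several of the validators above, assuming the vendored attrs 22.1.0 (``min_len`` is new in that release); the ``Job`` class is illustrative only.
import attr
from attr import validators

@attr.s
class Job:
    name = attr.ib(
        validator=[validators.instance_of(str), validators.min_len(1)]
    )
    priority = attr.ib(default=3, validator=validators.in_(range(1, 6)))
    tags = attr.ib(
        factory=list,
        validator=validators.deep_iterable(
            member_validator=validators.instance_of(str)
        ),
    )

Job("backup", priority=2, tags=["nightly"])   # passes all validators
# Job("", priority=9) would raise ValueError (min_len, then in_)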

@ -1,80 +0,0 @@
from typing import (
Any,
AnyStr,
Callable,
Container,
ContextManager,
Iterable,
List,
Mapping,
Match,
Optional,
Pattern,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from . import _ValidatorType
from . import _ValidatorArgType
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...
# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
type: Tuple[Type[_T1], Type[_T2]]
) -> _ValidatorType[Union[_T1, _T2]]: ...
@overload
def instance_of(
type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
@overload
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
def provides(interface: Any) -> _ValidatorType[Any]: ...
def optional(
validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
) -> _ValidatorType[Optional[_T]]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
regex: Union[Pattern[AnyStr], AnyStr],
flags: int = ...,
func: Optional[
Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
] = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
member_validator: _ValidatorArgType[_T],
iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
key_validator: _ValidatorType[_K],
value_validator: _ValidatorType[_V],
mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...

@ -1,11 +0,0 @@
Credits
=======
``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
It's the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
Both were inspired by Twisted's `FancyEqMixin <https://docs.twisted.org/en/stable/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, mkay?

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Hynek Schlawack and the attrs contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -1,240 +0,0 @@
Metadata-Version: 2.1
Name: attrs
Version: 22.1.0
Summary: Classes Without Boilerplate
Home-page: https://www.attrs.org/
Author: Hynek Schlawack
Author-email: hs@ox.cx
Maintainer: Hynek Schlawack
Maintainer-email: hs@ox.cx
License: MIT
Project-URL: Documentation, https://www.attrs.org/
Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html
Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
Project-URL: Source Code, https://github.com/python-attrs/attrs
Project-URL: Funding, https://github.com/sponsors/hynek
Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
Project-URL: Ko-fi, https://ko-fi.com/the_hynek
Keywords: class,attribute,boilerplate
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.5
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: AUTHORS.rst
Provides-Extra: dev
Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev'
Requires-Dist: hypothesis ; extra == 'dev'
Requires-Dist: pympler ; extra == 'dev'
Requires-Dist: pytest (>=4.3.0) ; extra == 'dev'
Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'dev'
Requires-Dist: pytest-mypy-plugins ; extra == 'dev'
Requires-Dist: zope.interface ; extra == 'dev'
Requires-Dist: furo ; extra == 'dev'
Requires-Dist: sphinx ; extra == 'dev'
Requires-Dist: sphinx-notfound-page ; extra == 'dev'
Requires-Dist: pre-commit ; extra == 'dev'
Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'dev'
Provides-Extra: docs
Requires-Dist: furo ; extra == 'docs'
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: zope.interface ; extra == 'docs'
Requires-Dist: sphinx-notfound-page ; extra == 'docs'
Provides-Extra: tests
Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests'
Requires-Dist: hypothesis ; extra == 'tests'
Requires-Dist: pympler ; extra == 'tests'
Requires-Dist: pytest (>=4.3.0) ; extra == 'tests'
Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'tests'
Requires-Dist: pytest-mypy-plugins ; extra == 'tests'
Requires-Dist: zope.interface ; extra == 'tests'
Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests'
Provides-Extra: tests_no_zope
Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope'
Requires-Dist: hypothesis ; extra == 'tests_no_zope'
Requires-Dist: pympler ; extra == 'tests_no_zope'
Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope'
Requires-Dist: mypy (!=0.940,>=0.900) ; extra == 'tests_no_zope'
Requires-Dist: pytest-mypy-plugins ; extra == 'tests_no_zope'
Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope'
.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png
:alt: attrs logo
:align: center
``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods <https://www.attrs.org/en/latest/glossary.html#term-dunder-methods>`_).
`Trusted by NASA <https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement>`_ for Mars missions since 2020!
Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
.. teaser-end
For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
.. -code-begin-
.. code-block:: pycon
>>> from attrs import asdict, define, make_class, Factory
>>> @define
... class SomeClass:
... a_number: int = 42
... list_of_numbers: list[int] = Factory(list)
...
... def hard_math(self, another_number):
... return self.a_number + sum(self.list_of_numbers) * another_number
>>> sc = SomeClass(1, [1, 2, 3])
>>> sc
SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
>>> sc.hard_math(3)
19
>>> sc == SomeClass(1, [1, 2, 3])
True
>>> sc != SomeClass(2, [3, 2, 1])
True
>>> asdict(sc)
{'a_number': 1, 'list_of_numbers': [1, 2, 3]}
>>> SomeClass()
SomeClass(a_number=42, list_of_numbers=[])
>>> C = make_class("C", ["a", "b"])
>>> C("foo", "bar")
C(a='foo', b='bar')
After *declaring* your attributes, ``attrs`` gives you:
- a concise and explicit overview of the class's attributes,
- a nice human-readable ``__repr__``,
- equality-checking methods,
- an initializer,
- and much more,
*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
**Hate type annotations**!?
No problem!
Types are entirely **optional** with ``attrs``.
Simply assign ``attrs.field()`` to the attributes instead of annotating them with types.
----
This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0.
The classic APIs (``@attr.s``, ``attr.ib``, plus their serious-business aliases) and the ``attr`` package import name will remain **indefinitely**.
Please check out `On The Core API Names <https://www.attrs.org/en/latest/names.html>`_ for a more in-depth explanation.
Data Classes
============
On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` `are a descendant <https://hynek.me/articles/import-attrs/>`_ of ``attrs``).
In practice it does a lot more and is more flexible.
For instance it allows you to define `special handling of NumPy arrays for equality checks <https://www.attrs.org/en/stable/comparison.html#customization>`_, or allows more ways to `plug into the initialization process <https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization>`_.
For more details, please refer to our `comparison page <https://www.attrs.org/en/stable/why.html#data-classes>`_.
.. -project-information-
Project Information
===================
- **License**: `MIT <https://choosealicense.com/licenses/mit/>`_
- **PyPI**: https://pypi.org/project/attrs/
- **Source Code**: https://github.com/python-attrs/attrs
- **Documentation**: https://www.attrs.org/
- **Changelog**: https://www.attrs.org/en/stable/changelog.html
- **Get Help**: please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_
- **Third-party Extensions**: https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs
- **Supported Python Versions**: 3.5 and later (last 2.7-compatible release is `21.4.0 <https://pypi.org/project/attrs/21.4.0/>`_)
If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md>`_ to get you started!
``attrs`` for Enterprise
------------------------
Available as part of the Tidelift Subscription.
The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
`Learn more. <https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_
Release Information
===================
22.1.0 (2022-07-28)
-------------------
Backwards-incompatible Changes
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Python 2.7 is not supported anymore.
Dealing with Python 2.7 tooling has become too difficult for a volunteer-run project.
We have supported Python 2 more than 2 years after it was officially discontinued and feel that we have paid our dues.
All versions up to 21.4.0 from December 2021 remain fully functional, of course.
`#936 <https://github.com/python-attrs/attrs/issues/936>`_
- The deprecated ``cmp`` attribute of ``attrs.Attribute`` has been removed.
This does not affect the *cmp* argument to ``attr.s`` that can be used as a shortcut to set *eq* and *order* at the same time.
`#939 <https://github.com/python-attrs/attrs/issues/939>`_
Changes
^^^^^^^
- Instantiation of frozen slotted classes is now faster.
`#898 <https://github.com/python-attrs/attrs/issues/898>`_
- If an ``eq`` key is defined, it is also used before hashing the attribute.
`#909 <https://github.com/python-attrs/attrs/issues/909>`_
- Added ``attrs.validators.min_len()``.
`#916 <https://github.com/python-attrs/attrs/issues/916>`_
- ``attrs.validators.deep_iterable()``'s *member_validator* argument now also accepts a list of validators and wraps them in an ``attrs.validators.and_()``.
`#925 <https://github.com/python-attrs/attrs/issues/925>`_
- Added missing type stub re-imports for ``attrs.converters`` and ``attrs.filters``.
`#931 <https://github.com/python-attrs/attrs/issues/931>`_
- Added missing stub for ``attr(s).cmp_using()``.
`#949 <https://github.com/python-attrs/attrs/issues/949>`_
``attrs.validators._in()``'s ``ValueError`` no longer omits the attribute, expected options, and the value it got.
`#951 <https://github.com/python-attrs/attrs/issues/951>`_
- Python 3.11 is now officially supported.
`#969 <https://github.com/python-attrs/attrs/issues/969>`_
`Full changelog <https://www.attrs.org/en/stable/changelog.html>`_.
Credits
=======
``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
It's the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
Both were inspired by Twisted's `FancyEqMixin <https://docs.twisted.org/en/stable/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, mkay?

@ -1,56 +0,0 @@
attr/__init__.py,sha256=KZjj88xCd_tH-W59HR1EvHiYAUi4Zd1dzOxx8P77jeI,1602
attr/__init__.pyi,sha256=t-1r-I1VnyxFrqic__QxIk1HUc3ag53L2b8lv0xDZTw,15137
attr/__pycache__/__init__.cpython-310.pyc,,
attr/__pycache__/_cmp.cpython-310.pyc,,
attr/__pycache__/_compat.cpython-310.pyc,,
attr/__pycache__/_config.cpython-310.pyc,,
attr/__pycache__/_funcs.cpython-310.pyc,,
attr/__pycache__/_make.cpython-310.pyc,,
attr/__pycache__/_next_gen.cpython-310.pyc,,
attr/__pycache__/_version_info.cpython-310.pyc,,
attr/__pycache__/converters.cpython-310.pyc,,
attr/__pycache__/exceptions.cpython-310.pyc,,
attr/__pycache__/filters.cpython-310.pyc,,
attr/__pycache__/setters.cpython-310.pyc,,
attr/__pycache__/validators.cpython-310.pyc,,
attr/_cmp.py,sha256=Mmqj-6w71g_vx0TTLLkU4pbmv28qz_FyBGcUb1HX9ZE,4102
attr/_cmp.pyi,sha256=cSlVvIH4As2NlDIoLglPEbSrBMWVVTpwExVDDBH6pn8,357
attr/_compat.py,sha256=Qr9kZOu95Og7joPaQiXoPb54epKqxNU39Xu0u4QVGZI,5568
attr/_config.py,sha256=5W8lgRePuIOWu1ZuqF1899e2CmXGc95-ipwTpF1cEU4,826
attr/_funcs.py,sha256=XTFKZtd5zxsUvWocBw7VAswTxH-nFHk0H8gJ2JQkxD4,14645
attr/_make.py,sha256=Srxbhb8kB17T6nP9e_dgcXY72zda9xfL5uJzva6LExY,97569
attr/_next_gen.py,sha256=N0Gb5WdBHfcfQhcUsLAc_2ZYzl0JtAX1NOHuDgvkecE,5882
attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121
attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
attr/converters.py,sha256=TWCfmCAxk8s2tgTSYnyQv9MRDPf1pk8Rj16KO_Xqe1c,3610
attr/converters.pyi,sha256=MQo7iEzPNVoFpKqD30sVwgVpdNoIeSCF2nsXvoxLZ-Y,416
attr/exceptions.py,sha256=ZGEMLv0CDY1TOtj49OF84myejOn-LCCXAKGIKalKkVU,1915
attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
attr/filters.py,sha256=aZep54h8-4ZYV5lmZ3Dx2mqeQH4cMx6tuCmCylLNbEU,1038
attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215
attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400
attr/setters.pyi,sha256=7dM10rqpQVDW0y-iJUnq8rabdO5Wx2Sbo5LwNa0IXl0,573
attr/validators.py,sha256=cpOHMNSt02ApbTQtQAwBTMeWZqp0u_sx-e3xH-jTyR4,16793
attr/validators.pyi,sha256=6ngbvnWnFSkbf5J2dK86pq2xpEtrwzWTKfJ4aUvSIlk,2355
attrs-22.1.0.dist-info/AUTHORS.rst,sha256=jau5p7JMPbBJeCCpGBWsRj8zpxUVAhpyoHFJRfjM888,743
attrs-22.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
attrs-22.1.0.dist-info/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
attrs-22.1.0.dist-info/METADATA,sha256=vwSMK_BbEgVHrgFWpj3iW0PISTMPHzi6qham9jg7LtA,11015
attrs-22.1.0.dist-info/RECORD,,
attrs-22.1.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
attrs-22.1.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11
attrs/__init__.py,sha256=CeyxLGVViAEKKsLOLaif8vF3vs1a28vsrRVLv7eMEgM,1109
attrs/__init__.pyi,sha256=vuFxNbulP9Q7hfpO6Lb5A-_0mbEJOiwYyefjzXMqVfs,2100
attrs/__pycache__/__init__.cpython-310.pyc,,
attrs/__pycache__/converters.cpython-310.pyc,,
attrs/__pycache__/exceptions.cpython-310.pyc,,
attrs/__pycache__/filters.cpython-310.pyc,,
attrs/__pycache__/setters.cpython-310.pyc,,
attrs/__pycache__/validators.cpython-310.pyc,,
attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70
attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70
attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67
attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67
attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70

@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@ -1,70 +0,0 @@
# SPDX-License-Identifier: MIT
from attr import (
NOTHING,
Attribute,
Factory,
__author__,
__copyright__,
__description__,
__doc__,
__email__,
__license__,
__title__,
__url__,
__version__,
__version_info__,
assoc,
cmp_using,
define,
evolve,
field,
fields,
fields_dict,
frozen,
has,
make_class,
mutable,
resolve_types,
validate,
)
from attr._next_gen import asdict, astuple
from . import converters, exceptions, filters, setters, validators
__all__ = [
"__author__",
"__copyright__",
"__description__",
"__doc__",
"__email__",
"__license__",
"__title__",
"__url__",
"__version__",
"__version_info__",
"asdict",
"assoc",
"astuple",
"Attribute",
"cmp_using",
"converters",
"define",
"evolve",
"exceptions",
"Factory",
"field",
"fields_dict",
"fields",
"filters",
"frozen",
"has",
"make_class",
"mutable",
"NOTHING",
"resolve_types",
"setters",
"validate",
"validators",
]

@ -1,66 +0,0 @@
from typing import (
Any,
Callable,
Dict,
Mapping,
Optional,
Sequence,
Tuple,
Type,
)
# Because we need to type our own stuff, we have to make everything from
# attr explicitly public too.
from attr import __author__ as __author__
from attr import __copyright__ as __copyright__
from attr import __description__ as __description__
from attr import __email__ as __email__
from attr import __license__ as __license__
from attr import __title__ as __title__
from attr import __url__ as __url__
from attr import __version__ as __version__
from attr import __version_info__ as __version_info__
from attr import _FilterType
from attr import assoc as assoc
from attr import Attribute as Attribute
from attr import cmp_using as cmp_using
from attr import converters as converters
from attr import define as define
from attr import evolve as evolve
from attr import exceptions as exceptions
from attr import Factory as Factory
from attr import field as field
from attr import fields as fields
from attr import fields_dict as fields_dict
from attr import filters as filters
from attr import frozen as frozen
from attr import has as has
from attr import make_class as make_class
from attr import mutable as mutable
from attr import NOTHING as NOTHING
from attr import resolve_types as resolve_types
from attr import setters as setters
from attr import validate as validate
from attr import validators as validators
# TODO: see definition of attr.asdict/astuple
def asdict(
inst: Any,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
dict_factory: Type[Mapping[Any, Any]] = ...,
retain_collection_types: bool = ...,
value_serializer: Optional[
Callable[[type, Attribute[Any], Any], Any]
] = ...,
tuple_keys: bool = ...,
) -> Dict[str, Any]: ...
# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
inst: Any,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
tuple_factory: Type[Sequence[Any]] = ...,
retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...

@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT
from attr.converters import * # noqa

@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT
from attr.exceptions import * # noqa

@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT
from attr.filters import * # noqa

@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT
from attr.setters import * # noqa
