Initial Commit

mwiegand 2022-03-17 22:16:30 +01:00
commit 03107339af
1511 changed files with 421304 additions and 0 deletions

3
.envrc Normal file

@@ -0,0 +1,3 @@
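# Presumably consumed by direnv (https://direnv.net), which evaluates .envrc
# on entering the directory: these two lines (re)create the virtualenv and
# activate it for the current shell. (Comment added for clarity; the direnv
# assumption is not stated anywhere in this commit.)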
python3 -m venv .venv
source ./.venv/bin/activate

241
.venv/bin/Activate.ps1 Normal file

@@ -0,0 +1,241 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,

    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including
removing the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
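<#
For illustration only (comment added; not part of the original script): a
`pyvenv.cfg` written by `python -m venv` typically looks like the following.
The concrete values here are hypothetical examples, not taken from this
repository:

    home = /usr/local/opt/python@3.9/bin
    include-system-site-packages = false
    version = 3.9.10
    prompt = '.venv'
#>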
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {
        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose " Setting prompt based on the parent directory's name. (This is the directory name passed to the venv module when the virtual environment was created.)"
        Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

66
.venv/bin/activate Normal file

@@ -0,0 +1,66 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r 2> /dev/null
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(.venv) ${PS1:-}"
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

25
.venv/bin/activate.csh Normal file

@@ -0,0 +1,25 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = "(.venv) $prompt"
endif
alias pydoc python -m pydoc
rehash

64
.venv/bin/activate.fish Normal file

@@ -0,0 +1,64 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .

        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

8
.venv/bin/jsonschema Executable file

@@ -0,0 +1,8 @@
#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from jsonschema.cli import main
if __name__ == '__main__':
    # pip-style console-script shim: normalize argv[0] (strip any Windows
    # "-script.pyw"/".exe" launcher suffix), then run the package CLI.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
.venv/bin/normalizer Executable file

@@ -0,0 +1,8 @@
#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli_detect())

8
.venv/bin/pip Executable file

@@ -0,0 +1,8 @@
#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
.venv/bin/pip3 Executable file

@@ -0,0 +1,8 @@
#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
.venv/bin/pip3.9 Executable file

@@ -0,0 +1,8 @@
#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

1
.venv/bin/python Symbolic link

@@ -0,0 +1 @@
python3.9

1
.venv/bin/python3 Symbolic link

@@ -0,0 +1 @@
python3.9

1
.venv/bin/python3.9 Symbolic link

@@ -0,0 +1 @@
/usr/local/opt/python@3.9/bin/python3.9

1
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/INSTALLER Normal file

@@ -0,0 +1 @@
pip

20
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/LICENSE Normal file

@@ -0,0 +1,20 @@
Copyright 2020 Dag Brattli
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

128
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/METADATA Normal file

@@ -0,0 +1,128 @@
Metadata-Version: 2.1
Name: OSlash
Version: 0.6.3
Summary: OSlash (Ø) for Python 3.8+
Home-page: https://github.com/dbrattli/oslash
Author: Dag Brattli
Author-email: dag@brattli.net
License: MIT License
Download-URL: https://github.com/dbrattli/oslash
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Other Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Description-Content-Type: text/markdown
Requires-Dist: typing-extensions
# Functors, Applicatives, And Monads in Python
![Python package](https://github.com/dbrattli/OSlash/workflows/Python%20package/badge.svg)
OSlash (Ø) is a library for playing with functional programming in Python 3.8+. It's an attempt to re-implement some of
the code from [Learn You a Haskell for Great Good!](http://learnyouahaskell.com/) in Python 3.8. OSlash unifies
functional and object-oriented paradigms by grouping related functions within classes. Objects are, however, never used
for storing values or mutable data, and data only lives within function closures.
OSlash is intended to be a tutorial. For practical functional programming in Python in production environments you
should use [FSlash](https://github.com/dbrattli/fslash) instead.
## Install
```bash
> pip3 install oslash
```
The project currently contains implementations for:
## Abstract Base Classes
- **[Functor](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#functors)**, for stuff that can be mapped
- **[Applicative](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#applicatives)**, for callable stuff
- **Monoid**, for associative stuff
- **[Monad](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#monads)**, for monadic stuff
## And Some Monads
- **Identity**, boxed stuff in its simplest form
- **[Maybe (Just | Nothing)](https://github.com/dbrattli/oslash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures)**, for optional stuff
- **Either (Right | Left)**, for possible failures
- **List**, purely functional list of stuff
- **[IO Action](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#io-monad)**, for impure stuff
- **[Writer](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads#the-writer-monad)**, for logging stuff
- **[Reader](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads#the-reader-monad)**, for callable stuff
- **State**, for stateful computations of stuff
- **Cont**, for continuation of stuff
## Monadic functions
- **>>**, for sequencing monadic actions
- **lift**, for mapping a function over monadic values
- **join**, for removing one level of monadic structure
- **compose**, for composing monadic functions
## Utility functions
- **compose**, for composing 0 to n functions
## But why?
Yes, I know there are other projects out there like [PyMonad](https://bitbucket.org/jason_delaat/pymonad/) and
[fn.py](https://github.com/kachayev/fn.py). I'm simply doing this in order to better understand the
[book](http://learnyouahaskell.com/). It's so much easier to learn when you implement things yourself. The code may be
similar to PyMonad in structure, but is quite different in implementation.
Why is the project called OSlash? OSlash is the Norwegian character called [Oslash](http://en.wikipedia.org/wiki/Ø).
Initially I wanted to create a project that used Ø and ø (unicode) for the project name and modules. It didn't work out
well, so I renamed it to OSlash.
## Examples
Haskell:
```haskell
> fmap (+3) (Just 2)
Just 5
> (+3) <$> (Just 2)
Just 5
```
Python:
```python
>>> Just(2).map(lambda x: x+3)
Just 5
>>> (lambda x: x+3) % Just(2)
Just 5
```
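The same bind-style chaining extends to the other monads. A minimal sketch
(assuming `Just`, `Nothing`, `Right` and `Left` are importable from the
top-level `oslash` package, and that `|` is the bind operator, as in the IO
example below):

```python
from oslash import Just, Nothing, Right, Left

# Maybe: bind short-circuits once a Nothing appears.
print(Just(2) | (lambda x: Just(x + 3)))     # Just 5
print(Nothing() | (lambda x: Just(x + 3)))   # Nothing

# Either: a Left carries the failure through unchanged.
print(Right(2) | (lambda x: Right(x * 10)))  # Right 10
print(Left("boom") | (lambda x: Right(x)))   # Left boom
```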
IO Actions:
```python
from oslash import put_line, get_line
main = put_line("What is your name?") | (lambda _:
get_line() | (lambda name:
put_line("What is your age?") | (lambda _:
get_line() | (lambda age:
put_line("Hello " + name + "!") | (lambda _:
put_line("You are " + age + " years old"))))))
if __name__ == "__main__":
main()
```
## Tutorials
- [Functors, Applicatives, And Monads In Pictures](https://github.com/dbrattli/oslash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures) in Python.
- [Three Useful Monads](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads) _(in progress)_
- [Using Either monad in Python](https://medium.com/@rnesytov/using-either-monad-in-python-b6eac698dff5)

53
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/RECORD Normal file

@@ -0,0 +1,53 @@
OSlash-0.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
OSlash-0.6.3.dist-info/LICENSE,sha256=4Nj7npUZIOpByuFd2Y4NnLIEHToVvcRRBeD-HAmgVuI,1050
OSlash-0.6.3.dist-info/METADATA,sha256=Y7u5wnr-CHjNKb8ZzLGkhuGuPVCYI8LpRH-CVabi-0I,4740
OSlash-0.6.3.dist-info/RECORD,,
OSlash-0.6.3.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92
OSlash-0.6.3.dist-info/top_level.txt,sha256=fncxVNYZeY5RRUGVK-06Zrh1NxnF_tb1Xv0aT8L8Q6s,7
OSlash-0.6.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
oslash/__init__.py,sha256=8XYliRj0rScyHp6kEGQ-apAb4RFt3k9sJKdrdqyUoik,606
oslash/__pycache__/__init__.cpython-39.pyc,,
oslash/__pycache__/_version.cpython-39.pyc,,
oslash/__pycache__/cont.cpython-39.pyc,,
oslash/__pycache__/do.cpython-39.pyc,,
oslash/__pycache__/either.cpython-39.pyc,,
oslash/__pycache__/identity.cpython-39.pyc,,
oslash/__pycache__/ioaction.cpython-39.pyc,,
oslash/__pycache__/list.cpython-39.pyc,,
oslash/__pycache__/maybe.cpython-39.pyc,,
oslash/__pycache__/monadic.cpython-39.pyc,,
oslash/__pycache__/observable.cpython-39.pyc,,
oslash/__pycache__/reader.cpython-39.pyc,,
oslash/__pycache__/state.cpython-39.pyc,,
oslash/__pycache__/writer.cpython-39.pyc,,
oslash/_version.py,sha256=bfRfzZb0Upr5hkWvmAXH6ZtM9vl2RzrvUibynTjIgOg,497
oslash/cont.py,sha256=b6lhAeNksKn7QrG3vCT6weHkpAWTQKOOZKkMFGt2g4g,2435
oslash/do.py,sha256=2R9HgYhfYaJ4sXXAorZa4Dx-2vWw8dzX7M4qc_oT4II,6379
oslash/either.py,sha256=73mXh5iCzSLBJgk3QlgMX2xN-tZCam3hobbeNNAlbzw,4058
oslash/identity.py,sha256=DegmadVO4vKqbV46xlTYQ-ZWnYzvG5mxLqAjurluefo,1761
oslash/ioaction.py,sha256=4ig3EzsTgWq5qDAnZVkpkjawFIAoooG2jZEOwGBKDfY,6784
oslash/list.py,sha256=teV3WwJD0WlNmd_PBU37_voDXp9kkFezxWr1rF1keFs,8725
oslash/maybe.py,sha256=WKidTTmWz6gIe1HidcGC2-ZZG1rygmvyHpwIEpiwufc,6643
oslash/monadic.py,sha256=JKVo0lkmAKatvK9tdGT4ZQcnlkdKObf5B7Zrtuo8sn4,597
oslash/observable.py,sha256=n-BEkav1j_e3F31WTpxVuCjY54nvJecyg3OMKvonBZM,2968
oslash/reader.py,sha256=4kR47DJh2yg9cv4jqdWnMwBWUPSMvbtDF5rKUzJtZOQ,4527
oslash/state.py,sha256=RPlCS4h5IJ0OZGcomRxxk6maB0hcIxOqbzJid6iHgIo,2277
oslash/typing/__init__.py,sha256=nQgRsFD6GP-9YHKPEtSgsOiAUTxJ82leBSCWSB27Xew,133
oslash/typing/__pycache__/__init__.cpython-39.pyc,,
oslash/typing/__pycache__/applicative.cpython-39.pyc,,
oslash/typing/__pycache__/functor.cpython-39.pyc,,
oslash/typing/__pycache__/monad.cpython-39.pyc,,
oslash/typing/__pycache__/monoid.cpython-39.pyc,,
oslash/typing/applicative.py,sha256=OaBvmf0T5YHuRYPL2gPz7gZDsjtppEZN-lMCf3AS8Ec,1869
oslash/typing/functor.py,sha256=XGTkT4s95mjWBQw0E994AFPujuBdQx9oFhnIxVTMt9k,1141
oslash/typing/monad.py,sha256=3z9WlsEAPiv1Ls3WpsuyAYx3h45UFXIK-a8oXgmXTvI,2745
oslash/typing/monoid.py,sha256=bdUo8QGn7Pdc8iVEn7XT_jwdBG75_kG1xRyk7k9k_uk,967
oslash/util/__init__.py,sha256=YQdehJ8CwGsguuJCQkGjQu7k6jmRXyrXMDPsDVO1Lq8,87
oslash/util/__pycache__/__init__.cpython-39.pyc,,
oslash/util/__pycache__/basic.cpython-39.pyc,,
oslash/util/__pycache__/fn.cpython-39.pyc,,
oslash/util/__pycache__/numerals.cpython-39.pyc,,
oslash/util/basic.py,sha256=e5AoXonkxyGeyXw40foVdBXDAajI-n2Gcv7xZ-jwbN0,99
oslash/util/fn.py,sha256=IHRM6JPSCWam9dzZs-uL9rP4bmyhVlsnJXXS8DoTwVw,2405
oslash/util/numerals.py,sha256=F2Us8AmoEwGdnXROB3OO9xchYcETzW60LNC_Eki7Dg8,1084
oslash/writer.py,sha256=PHCFzHIPe-txosY1gi53CCC1En01WiGtia0YvUJU96M,3342

5
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/WHEEL Normal file

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py3-none-any

1
.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/top_level.txt Normal file

@@ -0,0 +1 @@
oslash

1
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER Normal file

@@ -0,0 +1 @@
pip

174
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/LICENSE Normal file

@@ -0,0 +1,174 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

246
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/METADATA Normal file

@@ -0,0 +1,246 @@
Metadata-Version: 2.1
Name: PyNaCl
Version: 1.5.0
Summary: Python binding to the Networking and Cryptography (NaCl) library
Home-page: https://github.com/pyca/pynacl/
Author: The PyNaCl developers
Author-email: cryptography-dev@python.org
License: Apache License 2.0
Platform: UNKNOWN
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Python: >=3.6
License-File: LICENSE
Requires-Dist: cffi (>=1.4.1)
Provides-Extra: docs
Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
Provides-Extra: tests
Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests'
Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests'
===============================================
PyNaCl: Python binding to the libsodium library
===============================================
.. image:: https://img.shields.io/pypi/v/pynacl.svg
:target: https://pypi.org/project/PyNaCl/
:alt: Latest Version
.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main
:target: https://codecov.io/github/pyca/pynacl?branch=main
.. image:: https://img.shields.io/pypi/pyversions/pynacl.svg
:target: https://pypi.org/project/PyNaCl/
:alt: Compatible Python Versions
PyNaCl is a Python binding to `libsodium`_, which is a fork of the
`Networking and Cryptography library`_. These libraries have a stated goal of
improving usability, security and speed. It supports Python 3.6+ as well as
PyPy 3.
.. _libsodium: https://github.com/jedisct1/libsodium
.. _Networking and Cryptography library: https://nacl.cr.yp.to/
Features
--------
* Digital signatures
* Secret-key encryption
* Public-key encryption
* Hashing and message authentication
* Password based key derivation and password hashing
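As a quick illustration of the signing feature above, a minimal round trip
(a sketch against PyNaCl's documented ``nacl.signing`` API, not code taken
from this commit):

.. code-block:: python

    from nacl.signing import SigningKey

    # Generate a fresh random signing key and sign a message.
    signing_key = SigningKey.generate()
    signed = signing_key.sign(b"attack at dawn")

    # The paired verify key authenticates it; verify() raises
    # nacl.exceptions.BadSignatureError if the payload was tampered with.
    signing_key.verify_key.verify(signed)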
`Changelog`_
------------
.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/
Installation
============
Binary wheel install
--------------------
PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ ,
so all dependencies are included. Make sure you have an up-to-date pip
and run:
.. code-block:: console
$ pip install pynacl
Faster wheel build
------------------
You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make``
and enable `parallelization`_:
.. code-block:: console
$ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl
Linux source build
------------------
PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled
with PyNaCl so to install you can run:
.. code-block:: console
$ pip install pynacl
If you'd prefer to use the version of ``libsodium`` provided by your
distribution, you can disable the bundled copy during install by running:
.. code-block:: console
$ SODIUM_INSTALL=system pip install pynacl
.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools
is generally discouraged, and is completely unsupported in PyNaCl's case.
.. _parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html
.. _libsodium: https://github.com/jedisct1/libsodium
.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_
are built on a baseline linux environment based on Centos 5.11
and should work on most x86 and x86_64 glibc based linux environments.
Changelog
=========
1.5.0 (2022-01-07)
------------------
* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5.
* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1``
wheels.
* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS
``universal2`` wheels (the latter supports macOS ``arm64``).
* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release).
* Add inline type hints.
1.4.0 (2020-05-25)
------------------
* Update ``libsodium`` to 1.0.18.
* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1``
wheels. Continuing to produce them was a maintenance burden.
* Added support for Python 3.8, and removed support for Python 3.4.
* Add low level bindings for extracting the seed and the public key
from crypto_sign_ed25519 secret key
* Add low level bindings for deterministic random generation.
* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485)
* Fix checks on very slow builders (#481, #495)
* Add low-level bindings to ed25519 arithmetic functions
* Update low-level blake2b state implementation
* Fix wrong short-input behavior of SealedBox.decrypt() (#517)
* Raise CryptPrefixError exception instead of InvalidkeyError when trying
to check a password against a verifier stored in an unknown format (#519)
* Add support for minimal builds of libsodium. Trying to call functions
not available in a minimal build will raise an UnavailableError
exception. To compile a minimal build of the bundled libsodium, set
the SODIUM_INSTALL_MINIMAL environment variable to any non-empty
string (e.g. ``SODIUM_INSTALL_MINIMAL=1``) for setup.
1.3.0 - 2018-09-26
------------------
* Added support for Python 3.7.
* Update ``libsodium`` to 1.0.16.
* Run and test all code examples in PyNaCl docs through sphinx's
doctest builder.
* Add low-level bindings for chacha20-poly1305 AEAD constructions.
* Add low-level bindings for the chacha20-poly1305 secretstream constructions.
* Add low-level bindings for ed25519ph pre-hashed signing construction.
* Add low-level bindings for constant-time increment and addition
on fixed-precision big integers represented as little-endian
byte sequences.
* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API.
* Add low-level bindings for libsodium's crypto_kx... key exchange
construction.
* Set hypothesis deadline to None in tests/test_pwhash.py to avoid
incorrect test failures on slower processor architectures. GitHub
issue #370
1.2.1 - 2017-12-04
------------------
* Update hypothesis minimum allowed version.
* Infrastructure: add proper configuration for readthedocs builder
runtime environment.
1.2.0 - 2017-11-01
------------------
* Update ``libsodium`` to 1.0.15.
* Infrastructure: add jenkins support for automatic build of
``manylinux1`` binary wheels
* Added support for ``SealedBox`` construction.
* Added support for ``argon2i`` and ``argon2id`` password hashing constructs
and restructured high-level password hashing implementation to expose
the same interface for all hashers.
* Added support for 128 bit ``siphashx24`` variant of ``siphash24``.
* Added support for ``from_seed`` APIs for X25519 keypair generation.
* Dropped support for Python 3.3.
1.1.2 - 2017-03-31
------------------
* reorder link time library search path when using bundled
libsodium
1.1.1 - 2017-03-15
------------------
* Fixed a circular import bug in ``nacl.utils``.
1.1.0 - 2017-03-14
------------------
* Dropped support for Python 2.6.
* Added ``shared_key()`` method on ``Box``.
* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or
``SecretBox`` and it will automatically generate a random nonce.
* Added support for ``siphash24``.
* Added support for ``blake2b``.
* Added support for ``scrypt``.
* Update ``libsodium`` to 1.0.11.
* Default to the bundled ``libsodium`` when compiling.
* All raised exceptions are defined mixing-in
``nacl.exceptions.CryptoError``
1.0.1 - 2016-01-24
------------------
* Fix an issue with absolute paths that prevented the creation of wheels.
1.0 - 2016-01-23
----------------
* PyNaCl has been ported to use the new APIs available in cffi 1.0+.
Due to this change we no longer support PyPy releases older than 2.6.
* Python 3.2 support has been dropped.
* Functions to convert between Ed25519 and Curve25519 keys have been added.
0.3.0 - 2015-03-04
------------------
* The low-level API (`nacl.c.*`) has been changed to match the
upstream NaCl C/C++ conventions (as well as those of other NaCl bindings).
The order of arguments and return values has changed significantly. To
avoid silent failures, `nacl.c` has been removed, and replaced with
`nacl.bindings` (with the new argument ordering). If you have code which
calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review
the new docstrings and update your code/imports to match the new
conventions.

68
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/RECORD Normal file

@@ -0,0 +1,68 @@
PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyNaCl-1.5.0.dist-info/LICENSE,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694
PyNaCl-1.5.0.dist-info/METADATA,sha256=U9PLLkcKk_YC_Tl5OSoMiAmblalKU9qFsRtxiwa-TiM,8656
PyNaCl-1.5.0.dist-info/RECORD,,
PyNaCl-1.5.0.dist-info/WHEEL,sha256=4Pc_qD0VhjiuSiOgv99UcgTmp8ljuSMnWHBjm4-tsjo,114
PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13
nacl/__init__.py,sha256=0IUunzBT8_Jn0DUdHacBExOYeAEMggo8slkfjo7O0XM,1116
nacl/__pycache__/__init__.cpython-39.pyc,,
nacl/__pycache__/encoding.cpython-39.pyc,,
nacl/__pycache__/exceptions.cpython-39.pyc,,
nacl/__pycache__/hash.cpython-39.pyc,,
nacl/__pycache__/hashlib.cpython-39.pyc,,
nacl/__pycache__/public.cpython-39.pyc,,
nacl/__pycache__/secret.cpython-39.pyc,,
nacl/__pycache__/signing.cpython-39.pyc,,
nacl/__pycache__/utils.cpython-39.pyc,,
nacl/_sodium.abi3.so,sha256=eZC39TLe_Z_mLp_OdkxMWrxMsYw6NyDy9voJ2pMwbo0,1021039
nacl/bindings/__init__.py,sha256=BDlStrds2EuUS4swOL4pnf92PWVS_CHRCptX3KhEX-s,16997
nacl/bindings/__pycache__/__init__.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_aead.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_box.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_core.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_generichash.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_hash.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_kx.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_pwhash.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_scalarmult.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_secretbox.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_secretstream.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_shorthash.cpython-39.pyc,,
nacl/bindings/__pycache__/crypto_sign.cpython-39.pyc,,
nacl/bindings/__pycache__/randombytes.cpython-39.pyc,,
nacl/bindings/__pycache__/sodium_core.cpython-39.pyc,,
nacl/bindings/__pycache__/utils.cpython-39.pyc,,
nacl/bindings/crypto_aead.py,sha256=BIw1k_JCfr5ylZk0RF5rCFIM1fhfLkEa-aiWkrfffNE,15597
nacl/bindings/crypto_box.py,sha256=Ox0NG2t4MsGhBAa7Kgah4o0gc99ULMsqkdX56ofOouY,10139
nacl/bindings/crypto_core.py,sha256=6u9G3y7H-QrawO785UkFFFtwDoCkeHE63GOUl9p5-eA,13736
nacl/bindings/crypto_generichash.py,sha256=9mX0DGIIzicr-uXrqFM1nU4tirasbixDwbcdfV7W1fc,8852
nacl/bindings/crypto_hash.py,sha256=Rg1rsEwE3azhsQT-dNVPA4NB9VogJAKn1EfxYt0pPe0,2175
nacl/bindings/crypto_kx.py,sha256=oZNVlNgROpHOa1XQ_uZe0tqIkdfuApeJlRnwR23_74k,6723
nacl/bindings/crypto_pwhash.py,sha256=laVDo4xFUuGyEjtZAU510AklBF6ablBy7Z3HN1WDYjY,18848
nacl/bindings/crypto_scalarmult.py,sha256=_DX-mst2uCnzjo6fP5HRTnhv1BC95B9gmJc3L_or16g,8244
nacl/bindings/crypto_secretbox.py,sha256=KgZ1VvkCJDlQ85jtfe9c02VofPvuEgZEhWni-aX3MsM,2914
nacl/bindings/crypto_secretstream.py,sha256=G0FgZS01qA5RzWzm5Bdms8Yy_lvgdZDoUYYBActPmvQ,11165
nacl/bindings/crypto_shorthash.py,sha256=PQU7djHTLDGdVs-w_TsivjFHHp5EK5k2Yh6p-6z0T60,2603
nacl/bindings/crypto_sign.py,sha256=53j2im9E4F79qT_2U8IfCAc3lzg0VMwEjvAPEUccVDg,10342
nacl/bindings/randombytes.py,sha256=uBK3W4WcjgnjZdWanrX0fjYZpr9KHbBgNMl9rui-Ojc,1563
nacl/bindings/sodium_core.py,sha256=9Y9CX--sq-TaPaQRPRpx8SWDSS9PJOja_Cqb-yqyJNQ,1039
nacl/bindings/utils.py,sha256=KDwQnadXeNMbqEA1SmpNyCVo5k8MiUQa07QM66VzfXM,4298
nacl/encoding.py,sha256=qTAPc2MXSkdh4cqDVY0ra6kHyViHMCmEo_re7cgGk5w,2915
nacl/exceptions.py,sha256=GZH32aJtZgqCO4uz0LRsev8z0WyvAYuV3YVqT9AAQq4,2451
nacl/hash.py,sha256=EYBOe6UVc9SUQINEmyuRSa1QGRSvdwdrBzTL1tdFLU8,6392
nacl/hashlib.py,sha256=L5Fv75St8AMPvb-GhA4YqX5p1mC_Sb4HhC1NxNQMpJA,4400
nacl/public.py,sha256=RVGCWQRjIJOmW-8sNrVLtsDjMMGx30i6UyfViGCnQNA,14792
nacl/pwhash/__init__.py,sha256=XSDXd7wQHNLEHl0mkHfVb5lFQsp6ygHkhen718h0BSM,2675
nacl/pwhash/__pycache__/__init__.cpython-39.pyc,,
nacl/pwhash/__pycache__/_argon2.cpython-39.pyc,,
nacl/pwhash/__pycache__/argon2i.cpython-39.pyc,,
nacl/pwhash/__pycache__/argon2id.cpython-39.pyc,,
nacl/pwhash/__pycache__/scrypt.cpython-39.pyc,,
nacl/pwhash/_argon2.py,sha256=jL1ChR9biwYh3RSuc-LJ2-W4DlVLHpir-XHGX8cpeJQ,1779
nacl/pwhash/argon2i.py,sha256=IIvIuO9siKUu5-Wpz0SGiltLQv7Du_mi9BUE8INRK_4,4405
nacl/pwhash/argon2id.py,sha256=H22i8O4j9Ws4L3JsXl9TRcJzDcyaVumhQRPzINAgJWM,4433
nacl/pwhash/scrypt.py,sha256=fMr3Qht1a1EY8aebNNntfLRjinIPXtKYKKrrBhY5LDc,6986
nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
nacl/secret.py,sha256=kauBNuP-0rb3TjU2EMBMu5Vnmzjnscp1bRqMspy5LzU,12108
nacl/signing.py,sha256=kbTEUyHLUMaNLv1nCjxzGxCs82Qs5w8gxE_CnEwPuIU,8337
nacl/utils.py,sha256=gmlTD1x9ZNwzHd8LpALH1CHud-Htv8ejRb3y7TyS9f0,2341

5
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/WHEEL Normal file

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: false
Tag: cp36-abi3-macosx_10_10_universal2

2
.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt Normal file

@@ -0,0 +1,2 @@
_sodium
nacl

198
.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py Normal file

@@ -0,0 +1,198 @@
# don't import any costly modules
import sys
import os

is_pypy = '__pypy__' in sys.builtin_module_names


def warn_distutils_present():
    if 'distutils' not in sys.modules:
        return
    if is_pypy and sys.version_info < (3, 7):
        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
        return
    import warnings
    warnings.warn(
        "Distutils was imported before Setuptools, but importing Setuptools "
        "also replaces the `distutils` module in `sys.modules`. This may lead "
        "to undesirable behaviors or errors. To avoid these issues, avoid "
        "using distutils directly, ensure that setuptools is installed in the "
        "traditional way (e.g. not an editable install), and/or make sure "
        "that setuptools is always imported before distutils.")


def clear_distutils():
    if 'distutils' not in sys.modules:
        return
    import warnings
    warnings.warn("Setuptools is replacing distutils.")
    mods = [
        name for name in sys.modules
        if name == "distutils" or name.startswith("distutils.")
    ]
    for name in mods:
        del sys.modules[name]


def enabled():
    """
    Allow selection of distutils by environment variable.
    """
    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
    return which == 'local'


def ensure_local_distutils():
    import importlib
    clear_distutils()

    # With the DistutilsMetaFinder in place,
    # perform an import to cause distutils to be
    # loaded from setuptools._distutils. Ref #2906.
    with shim():
        importlib.import_module('distutils')

    # check that submodules load as expected
    core = importlib.import_module('distutils.core')
    assert '_distutils' in core.__file__, core.__file__


def do_override():
    """
    Ensure that the local copy of distutils is preferred over stdlib.
    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
    for more motivation.
    """
    if enabled():
        warn_distutils_present()
        ensure_local_distutils()


class _TrivialRe:
    def __init__(self, *patterns):
        self._patterns = patterns

    def match(self, string):
        return all(pat in string for pat in self._patterns)


class DistutilsMetaFinder:
    def find_spec(self, fullname, path, target=None):
        if path is not None:
            return

        method_name = 'spec_for_{fullname}'.format(**locals())
        method = getattr(self, method_name, lambda: None)
        return method()

    def spec_for_distutils(self):
        import importlib
        import importlib.abc
        import importlib.util
        import warnings

        # warnings.filterwarnings() imports the re module
        warnings._add_filter(
            'ignore',
            _TrivialRe("distutils", "deprecated"),
            DeprecationWarning,
            None,
            0,
            append=True
        )

        try:
            mod = importlib.import_module('setuptools._distutils')
        except Exception:
            # There are a couple of cases where setuptools._distutils
            # may not be present:
            # - An older Setuptools without a local distutils is
            #   taking precedence. Ref #2957.
            # - Path manipulation during sitecustomize removes
            #   setuptools from the path but only after the hook
            #   has been loaded. Ref #2980.
            # In either case, fall back to stdlib behavior.
            return

        class DistutilsLoader(importlib.abc.Loader):
            def create_module(self, spec):
                return mod

            def exec_module(self, module):
                pass

        return importlib.util.spec_from_loader(
            'distutils', DistutilsLoader(), origin=mod.__file__
        )

    def spec_for_pip(self):
        """
        Ensure stdlib distutils when running under pip.
        See pypa/pip#8761 for rationale.
        """
        if self.pip_imported_during_build():
            return
        if self.is_get_pip():
            return
        clear_distutils()
        self.spec_for_distutils = lambda: None

    @classmethod
    def pip_imported_during_build(cls):
        """
        Detect if pip is being imported in a build script. Ref #2355.
        """
        import traceback
        return any(
            cls.frame_file_is_setup(frame)
            for frame, line in traceback.walk_stack(None)
        )

    @classmethod
    def is_get_pip(cls):
        """
        Detect if get-pip is being invoked. Ref #2993.
        """
        try:
            import __main__
            return os.path.basename(__main__.__file__) == 'get-pip.py'
        except AttributeError:
            pass

    @staticmethod
    def frame_file_is_setup(frame):
        """
        Return True if the indicated frame suggests a setup.py file.
        """
        # some frames may not have __file__ (#2940)
        return frame.f_globals.get('__file__', '').endswith('setup.py')


DISTUTILS_FINDER = DistutilsMetaFinder()


def add_shim():
    DISTUTILS_FINDER in sys.meta_path or insert_shim()


class shim:
    def __enter__(self):
        insert_shim()

    def __exit__(self, exc, value, tb):
        remove_shim()


def insert_shim():
    sys.meta_path.insert(0, DISTUTILS_FINDER)


def remove_shim():
    try:
        sys.meta_path.remove(DISTUTILS_FINDER)
    except ValueError:
        pass

1
.venv/lib/python3.9/site-packages/_distutils_hack/override.py Normal file

@@ -0,0 +1 @@
__import__('_distutils_hack').do_override()
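# (Comment added for clarity; not in the original file.) Importing this
# module runs do_override() from _distutils_hack/__init__.py above, which
# swaps in setuptools' bundled distutils when SETUPTOOLS_USE_DISTUTILS is
# unset or set to 'local'.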


@@ -0,0 +1 @@
__version__ = '0.18.1'

1
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/INSTALLER Normal file

@@ -0,0 +1 @@
pip

20
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/LICENSE Normal file

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2018 Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

106
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/METADATA Normal file

@@ -0,0 +1,106 @@
Metadata-Version: 2.1
Name: anyio
Version: 3.5.0
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Home-page: UNKNOWN
Author: Alex Grönholm
Author-email: alex.gronholm@nextday.fi
License: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Framework :: AnyIO
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Python: >=3.6.2
License-File: LICENSE
Requires-Dist: idna (>=2.8)
Requires-Dist: sniffio (>=1.1)
Requires-Dist: contextvars ; python_version < "3.7"
Requires-Dist: dataclasses ; python_version < "3.7"
Requires-Dist: typing-extensions ; python_version < "3.8"
Provides-Extra: doc
Requires-Dist: packaging ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc'
Provides-Extra: test
Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test'
Requires-Dist: hypothesis (>=4.0) ; extra == 'test'
Requires-Dist: pytest (>=6.0) ; extra == 'test'
Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test'
Requires-Dist: trustme ; extra == 'test'
Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test'
Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test'
Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test'
Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test'
Provides-Extra: trio
Requires-Dist: trio (>=0.16) ; extra == 'trio'
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/anyio?branch=master
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
:alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
:target: https://gitter.im/python-trio/AnyIO
:alt: Gitter chat
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony
with the native SC of trio itself.
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
trio_. AnyIO can also be adopted into a library or application incrementally, bit by bit, with no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.
Documentation
-------------
View full documentation at: https://anyio.readthedocs.io/
Features
--------
AnyIO offers the following functionality:
* Task groups (nurseries_ in trio terminology)
* High level networking (TCP, UDP and UNIX sockets)
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
3.8)
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
Protocols)
* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
streams)
* Worker threads
* Subprocesses
* Asynchronous file I/O (using worker threads)
* Signal handling
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
It even works with the popular Hypothesis_ library.
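A minimal task-group sketch (written against AnyIO 3.x's documented API; the
coroutine names here are illustrative, not taken from this commit):

.. code-block:: python

    import anyio

    async def greet(name: str) -> None:
        await anyio.sleep(0.1)  # pretend to do some I/O
        print(f"hello, {name}")

    async def main() -> None:
        # Tasks started in the group are joined (and cancelled together
        # on error) when the async with block exits.
        async with anyio.create_task_group() as tg:
            tg.start_soon(greet, "trio")
            tg.start_soon(greet, "asyncio")

    anyio.run(main)  # uses the asyncio backend by default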
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/
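A minimal usage sketch, assuming only the public AnyIO 3.x API (``create_task_group``,
``start_soon``, ``sleep`` and ``run`` are all exported by the ``anyio`` package)::

    import anyio

    async def main() -> None:
        async with anyio.create_task_group() as tg:
            tg.start_soon(anyio.sleep, 1)  # child task runs concurrently
            print('child task spawned')

    anyio.run(main)  # or: anyio.run(main, backend='trio')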
82
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/RECORD Normal file
View file
@ -0,0 +1,82 @@
anyio-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-3.5.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-3.5.0.dist-info/METADATA,sha256=qiAHEKm52YTAQjLLGrl1dHcWDN9wgUtRo_hbjC6N3og,4693
anyio-3.5.0.dist-info/RECORD,,
anyio-3.5.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
anyio-3.5.0.dist-info/entry_points.txt,sha256=z1bvtbND76CfYuqdNZxiaibWP2IOqSVa8FQKIk4lVQk,40
anyio-3.5.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=fxdj2SjxB6e-Q4lg_j3uDRK6MiZo-VXW4lVIV_YPpFk,3843
anyio/__pycache__/__init__.cpython-39.pyc,,
anyio/__pycache__/from_thread.cpython-39.pyc,,
anyio/__pycache__/lowlevel.cpython-39.pyc,,
anyio/__pycache__/pytest_plugin.cpython-39.pyc,,
anyio/__pycache__/to_process.cpython-39.pyc,,
anyio/__pycache__/to_thread.cpython-39.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-39.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-39.pyc,,
anyio/_backends/__pycache__/_trio.cpython-39.pyc,,
anyio/_backends/_asyncio.py,sha256=l8780cOAPM2wbAfuPJQTTHyuKiX5efFydzSEQEHHiks,66395
anyio/_backends/_trio.py,sha256=wu-9Sx53rUqVpyX2O7bUA4ElmkoHRbqBscRoP1xXAXU,27664
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-39.pyc,,
anyio/_core/__pycache__/_compat.cpython-39.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-39.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-39.pyc,,
anyio/_core/__pycache__/_fileio.cpython-39.pyc,,
anyio/_core/__pycache__/_resources.cpython-39.pyc,,
anyio/_core/__pycache__/_signals.cpython-39.pyc,,
anyio/_core/__pycache__/_sockets.cpython-39.pyc,,
anyio/_core/__pycache__/_streams.cpython-39.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-39.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-39.pyc,,
anyio/_core/__pycache__/_tasks.cpython-39.pyc,,
anyio/_core/__pycache__/_testing.cpython-39.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-39.pyc,,
anyio/_core/_compat.py,sha256=RM2vCmSviAmW0qFKuMaCGzn3JKD63UMNOIM6X6rGjdU,5668
anyio/_core/_eventloop.py,sha256=3CrLCclhm1R_K_6wK3LP_Q7eEEqEwuIv7M3At1hhZkc,4055
anyio/_core/_exceptions.py,sha256=CfFeKh4K25Z2X0lff2ahbVoBx1M3hxVagoOQwfRnEC8,2829
anyio/_core/_fileio.py,sha256=9BAV2LZ-90cvmEQ1AuKhCNxqQeWCXr8qDqjZtHZ3aC4,18062
anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399
anyio/_core/_signals.py,sha256=ub6LfvBz-z3O1qj8-WkWi46t_dpcPTefSfC27NBs-lU,820
anyio/_core/_sockets.py,sha256=7S8UKPkfgcya0qXTkUisrnrR_lEPR56HbFbvB1ehz2Q,19784
anyio/_core/_streams.py,sha256=GiNATCZCl2BDRCOXwPMV9Bonz7NbFSa4xK_IKUb5hWI,1483
anyio/_core/_subprocesses.py,sha256=n6cX_LNh3gyo-lTDUi0pVV6hmr5Au3QkUfAVuVeHXpE,4869
anyio/_core/_synchronization.py,sha256=nsrsv9ee7_sEUV6uncsfg_8sfc4nO-CbM13tUXRsE_Y,16720
anyio/_core/_tasks.py,sha256=nhM5aEbdUjOdL3aCUonp3dy1zurl7OGFSsPg8OujEmE,5199
anyio/_core/_testing.py,sha256=bp6n3_KFC68AhUAcu0XGq0aZRYSeQWP4FY3uWtsfc_8,2166
anyio/_core/_typedattr.py,sha256=0hYrxkAFHCEBkcIC1-goHLd5bXth5VbNkCLTojvNbaM,2496
anyio/abc/__init__.py,sha256=ugKefsiv5Y4DGLzEYsOrS-izkTO6UNM7v9dYpbLAknQ,1980
anyio/abc/__pycache__/__init__.cpython-39.pyc,,
anyio/abc/__pycache__/_resources.cpython-39.pyc,,
anyio/abc/__pycache__/_sockets.cpython-39.pyc,,
anyio/abc/__pycache__/_streams.cpython-39.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-39.pyc,,
anyio/abc/__pycache__/_tasks.cpython-39.pyc,,
anyio/abc/__pycache__/_testing.cpython-39.pyc,,
anyio/abc/_resources.py,sha256=VC7Gzy8xwOGrPtfjNuSjGaKVXmBy0IS4sVpEwq2vZa0,761
anyio/abc/_sockets.py,sha256=uFgijTGLAHbrfK8JA3arScbiN0o88bf0uUSlq4MjnEg,5605
anyio/abc/_streams.py,sha256=h_EXlQsbpwt63gd2jSjaGBLprBfzG7vcSQYIZuDI5LY,6516
anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071
anyio/abc/_tasks.py,sha256=bcNfMaayFOrrlpPRklklK2GfIXGWgRaP-HUs35-J_18,3051
anyio/abc/_testing.py,sha256=LfRDpPw4FQrja9dkhzV_RovBmV4sxqvzxHX5YrV6lYc,1147
anyio/from_thread.py,sha256=6qdCL0PS6pbh3fdDPgR2uLucrAKcVDwCaZlB_DcPeNA,16042
anyio/lowlevel.py,sha256=98x-Z9jKxEeuvZs7KFP15bZ6D-n-SlEzmxjRRqj1YlU,4612
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=bguloPM9UfdxIGlteWnctgT2PXbs1zFRdZ_JHtIGSJc,5544
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-39.pyc,,
anyio/streams/__pycache__/buffered.cpython-39.pyc,,
anyio/streams/__pycache__/file.cpython-39.pyc,,
anyio/streams/__pycache__/memory.cpython-39.pyc,,
anyio/streams/__pycache__/stapled.cpython-39.pyc,,
anyio/streams/__pycache__/text.cpython-39.pyc,,
anyio/streams/__pycache__/tls.cpython-39.pyc,,
anyio/streams/buffered.py,sha256=32jQEEkqefrmPgAXKAQoGnNSdm5l0zzaa0V_nYkwpbM,4435
anyio/streams/file.py,sha256=HT-u90tt-zNwlRlZhKSpFhKrWEKd4QkLPR4ySF9FfUs,4345
anyio/streams/memory.py,sha256=4qzW3_N69w-AdixRZOkCemF6veRBcV6-2IRjL63BXA8,9161
anyio/streams/stapled.py,sha256=euIt3fnuvs3rE7Xn5QsDYhebP5neXAoyCVcAPcM6vpE,4168
anyio/streams/text.py,sha256=iTrT7auMl2SGvFxGf-UA0DJAdTx2ZOW663q1ucMihzs,4966
anyio/streams/tls.py,sha256=_DSW8p4l8xh5DR4tCi_8QS83wptgMcHJ_JSzPXdNPLE,11778
anyio/to_process.py,sha256=tXGfHyGokeVERftxEU5AvygQS8OoOdPIFXTs8a_5lRw,9020
anyio/to_thread.py,sha256=f-SIvh1-VSg78_R5k6JfP7sXJ5epx3eBa3cDPh1s8lk,2139
5
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/WHEEL Normal file
View file
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any
3
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/entry_points.txt Normal file
View file
@ -0,0 +1,3 @@
[pytest11]
anyio = anyio.pytest_plugin
1
.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/top_level.txt Normal file
View file
@ -0,0 +1 @@
anyio
116
.venv/lib/python3.9/site-packages/anyio/__init__.py Normal file
View file
@ -0,0 +1,116 @@
__all__ = (
'maybe_async',
'maybe_async_cm',
'run',
'sleep',
'sleep_forever',
'sleep_until',
'current_time',
'get_all_backends',
'get_cancelled_exc_class',
'BrokenResourceError',
'BrokenWorkerProcess',
'BusyResourceError',
'ClosedResourceError',
'DelimiterNotFound',
'EndOfStream',
'ExceptionGroup',
'IncompleteRead',
'TypedAttributeLookupError',
'WouldBlock',
'AsyncFile',
'Path',
'open_file',
'wrap_file',
'aclose_forcefully',
'open_signal_receiver',
'connect_tcp',
'connect_unix',
'create_tcp_listener',
'create_unix_listener',
'create_udp_socket',
'create_connected_udp_socket',
'getaddrinfo',
'getnameinfo',
'wait_socket_readable',
'wait_socket_writable',
'create_memory_object_stream',
'run_process',
'open_process',
'create_lock',
'CapacityLimiter',
'CapacityLimiterStatistics',
'Condition',
'ConditionStatistics',
'Event',
'EventStatistics',
'Lock',
'LockStatistics',
'Semaphore',
'SemaphoreStatistics',
'create_condition',
'create_event',
'create_semaphore',
'create_capacity_limiter',
'open_cancel_scope',
'fail_after',
'move_on_after',
'current_effective_deadline',
'TASK_STATUS_IGNORED',
'CancelScope',
'create_task_group',
'TaskInfo',
'get_current_task',
'get_running_tasks',
'wait_all_tasks_blocked',
'run_sync_in_worker_thread',
'run_async_from_thread',
'run_sync_from_thread',
'current_default_worker_thread_limiter',
'create_blocking_portal',
'start_blocking_portal',
'typed_attribute',
'TypedAttributeSet',
'TypedAttributeProvider'
)
from typing import Any
from ._core._compat import maybe_async, maybe_async_cm
from ._core._eventloop import (
current_time, get_all_backends, get_cancelled_exc_class, run, sleep, sleep_forever,
sleep_until)
from ._core._exceptions import (
BrokenResourceError, BrokenWorkerProcess, BusyResourceError, ClosedResourceError,
DelimiterNotFound, EndOfStream, ExceptionGroup, IncompleteRead, TypedAttributeLookupError,
WouldBlock)
from ._core._fileio import AsyncFile, Path, open_file, wrap_file
from ._core._resources import aclose_forcefully
from ._core._signals import open_signal_receiver
from ._core._sockets import (
connect_tcp, connect_unix, create_connected_udp_socket, create_tcp_listener, create_udp_socket,
create_unix_listener, getaddrinfo, getnameinfo, wait_socket_readable, wait_socket_writable)
from ._core._streams import create_memory_object_stream
from ._core._subprocesses import open_process, run_process
from ._core._synchronization import (
CapacityLimiter, CapacityLimiterStatistics, Condition, ConditionStatistics, Event,
EventStatistics, Lock, LockStatistics, Semaphore, SemaphoreStatistics, create_capacity_limiter,
create_condition, create_event, create_lock, create_semaphore)
from ._core._tasks import (
TASK_STATUS_IGNORED, CancelScope, create_task_group, current_effective_deadline, fail_after,
move_on_after, open_cancel_scope)
from ._core._testing import TaskInfo, get_current_task, get_running_tasks, wait_all_tasks_blocked
from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute
# Re-exported here, for backwards compatibility
# isort: off
from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread
from .from_thread import (
create_blocking_portal, run_async_from_thread, run_sync_from_thread, start_blocking_portal)
# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
if getattr(value, '__module__', '').startswith('anyio.'):
value.__module__ = __name__
.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py Normal file
File diff suppressed because it is too large
833
.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py Normal file
View file
@ -0,0 +1,833 @@
import array
import math
import socket
from concurrent.futures import Future
from contextvars import copy_context
from dataclasses import dataclass
from functools import partial
from io import IOBase
from os import PathLike
from signal import Signals
from types import TracebackType
from typing import (
Any, Awaitable, Callable, Collection, ContextManager, Coroutine, Deque, Dict, Generic, List,
Mapping, NoReturn, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast)
import sniffio
import trio.from_thread
from outcome import Error, Outcome, Value
from trio.socket import SocketType as TrioSocketType
from trio.to_thread import run_sync
from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T
from .._core._eventloop import claim_worker_thread
from .._core._exceptions import (
BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream)
from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
from .._core._sockets import convert_ipv6_sockaddr
from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
from .._core._synchronization import Event as BaseEvent
from .._core._synchronization import ResourceGuard
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import IPSockAddrType, UDPPacketType
try:
from trio import lowlevel as trio_lowlevel
except ImportError:
from trio import hazmat as trio_lowlevel # type: ignore[no-redef]
from trio.hazmat import wait_readable, wait_writable
else:
from trio.lowlevel import wait_readable, wait_writable
try:
from trio.lowlevel import open_process as trio_open_process # type: ignore[attr-defined]
except ImportError:
from trio import open_process as trio_open_process
T_Retval = TypeVar('T_Retval')
T_SockAddr = TypeVar('T_SockAddr', str, IPSockAddrType)
#
# Event loop
#
run = trio.run
current_token = trio.lowlevel.current_trio_token
RunVar = trio.lowlevel.RunVar
#
# Miscellaneous
#
sleep = trio.sleep
#
# Timeouts and cancellation
#
class CancelScope(BaseCancelScope):
def __new__(cls, original: Optional[trio.CancelScope] = None,
**kwargs: object) -> 'CancelScope':
return object.__new__(cls)
def __init__(self, original: Optional[trio.CancelScope] = None, **kwargs: Any) -> None:
self.__original = original or trio.CancelScope(**kwargs)
def __enter__(self) -> 'CancelScope':
self.__original.__enter__()
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self.__original.__exit__(exc_type, exc_val, exc_tb)
def cancel(self) -> DeprecatedAwaitable:
self.__original.cancel()
return DeprecatedAwaitable(self.cancel)
@property
def deadline(self) -> float:
return self.__original.deadline
@deadline.setter
def deadline(self, value: float) -> None:
self.__original.deadline = value
@property
def cancel_called(self) -> bool:
return self.__original.cancel_called
@property
def shield(self) -> bool:
return self.__original.shield
@shield.setter
def shield(self, value: bool) -> None:
self.__original.shield = value
CancelledError = trio.Cancelled
checkpoint = trio.lowlevel.checkpoint
checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled
cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint
current_effective_deadline = trio.current_effective_deadline
current_time = trio.current_time
#
# Task groups
#
class ExceptionGroup(BaseExceptionGroup, trio.MultiError):
pass
class TaskGroup(abc.TaskGroup):
def __init__(self) -> None:
self._active = False
self._nursery_manager = trio.open_nursery()
self.cancel_scope = None # type: ignore[assignment]
async def __aenter__(self) -> 'TaskGroup':
self._active = True
self._nursery = await self._nursery_manager.__aenter__()
self.cancel_scope = CancelScope(self._nursery.cancel_scope)
return self
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
try:
return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb)
except trio.MultiError as exc:
raise ExceptionGroup(exc.exceptions) from None
finally:
self._active = False
def start_soon(self, func: Callable, *args: object, name: object = None) -> None:
if not self._active:
raise RuntimeError('This task group is not active; no new tasks can be started.')
self._nursery.start_soon(func, *args, name=name)
async def start(self, func: Callable[..., Coroutine],
*args: object, name: object = None) -> object:
if not self._active:
raise RuntimeError('This task group is not active; no new tasks can be started.')
return await self._nursery.start(func, *args, name=name)
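# Usage sketch of the task-group API this backend class implements, via the
# public anyio.create_task_group() wrapper ("worker" and "serve" are
# placeholder coroutine functions):
#
#     async with create_task_group() as tg:
#         tg.start_soon(worker, arg)  # fire-and-forget child task
#         await tg.start(serve)       # waits until serve() calls task_status.started()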
#
# Threads
#
async def run_sync_in_worker_thread(
func: Callable[..., T_Retval], *args: object, cancellable: bool = False,
limiter: Optional[trio.CapacityLimiter] = None) -> T_Retval:
def wrapper() -> T_Retval:
with claim_worker_thread('trio'):
return func(*args)
# TODO: remove explicit context copying when trio 0.20 is the minimum requirement
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, None)
return await run_sync(context.run, wrapper, cancellable=cancellable, limiter=limiter)
# TODO: remove this workaround when trio 0.20 is the minimum requirement
def run_async_from_thread(fn: Callable[..., Awaitable[T_Retval]], *args: Any) -> T_Retval:
async def wrapper() -> T_Retval:
retval: T_Retval
async def inner() -> None:
nonlocal retval
__tracebackhide__ = True
retval = await fn(*args)
async with trio.open_nursery() as n:
context.run(n.start_soon, inner)
__tracebackhide__ = True
return retval
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, 'trio')
return trio.from_thread.run(wrapper)
def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval:
# TODO: remove explicit context copying when trio 0.20 is the minimum requirement
retval = trio.from_thread.run_sync(copy_context().run, fn, *args)
return cast(T_Retval, retval)
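# Sketch of the public anyio.from_thread API that these functions back; it is
# meant to be called from a worker thread spawned by the event loop
# ("blocking_work" is a placeholder):
#
#     from anyio import from_thread, sleep
#
#     def blocking_work() -> None:
#         from_thread.run(sleep, 1)            # run async code on the event loop
#         from_thread.run_sync(print, 'done')  # run sync code on the event loop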
class BlockingPortal(abc.BlockingPortal):
def __new__(cls) -> 'BlockingPortal':
return object.__new__(cls)
def __init__(self) -> None:
super().__init__()
self._token = trio.lowlevel.current_trio_token()
def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any],
name: object, future: Future) -> None:
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, 'trio')
trio.from_thread.run_sync(
context.run, partial(self._task_group.start_soon, name=name), self._call_func, func,
args, kwargs, future, trio_token=self._token)
#
# Subprocesses
#
@dataclass(eq=False)
class ReceiveStreamWrapper(abc.ByteReceiveStream):
_stream: trio.abc.ReceiveStream
async def receive(self, max_bytes: Optional[int] = None) -> bytes:
try:
data = await self._stream.receive_some(max_bytes)
except trio.ClosedResourceError as exc:
raise ClosedResourceError from exc.__cause__
except trio.BrokenResourceError as exc:
raise BrokenResourceError from exc.__cause__
if data:
return data
else:
raise EndOfStream
async def aclose(self) -> None:
await self._stream.aclose()
@dataclass(eq=False)
class SendStreamWrapper(abc.ByteSendStream):
_stream: trio.abc.SendStream
async def send(self, item: bytes) -> None:
try:
await self._stream.send_all(item)
except trio.ClosedResourceError as exc:
raise ClosedResourceError from exc.__cause__
except trio.BrokenResourceError as exc:
raise BrokenResourceError from exc.__cause__
async def aclose(self) -> None:
await self._stream.aclose()
@dataclass(eq=False)
class Process(abc.Process):
_process: trio.Process
_stdin: Optional[abc.ByteSendStream]
_stdout: Optional[abc.ByteReceiveStream]
_stderr: Optional[abc.ByteReceiveStream]
async def aclose(self) -> None:
if self._stdin:
await self._stdin.aclose()
if self._stdout:
await self._stdout.aclose()
if self._stderr:
await self._stderr.aclose()
await self.wait()
async def wait(self) -> int:
return await self._process.wait()
def terminate(self) -> None:
self._process.terminate()
def kill(self) -> None:
self._process.kill()
def send_signal(self, signal: Signals) -> None:
self._process.send_signal(signal)
@property
def pid(self) -> int:
return self._process.pid
@property
def returncode(self) -> Optional[int]:
return self._process.returncode
@property
def stdin(self) -> Optional[abc.ByteSendStream]:
return self._stdin
@property
def stdout(self) -> Optional[abc.ByteReceiveStream]:
return self._stdout
@property
def stderr(self) -> Optional[abc.ByteReceiveStream]:
return self._stderr
async def open_process(command: Union[str, Sequence[str]], *, shell: bool,
stdin: int, stdout: int, stderr: int,
cwd: Union[str, bytes, PathLike, None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False) -> Process:
process = await trio_open_process(command, stdin=stdin, stdout=stdout, stderr=stderr,
shell=shell, cwd=cwd, env=env,
start_new_session=start_new_session)
stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
return Process(process, stdin_stream, stdout_stream, stderr_stream)
class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
def after_run(self) -> None:
super().after_run()
current_default_worker_process_limiter: RunVar = RunVar(
'current_default_worker_process_limiter')
async def _shutdown_process_pool(workers: Set[Process]) -> None:
process: Process
try:
await sleep(math.inf)
except trio.Cancelled:
for process in workers:
if process.returncode is None:
process.kill()
with CancelScope(shield=True):
for process in workers:
await process.aclose()
def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None:
trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)
#
# Sockets and networking
#
class _TrioSocketMixin(Generic[T_SockAddr]):
def __init__(self, trio_socket: TrioSocketType) -> None:
self._trio_socket = trio_socket
self._closed = False
def _check_closed(self) -> None:
if self._closed:
raise ClosedResourceError
if self._trio_socket.fileno() < 0:
raise BrokenResourceError
@property
def _raw_socket(self) -> socket.socket:
return self._trio_socket._sock # type: ignore[attr-defined]
async def aclose(self) -> None:
if self._trio_socket.fileno() >= 0:
self._closed = True
self._trio_socket.close()
def _convert_socket_error(self, exc: BaseException) -> 'NoReturn':
if isinstance(exc, trio.ClosedResourceError):
raise ClosedResourceError from exc
elif self._trio_socket.fileno() < 0 and self._closed:
raise ClosedResourceError from None
elif isinstance(exc, OSError):
raise BrokenResourceError from exc
else:
raise exc
class SocketStream(_TrioSocketMixin, abc.SocketStream):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard('reading from')
self._send_guard = ResourceGuard('writing to')
async def receive(self, max_bytes: int = 65536) -> bytes:
with self._receive_guard:
try:
data = await self._trio_socket.recv(max_bytes)
except BaseException as exc:
self._convert_socket_error(exc)
if data:
return data
else:
raise EndOfStream
async def send(self, item: bytes) -> None:
with self._send_guard:
view = memoryview(item)
while view:
try:
bytes_sent = await self._trio_socket.send(view)
except BaseException as exc:
self._convert_socket_error(exc)
view = view[bytes_sent:]
async def send_eof(self) -> None:
self._trio_socket.shutdown(socket.SHUT_WR)
class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
if not isinstance(msglen, int) or msglen < 0:
raise ValueError('msglen must be a non-negative integer')
if not isinstance(maxfds, int) or maxfds < 1:
raise ValueError('maxfds must be a positive integer')
fds = array.array("i")
await checkpoint()
with self._receive_guard:
while True:
try:
message, ancdata, flags, addr = await self._trio_socket.recvmsg(
msglen, socket.CMSG_LEN(maxfds * fds.itemsize))
except BaseException as exc:
self._convert_socket_error(exc)
else:
if not message and not ancdata:
raise EndOfStream
break
for cmsg_level, cmsg_type, cmsg_data in ancdata:
if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, '
f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}')
fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
return message, list(fds)
async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None:
if not message:
raise ValueError('message must not be empty')
if not fds:
raise ValueError('fds must not be empty')
filenos: List[int] = []
for fd in fds:
if isinstance(fd, int):
filenos.append(fd)
elif isinstance(fd, IOBase):
filenos.append(fd.fileno())
fdarray = array.array("i", filenos)
await checkpoint()
with self._send_guard:
while True:
try:
await self._trio_socket.sendmsg(
[message],
[(socket.SOL_SOCKET, socket.SCM_RIGHTS, # type: ignore[list-item]
fdarray)]
)
break
except BaseException as exc:
self._convert_socket_error(exc)
class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
def __init__(self, raw_socket: socket.socket):
super().__init__(trio.socket.from_stdlib_socket(raw_socket))
self._accept_guard = ResourceGuard('accepting connections from')
async def accept(self) -> SocketStream:
with self._accept_guard:
try:
trio_socket, _addr = await self._trio_socket.accept()
except BaseException as exc:
self._convert_socket_error(exc)
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
return SocketStream(trio_socket)
class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
def __init__(self, raw_socket: socket.socket):
super().__init__(trio.socket.from_stdlib_socket(raw_socket))
self._accept_guard = ResourceGuard('accepting connections from')
async def accept(self) -> UNIXSocketStream:
with self._accept_guard:
try:
trio_socket, _addr = await self._trio_socket.accept()
except BaseException as exc:
self._convert_socket_error(exc)
return UNIXSocketStream(trio_socket)
class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard('reading from')
self._send_guard = ResourceGuard('writing to')
async def receive(self) -> Tuple[bytes, IPSockAddrType]:
with self._receive_guard:
try:
data, addr = await self._trio_socket.recvfrom(65536)
return data, convert_ipv6_sockaddr(addr)
except BaseException as exc:
self._convert_socket_error(exc)
async def send(self, item: UDPPacketType) -> None:
with self._send_guard:
try:
await self._trio_socket.sendto(*item)
except BaseException as exc:
self._convert_socket_error(exc)
class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard('reading from')
self._send_guard = ResourceGuard('writing to')
async def receive(self) -> bytes:
with self._receive_guard:
try:
return await self._trio_socket.recv(65536)
except BaseException as exc:
self._convert_socket_error(exc)
async def send(self, item: bytes) -> None:
with self._send_guard:
try:
await self._trio_socket.send(item)
except BaseException as exc:
self._convert_socket_error(exc)
async def connect_tcp(host: str, port: int,
local_address: Optional[IPSockAddrType] = None) -> SocketStream:
family = socket.AF_INET6 if ':' in host else socket.AF_INET
trio_socket = trio.socket.socket(family)
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if local_address:
await trio_socket.bind(local_address)
try:
await trio_socket.connect((host, port))
except BaseException:
trio_socket.close()
raise
return SocketStream(trio_socket)
async def connect_unix(path: str) -> UNIXSocketStream:
trio_socket = trio.socket.socket(socket.AF_UNIX)
try:
await trio_socket.connect(path)
except BaseException:
trio_socket.close()
raise
return UNIXSocketStream(trio_socket)
async def create_udp_socket(
family: socket.AddressFamily,
local_address: Optional[IPSockAddrType],
remote_address: Optional[IPSockAddrType],
reuse_port: bool
) -> Union[UDPSocket, ConnectedUDPSocket]:
trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)
if reuse_port:
trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if local_address:
await trio_socket.bind(local_address)
if remote_address:
await trio_socket.connect(remote_address)
return ConnectedUDPSocket(trio_socket)
else:
return UDPSocket(trio_socket)
getaddrinfo = trio.socket.getaddrinfo
getnameinfo = trio.socket.getnameinfo
async def wait_socket_readable(sock: socket.socket) -> None:
try:
await wait_readable(sock)
except trio.ClosedResourceError as exc:
raise ClosedResourceError().with_traceback(exc.__traceback__) from None
except trio.BusyResourceError:
raise BusyResourceError('reading from') from None
async def wait_socket_writable(sock: socket.socket) -> None:
try:
await wait_writable(sock)
except trio.ClosedResourceError as exc:
raise ClosedResourceError().with_traceback(exc.__traceback__) from None
except trio.BusyResourceError:
raise BusyResourceError('writing to') from None
#
# Synchronization
#
class Event(BaseEvent):
def __new__(cls) -> 'Event':
return object.__new__(cls)
def __init__(self) -> None:
self.__original = trio.Event()
def is_set(self) -> bool:
return self.__original.is_set()
async def wait(self) -> None:
return await self.__original.wait()
def statistics(self) -> EventStatistics:
orig_statistics = self.__original.statistics()
return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)
def set(self) -> DeprecatedAwaitable:
self.__original.set()
return DeprecatedAwaitable(self.set)
class CapacityLimiter(BaseCapacityLimiter):
def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter":
return object.__new__(cls)
def __init__(self, *args: Any, original: Optional[trio.CapacityLimiter] = None) -> None:
self.__original = original or trio.CapacityLimiter(*args)
async def __aenter__(self) -> None:
return await self.__original.__aenter__()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return await self.__original.__aexit__(exc_type, exc_val, exc_tb)
@property
def total_tokens(self) -> float:
return self.__original.total_tokens
@total_tokens.setter
def total_tokens(self, value: float) -> None:
self.__original.total_tokens = value
@property
def borrowed_tokens(self) -> int:
return self.__original.borrowed_tokens
@property
def available_tokens(self) -> float:
return self.__original.available_tokens
def acquire_nowait(self) -> DeprecatedAwaitable:
self.__original.acquire_nowait()
return DeprecatedAwaitable(self.acquire_nowait)
def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
self.__original.acquire_on_behalf_of_nowait(borrower)
return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)
async def acquire(self) -> None:
await self.__original.acquire()
async def acquire_on_behalf_of(self, borrower: object) -> None:
await self.__original.acquire_on_behalf_of(borrower)
def release(self) -> None:
return self.__original.release()
def release_on_behalf_of(self, borrower: object) -> None:
return self.__original.release_on_behalf_of(borrower)
def statistics(self) -> CapacityLimiterStatistics:
orig = self.__original.statistics()
return CapacityLimiterStatistics(
borrowed_tokens=orig.borrowed_tokens, total_tokens=orig.total_tokens,
borrowers=orig.borrowers, tasks_waiting=orig.tasks_waiting)
_capacity_limiter_wrapper: RunVar = RunVar('_capacity_limiter_wrapper')
def current_default_thread_limiter() -> CapacityLimiter:
try:
return _capacity_limiter_wrapper.get()
except LookupError:
limiter = CapacityLimiter(original=trio.to_thread.current_default_thread_limiter())
_capacity_limiter_wrapper.set(limiter)
return limiter
#
# Signal handling
#
class _SignalReceiver(DeprecatedAsyncContextManager[T]):
def __init__(self, cm: ContextManager[T]):
self._cm = cm
def __enter__(self) -> T:
return self._cm.__enter__()
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def open_signal_receiver(*signals: Signals) -> _SignalReceiver:
cm = trio.open_signal_receiver(*signals)
return _SignalReceiver(cm)
#
# Testing and debugging
#
def get_current_task() -> TaskInfo:
task = trio_lowlevel.current_task()
parent_id = None
if task.parent_nursery and task.parent_nursery.parent_task:
parent_id = id(task.parent_nursery.parent_task)
return TaskInfo(id(task), parent_id, task.name, task.coro)
def get_running_tasks() -> List[TaskInfo]:
root_task = trio_lowlevel.current_root_task()
task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)]
nurseries = root_task.child_nurseries
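    # Breadth-first walk of the nursery tree: each pass records a TaskInfo for
    # every task in the current level's nurseries, then descends into theirs.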
while nurseries:
new_nurseries: List[trio.Nursery] = []
for nursery in nurseries:
for task in nursery.child_tasks:
task_infos.append(
TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro))
new_nurseries.extend(task.child_nurseries)
nurseries = new_nurseries
return task_infos
def wait_all_tasks_blocked() -> Awaitable[None]:
import trio.testing
return trio.testing.wait_all_tasks_blocked()
class TestRunner(abc.TestRunner):
def __init__(self, **options: Any) -> None:
from collections import deque
from queue import Queue
self._call_queue: "Queue[Callable[..., object]]" = Queue()
self._result_queue: Deque[Outcome] = deque()
self._stop_event: Optional[trio.Event] = None
self._nursery: Optional[trio.Nursery] = None
self._options = options
async def _trio_main(self) -> None:
self._stop_event = trio.Event()
async with trio.open_nursery() as self._nursery:
await self._stop_event.wait()
async def _call_func(self, func: Callable[..., Awaitable[object]],
args: tuple, kwargs: dict) -> None:
try:
retval = await func(*args, **kwargs)
except BaseException as exc:
self._result_queue.append(Error(exc))
else:
self._result_queue.append(Value(retval))
def _main_task_finished(self, outcome: object) -> None:
self._nursery = None
def close(self) -> None:
if self._stop_event:
self._stop_event.set()
while self._nursery is not None:
self._call_queue.get()()
def call(self, func: Callable[..., Awaitable[T_Retval]],
*args: object, **kwargs: object) -> T_Retval:
if self._nursery is None:
trio.lowlevel.start_guest_run(
self._trio_main, run_sync_soon_threadsafe=self._call_queue.put,
done_callback=self._main_task_finished, **self._options)
while self._nursery is None:
self._call_queue.get()()
self._nursery.start_soon(self._call_func, func, args, kwargs)
while not self._result_queue:
self._call_queue.get()()
outcome = self._result_queue.pop()
return outcome.unwrap()
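# TestRunner drives trio in guest mode (trio.lowlevel.start_guest_run): trio
# schedules its event loop steps onto the host thread through the call queue,
# and call() keeps pumping that queue until a result or the nursery shuts down.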
175
.venv/lib/python3.9/site-packages/anyio/_core/_compat.py Normal file
View file
@ -0,0 +1,175 @@
from abc import ABCMeta, abstractmethod
from contextlib import AbstractContextManager
from types import TracebackType
from typing import (
TYPE_CHECKING, Any, AsyncContextManager, Callable, ContextManager, Generator, Generic,
Iterable, List, Optional, Tuple, Type, TypeVar, Union, overload)
from warnings import warn
if TYPE_CHECKING:
from ._testing import TaskInfo
else:
TaskInfo = object
T = TypeVar('T')
AnyDeprecatedAwaitable = Union['DeprecatedAwaitable', 'DeprecatedAwaitableFloat',
'DeprecatedAwaitableList[T]', TaskInfo]
@overload
async def maybe_async(__obj: TaskInfo) -> TaskInfo:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitableFloat') -> float:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitableList[T]') -> List[T]:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitable') -> None:
...
async def maybe_async(__obj: 'AnyDeprecatedAwaitable[T]') -> Union[TaskInfo, float, List[T], None]:
"""
Await on the given object if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were converted from coroutine functions into regular functions.
Do **not** try to use this for any other purpose!
:return: the result of awaiting on the object if coroutine, or the object itself otherwise
.. versionadded:: 2.2
"""
return __obj._unwrap()
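# Typical migration use, as a sketch: on AnyIO 3.x, Event.set() is a regular
# method returning a DeprecatedAwaitable, so wrapping the call keeps code that
# still awaits it (AnyIO 2.x style) working on both major versions
# (Event here stands for anyio.Event):
#
#     async def notify(event: Event) -> None:
#         await maybe_async(event.set())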
class _ContextManagerWrapper:
def __init__(self, cm: ContextManager[T]):
self._cm = cm
async def __aenter__(self) -> T:
return self._cm.__enter__()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def maybe_async_cm(cm: Union[ContextManager[T], AsyncContextManager[T]]) -> AsyncContextManager[T]:
"""
Wrap a regular context manager as an async one if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were changed to return regular context managers instead of async ones.
:param cm: a regular or async context manager
:return: an async context manager
.. versionadded:: 2.2
"""
if not isinstance(cm, AbstractContextManager):
        raise TypeError('Given object is not a context manager')
return _ContextManagerWrapper(cm)
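# Sketch: fail_after() returns a regular context manager on AnyIO 3.x but was
# an async context manager on 2.x; maybe_async_cm() bridges the two (fail_after
# and sleep here stand for anyio.fail_after / anyio.sleep):
#
#     async with maybe_async_cm(fail_after(5)):
#         await sleep(1)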
def _warn_deprecation(awaitable: 'AnyDeprecatedAwaitable[Any]', stacklevel: int = 1) -> None:
warn(f'Awaiting on {awaitable._name}() is deprecated. Use "await '
f'anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x '
f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
DeprecationWarning, stacklevel=stacklevel + 1)
class DeprecatedAwaitable:
def __init__(self, func: Callable[..., 'DeprecatedAwaitable']):
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, None]:
_warn_deprecation(self)
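        # The unreachable "yield" below turns __await__ into a generator
        # function without ever suspending, so awaiting this object completes
        # immediately (after emitting the deprecation warning above).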
if False:
yield
def __reduce__(self) -> Tuple[Type[None], Tuple[()]]:
return type(None), ()
def _unwrap(self) -> None:
return None
class DeprecatedAwaitableFloat(float):
def __new__(
cls, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']
) -> 'DeprecatedAwaitableFloat':
return super().__new__(cls, x)
def __init__(self, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']):
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, float]:
_warn_deprecation(self)
if False:
yield
return float(self)
def __reduce__(self) -> Tuple[Type[float], Tuple[float]]:
return float, (float(self),)
def _unwrap(self) -> float:
return float(self)
class DeprecatedAwaitableList(List[T]):
def __init__(self, iterable: Iterable[T] = (), *,
func: Callable[..., 'DeprecatedAwaitableList[T]']):
super().__init__(iterable)
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, List[T]]:
_warn_deprecation(self)
if False:
yield
return list(self)
def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]:
return list, (list(self),)
def _unwrap(self) -> List[T]:
return list(self)
class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
@abstractmethod
def __enter__(self) -> T:
pass
@abstractmethod
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
pass
async def __aenter__(self) -> T:
warn(f'Using {self.__class__.__name__} as an async context manager has been deprecated. '
f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
f'you are completely migrating to AnyIO 3+.', DeprecationWarning)
return self.__enter__()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self.__exit__(exc_type, exc_val, exc_tb)
140
.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py Normal file
View file
@ -0,0 +1,140 @@
import math
import sys
import threading
from contextlib import contextmanager
from importlib import import_module
from typing import Any, Callable, Coroutine, Dict, Generator, Optional, Tuple, Type, TypeVar
import sniffio
from ._compat import DeprecatedAwaitableFloat
# This must be updated when new backends are introduced
BACKENDS = 'asyncio', 'trio'
T_Retval = TypeVar('T_Retval')
threadlocals = threading.local()
def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object,
backend: str = 'asyncio', backend_options: Optional[Dict[str, Any]] = None) -> T_Retval:
"""
Run the given coroutine function in an asynchronous event loop.
The current thread must not be already running an event loop.
:param func: a coroutine function
:param args: positional arguments to ``func``
    :param backend: name of the asynchronous event loop implementation; currently either
        ``asyncio`` or ``trio``
:param backend_options: keyword arguments to call the backend ``run()`` implementation with
(documented :ref:`here <backend options>`)
:return: the return value of the coroutine function
:raises RuntimeError: if an asynchronous event loop is already running in this thread
:raises LookupError: if the named backend is not found
"""
try:
asynclib_name = sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
else:
raise RuntimeError(f'Already running {asynclib_name} in this thread')
try:
asynclib = import_module(f'..._backends._{backend}', package=__name__)
except ImportError as exc:
raise LookupError(f'No such backend: {backend}') from exc
token = None
if sniffio.current_async_library_cvar.get(None) is None:
# Since we're in control of the event loop, we can cache the name of the async library
token = sniffio.current_async_library_cvar.set(backend)
try:
backend_options = backend_options or {}
return asynclib.run(func, *args, **backend_options)
finally:
if token:
sniffio.current_async_library_cvar.reset(token)
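# Sketch: run() is the synchronous entry point into anyio ("main" is a
# placeholder coroutine function):
#
#     async def main() -> None:
#         await sleep(1)
#
#     run(main)                  # uses the asyncio backend by default
#     run(main, backend='trio')  # or the trio backend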
async def sleep(delay: float) -> None:
"""
Pause the current task for the specified duration.
:param delay: the duration, in seconds
"""
return await get_asynclib().sleep(delay)
async def sleep_forever() -> None:
"""
Pause the current task until it's cancelled.
This is a shortcut for ``sleep(math.inf)``.
.. versionadded:: 3.1
"""
await sleep(math.inf)
async def sleep_until(deadline: float) -> None:
"""
Pause the current task until the given time.
:param deadline: the absolute time to wake up at (according to the internal monotonic clock of
the event loop)
.. versionadded:: 3.1
"""
now = current_time()
await sleep(max(deadline - now, 0))
def current_time() -> DeprecatedAwaitableFloat:
"""
Return the current value of the event loop's internal clock.
:return: the clock value (seconds)
"""
return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)
def get_all_backends() -> Tuple[str, ...]:
"""Return a tuple of the names of all built-in backends."""
return BACKENDS
def get_cancelled_exc_class() -> Type[BaseException]:
"""Return the current async library's cancellation exception class."""
return get_asynclib().CancelledError
#
# Private API
#
@contextmanager
def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
module = sys.modules['anyio._backends._' + backend]
threadlocals.current_async_module = module
try:
yield
finally:
del threadlocals.current_async_module
def get_asynclib(asynclib_name: Optional[str] = None) -> Any:
if asynclib_name is None:
asynclib_name = sniffio.current_async_library()
modulename = 'anyio._backends._' + asynclib_name
try:
return sys.modules[modulename]
except KeyError:
return import_module(modulename)
85
.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py Normal file
View file
@ -0,0 +1,85 @@
from traceback import format_exception
from typing import List
class BrokenResourceError(Exception):
"""
Raised when trying to use a resource that has been rendered unusable due to external causes
(e.g. a send stream whose peer has disconnected).
"""
class BrokenWorkerProcess(Exception):
"""
Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
misbehaves.
"""
class BusyResourceError(Exception):
"""Raised when two tasks are trying to read from or write to the same resource concurrently."""
def __init__(self, action: str):
super().__init__(f'Another task is already {action} this resource')
class ClosedResourceError(Exception):
"""Raised when trying to use a resource that has been closed."""
class DelimiterNotFound(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
maximum number of bytes has been read without the delimiter being found.
"""
def __init__(self, max_bytes: int) -> None:
super().__init__(f'The delimiter was not found among the first {max_bytes} bytes')
class EndOfStream(Exception):
"""Raised when trying to read from a stream that has been closed from the other end."""
class ExceptionGroup(BaseException):
"""
Raised when multiple exceptions have been raised in a task group.
:var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together
"""
SEPARATOR = '----------------------------\n'
exceptions: List[BaseException]
def __str__(self) -> str:
tracebacks = [''.join(format_exception(type(exc), exc, exc.__traceback__))
for exc in self.exceptions]
return f'{len(self.exceptions)} exceptions were raised in the task group:\n' \
f'{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}'
def __repr__(self) -> str:
exception_reprs = ', '.join(repr(exc) for exc in self.exceptions)
return f'<{self.__class__.__name__}: {exception_reprs}>'
class IncompleteRead(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
connection is closed before the requested amount of bytes has been read.
"""
def __init__(self) -> None:
super().__init__('The stream was closed before the read operation could be completed')
class TypedAttributeLookupError(LookupError):
"""
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not
found and no default value has been given.
"""
class WouldBlock(Exception):
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
529
.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py Normal file
View file
@ -0,0 +1,529 @@
import os
import pathlib
import sys
from dataclasses import dataclass
from functools import partial
from os import PathLike
from typing import (
IO, TYPE_CHECKING, Any, AnyStr, AsyncIterator, Callable, Generic, Iterable, Iterator, List,
Optional, Sequence, Tuple, Union, cast, overload)
from .. import to_thread
from ..abc import AsyncResource
if sys.version_info >= (3, 8):
from typing import Final
else:
from typing_extensions import Final
if TYPE_CHECKING:
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
else:
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
class AsyncFile(AsyncResource, Generic[AnyStr]):
"""
An asynchronous file object.
    This class wraps a standard file object and provides async-friendly versions of the following
    blocking methods (where available on the original file object):
* read
* read1
* readline
* readlines
* readinto
* readinto1
* write
* writelines
* truncate
* seek
* tell
* flush
All other methods are directly passed through.
This class supports the asynchronous context manager protocol which closes the underlying file
at the end of the context block.
This class also supports asynchronous iteration::
async with await open_file(...) as f:
async for line in f:
print(line)
"""
def __init__(self, fp: IO[AnyStr]) -> None:
self._fp: Any = fp
def __getattr__(self, name: str) -> object:
return getattr(self._fp, name)
@property
def wrapped(self) -> IO[AnyStr]:
"""The wrapped file object."""
return self._fp
async def __aiter__(self) -> AsyncIterator[AnyStr]:
while True:
line = await self.readline()
if line:
yield line
else:
break
async def aclose(self) -> None:
return await to_thread.run_sync(self._fp.close)
async def read(self, size: int = -1) -> AnyStr:
return await to_thread.run_sync(self._fp.read, size)
async def read1(self: 'AsyncFile[bytes]', size: int = -1) -> bytes:
return await to_thread.run_sync(self._fp.read1, size)
async def readline(self) -> AnyStr:
return await to_thread.run_sync(self._fp.readline)
async def readlines(self) -> List[AnyStr]:
return await to_thread.run_sync(self._fp.readlines)
async def readinto(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes:
return await to_thread.run_sync(self._fp.readinto, b)
async def readinto1(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes:
return await to_thread.run_sync(self._fp.readinto1, b)
@overload
async def write(self: 'AsyncFile[bytes]', b: ReadableBuffer) -> int: ...
@overload
async def write(self: 'AsyncFile[str]', b: str) -> int: ...
async def write(self, b: Union[ReadableBuffer, str]) -> int:
return await to_thread.run_sync(self._fp.write, b)
@overload
async def writelines(self: 'AsyncFile[bytes]', lines: Iterable[ReadableBuffer]) -> None: ...
@overload
async def writelines(self: 'AsyncFile[str]', lines: Iterable[str]) -> None: ...
async def writelines(self, lines: Union[Iterable[ReadableBuffer], Iterable[str]]) -> None:
return await to_thread.run_sync(self._fp.writelines, lines)
async def truncate(self, size: Optional[int] = None) -> int:
return await to_thread.run_sync(self._fp.truncate, size)
async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int:
return await to_thread.run_sync(self._fp.seek, offset, whence)
async def tell(self) -> int:
return await to_thread.run_sync(self._fp.tell)
async def flush(self) -> None:
return await to_thread.run_sync(self._fp.flush)
@overload
async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenBinaryMode,
buffering: int = ..., encoding: Optional[str] = ...,
errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...,
opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[bytes]:
...
@overload
async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenTextMode = ...,
buffering: int = ..., encoding: Optional[str] = ...,
errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...,
opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[str]:
...
async def open_file(file: Union[str, 'PathLike[str]', int], mode: str = 'r', buffering: int = -1,
encoding: Optional[str] = None, errors: Optional[str] = None,
newline: Optional[str] = None, closefd: bool = True,
opener: Optional[Callable[[str, int], int]] = None) -> AsyncFile[Any]:
"""
Open a file asynchronously.
The arguments are exactly the same as for the builtin :func:`open`.
:return: an asynchronous file object
"""
fp = await to_thread.run_sync(open, file, mode, buffering, encoding, errors, newline,
closefd, opener)
return AsyncFile(fp)
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
"""
Wrap an existing file as an asynchronous file.
:param file: an existing file-like object
:return: an asynchronous file object
"""
return AsyncFile(file)
@dataclass(eq=False)
class _PathIterator(AsyncIterator['Path']):
iterator: Iterator['PathLike[str]']
async def __anext__(self) -> 'Path':
nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True)
if nextval is None:
raise StopAsyncIteration from None
return Path(cast('PathLike[str]', nextval))
class Path:
"""
An asynchronous version of :class:`pathlib.Path`.
This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but
it is compatible with the :class:`os.PathLike` interface.
    It implements the Python 3.10 version of the :class:`pathlib.Path` interface, except for the
    deprecated :meth:`~pathlib.Path.link_to` method.
Any methods that do disk I/O need to be awaited on. These methods are:
* :meth:`~pathlib.Path.absolute`
* :meth:`~pathlib.Path.chmod`
* :meth:`~pathlib.Path.cwd`
* :meth:`~pathlib.Path.exists`
* :meth:`~pathlib.Path.expanduser`
* :meth:`~pathlib.Path.group`
* :meth:`~pathlib.Path.hardlink_to`
* :meth:`~pathlib.Path.home`
* :meth:`~pathlib.Path.is_block_device`
* :meth:`~pathlib.Path.is_char_device`
* :meth:`~pathlib.Path.is_dir`
* :meth:`~pathlib.Path.is_fifo`
* :meth:`~pathlib.Path.is_file`
* :meth:`~pathlib.Path.is_mount`
* :meth:`~pathlib.Path.lchmod`
* :meth:`~pathlib.Path.lstat`
* :meth:`~pathlib.Path.mkdir`
* :meth:`~pathlib.Path.open`
* :meth:`~pathlib.Path.owner`
* :meth:`~pathlib.Path.read_bytes`
* :meth:`~pathlib.Path.read_text`
* :meth:`~pathlib.Path.readlink`
* :meth:`~pathlib.Path.rename`
* :meth:`~pathlib.Path.replace`
* :meth:`~pathlib.Path.rmdir`
* :meth:`~pathlib.Path.samefile`
* :meth:`~pathlib.Path.stat`
* :meth:`~pathlib.Path.touch`
* :meth:`~pathlib.Path.unlink`
* :meth:`~pathlib.Path.write_bytes`
* :meth:`~pathlib.Path.write_text`
Additionally, the following methods return an async iterator yielding :class:`~.Path` objects:
* :meth:`~pathlib.Path.glob`
* :meth:`~pathlib.Path.iterdir`
* :meth:`~pathlib.Path.rglob`
"""
__slots__ = '_path', '__weakref__'
__weakref__: Any
def __init__(self, *args: Union[str, 'PathLike[str]']) -> None:
self._path: Final[pathlib.Path] = pathlib.Path(*args)
def __fspath__(self) -> str:
return self._path.__fspath__()
def __str__(self) -> str:
return self._path.__str__()
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.as_posix()!r})'
def __bytes__(self) -> bytes:
return self._path.__bytes__()
def __hash__(self) -> int:
return self._path.__hash__()
def __eq__(self, other: object) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__eq__(target)
def __lt__(self, other: 'Path') -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__lt__(target)
def __le__(self, other: 'Path') -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__le__(target)
def __gt__(self, other: 'Path') -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__gt__(target)
def __ge__(self, other: 'Path') -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__ge__(target)
def __truediv__(self, other: Any) -> 'Path':
return Path(self._path / other)
def __rtruediv__(self, other: Any) -> 'Path':
return Path(other) / self
@property
def parts(self) -> Tuple[str, ...]:
return self._path.parts
@property
def drive(self) -> str:
return self._path.drive
@property
def root(self) -> str:
return self._path.root
@property
def anchor(self) -> str:
return self._path.anchor
@property
def parents(self) -> Sequence['Path']:
return tuple(Path(p) for p in self._path.parents)
@property
def parent(self) -> 'Path':
return Path(self._path.parent)
@property
def name(self) -> str:
return self._path.name
@property
def suffix(self) -> str:
return self._path.suffix
@property
def suffixes(self) -> List[str]:
return self._path.suffixes
@property
def stem(self) -> str:
return self._path.stem
async def absolute(self) -> 'Path':
path = await to_thread.run_sync(self._path.absolute)
return Path(path)
def as_posix(self) -> str:
return self._path.as_posix()
def as_uri(self) -> str:
return self._path.as_uri()
def match(self, path_pattern: str) -> bool:
return self._path.match(path_pattern)
def is_relative_to(self, *other: Union[str, 'PathLike[str]']) -> bool:
try:
self.relative_to(*other)
return True
except ValueError:
return False
async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
func = partial(os.chmod, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, mode)
@classmethod
async def cwd(cls) -> 'Path':
path = await to_thread.run_sync(pathlib.Path.cwd)
return cls(path)
async def exists(self) -> bool:
return await to_thread.run_sync(self._path.exists, cancellable=True)
async def expanduser(self) -> 'Path':
return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True))
def glob(self, pattern: str) -> AsyncIterator['Path']:
gen = self._path.glob(pattern)
return _PathIterator(gen)
async def group(self) -> str:
return await to_thread.run_sync(self._path.group, cancellable=True)
async def hardlink_to(self, target: Union[str, pathlib.Path, 'Path']) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(os.link, target, self)
@classmethod
async def home(cls) -> 'Path':
home_path = await to_thread.run_sync(pathlib.Path.home)
return cls(home_path)
def is_absolute(self) -> bool:
return self._path.is_absolute()
async def is_block_device(self) -> bool:
return await to_thread.run_sync(self._path.is_block_device, cancellable=True)
async def is_char_device(self) -> bool:
return await to_thread.run_sync(self._path.is_char_device, cancellable=True)
async def is_dir(self) -> bool:
return await to_thread.run_sync(self._path.is_dir, cancellable=True)
async def is_fifo(self) -> bool:
return await to_thread.run_sync(self._path.is_fifo, cancellable=True)
async def is_file(self) -> bool:
return await to_thread.run_sync(self._path.is_file, cancellable=True)
async def is_mount(self) -> bool:
return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True)
def is_reserved(self) -> bool:
return self._path.is_reserved()
async def is_socket(self) -> bool:
return await to_thread.run_sync(self._path.is_socket, cancellable=True)
async def is_symlink(self) -> bool:
return await to_thread.run_sync(self._path.is_symlink, cancellable=True)
def iterdir(self) -> AsyncIterator['Path']:
gen = self._path.iterdir()
return _PathIterator(gen)
def joinpath(self, *args: Union[str, 'PathLike[str]']) -> 'Path':
return Path(self._path.joinpath(*args))
async def lchmod(self, mode: int) -> None:
await to_thread.run_sync(self._path.lchmod, mode)
async def lstat(self) -> os.stat_result:
return await to_thread.run_sync(self._path.lstat, cancellable=True)
async def mkdir(self, mode: int = 0o777, parents: bool = False,
exist_ok: bool = False) -> None:
await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
@overload
async def open(self, mode: OpenBinaryMode, buffering: int = ..., encoding: Optional[str] = ...,
errors: Optional[str] = ..., newline: Optional[str] = ...) -> AsyncFile[bytes]:
...
@overload
async def open(self, mode: OpenTextMode = ..., buffering: int = ...,
encoding: Optional[str] = ..., errors: Optional[str] = ...,
newline: Optional[str] = ...) -> AsyncFile[str]:
...
async def open(self, mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None,
errors: Optional[str] = None, newline: Optional[str] = None) -> AsyncFile[Any]:
fp = await to_thread.run_sync(self._path.open, mode, buffering, encoding, errors, newline)
return AsyncFile(fp)
async def owner(self) -> str:
return await to_thread.run_sync(self._path.owner, cancellable=True)
async def read_bytes(self) -> bytes:
return await to_thread.run_sync(self._path.read_bytes)
async def read_text(self, encoding: Optional[str] = None, errors: Optional[str] = None) -> str:
return await to_thread.run_sync(self._path.read_text, encoding, errors)
def relative_to(self, *other: Union[str, 'PathLike[str]']) -> 'Path':
return Path(self._path.relative_to(*other))
async def readlink(self) -> 'Path':
target = await to_thread.run_sync(os.readlink, self._path)
return Path(cast(str, target))
async def rename(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path':
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.rename, target)
return Path(target)
async def replace(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path':
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.replace, target)
return Path(target)
async def resolve(self, strict: bool = False) -> 'Path':
func = partial(self._path.resolve, strict=strict)
return Path(await to_thread.run_sync(func, cancellable=True))
def rglob(self, pattern: str) -> AsyncIterator['Path']:
gen = self._path.rglob(pattern)
return _PathIterator(gen)
async def rmdir(self) -> None:
await to_thread.run_sync(self._path.rmdir)
async def samefile(self, other_path: Union[str, bytes, int, pathlib.Path, 'Path']) -> bool:
if isinstance(other_path, Path):
other_path = other_path._path
return await to_thread.run_sync(self._path.samefile, other_path, cancellable=True)
async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
func = partial(os.stat, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, cancellable=True)
async def symlink_to(self, target: Union[str, pathlib.Path, 'Path'],
target_is_directory: bool = False) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
await to_thread.run_sync(self._path.touch, mode, exist_ok)
async def unlink(self, missing_ok: bool = False) -> None:
try:
await to_thread.run_sync(self._path.unlink)
except FileNotFoundError:
if not missing_ok:
raise
def with_name(self, name: str) -> 'Path':
return Path(self._path.with_name(name))
def with_stem(self, stem: str) -> 'Path':
return Path(self._path.with_name(stem + self._path.suffix))
def with_suffix(self, suffix: str) -> 'Path':
return Path(self._path.with_suffix(suffix))
async def write_bytes(self, data: bytes) -> int:
return await to_thread.run_sync(self._path.write_bytes, data)
async def write_text(self, data: str, encoding: Optional[str] = None,
errors: Optional[str] = None, newline: Optional[str] = None) -> int:
# Path.write_text() does not support the "newline" parameter before Python 3.10
def sync_write_text() -> int:
with self._path.open('w', encoding=encoding, errors=errors, newline=newline) as fp:
return fp.write(data)
return await to_thread.run_sync(sync_write_text)
PathLike.register(Path)
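# Illustrative usage sketch (editorial, not part of the upstream module):
# exercising the async Path wrapper above; run with anyio.run(_path_demo).
async def _path_demo() -> None:
    config = Path('/tmp/demo.txt')          # illustrative path
    await config.write_text('hello')        # pathlib call runs in a worker thread
    assert await config.read_text() == 'hello'
    await config.unlink(missing_ok=True)    # missing_ok emulated as defined above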

View file

@ -0,0 +1,16 @@
from ..abc import AsyncResource
from ._tasks import CancelScope
async def aclose_forcefully(resource: AsyncResource) -> None:
"""
Close an asynchronous resource in a cancelled scope.
Doing this closes the resource without waiting on anything.
:param resource: the resource to close
"""
with CancelScope() as scope:
scope.cancel()
await resource.aclose()
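# Illustrative usage sketch (editorial): force-close a resource without waiting
# for a graceful shutdown; connect_tcp and the address are illustrative.
async def _force_close_demo() -> None:
    from anyio import connect_tcp
    stream = await connect_tcp('example.com', 80)
    await aclose_forcefully(stream)  # aclose() runs inside an already-cancelled scope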

View file

@ -0,0 +1,22 @@
from typing import AsyncIterator
from ._compat import DeprecatedAsyncContextManager
from ._eventloop import get_asynclib
def open_signal_receiver(*signals: int) -> DeprecatedAsyncContextManager[AsyncIterator[int]]:
"""
Start receiving operating system signals.
:param signals: signals to receive (e.g. ``signal.SIGINT``)
:return: an asynchronous context manager for an asynchronous iterator which yields signal
numbers
.. warning:: Windows does not support signals natively so it is best to avoid relying on this
in cross-platform applications.
.. warning:: On asyncio, this permanently replaces any previous signal handler for the given
signals, as set via :meth:`~asyncio.loop.add_signal_handler`.
"""
return get_asynclib().open_signal_receiver(*signals)
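# Illustrative usage sketch (editorial; POSIX only, per the warnings above):
# iterate over delivered signals until SIGTERM arrives.
async def _signal_demo() -> None:
    import signal
    with open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
        async for signum in signals:
            if signum == signal.SIGTERM:
                break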

View file

@ -0,0 +1,506 @@
import socket
import ssl
import sys
from ipaddress import IPv6Address, ip_address
from os import PathLike, chmod
from pathlib import Path
from socket import AddressFamily, SocketKind
from typing import Awaitable, List, Optional, Tuple, Union, cast, overload
from .. import to_thread
from ..abc import (
ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketListener, SocketStream, UDPSocket,
UNIXSocketStream)
from ..streams.stapled import MultiListener
from ..streams.tls import TLSStream
from ._eventloop import get_asynclib
from ._resources import aclose_forcefully
from ._synchronization import Event
from ._tasks import create_task_group, move_on_after
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
IPPROTO_IPV6 = getattr(socket, 'IPPROTO_IPV6', 41) # https://bugs.python.org/issue29515
GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]]
AnyIPAddressFamily = Literal[AddressFamily.AF_UNSPEC, AddressFamily.AF_INET,
AddressFamily.AF_INET6]
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
# tls_hostname given
@overload
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
ssl_context: Optional[ssl.SSLContext] = ..., tls_standard_compatible: bool = ...,
tls_hostname: str, happy_eyeballs_delay: float = ...
) -> TLSStream:
...
# ssl_context given
@overload
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
ssl_context: ssl.SSLContext, tls_standard_compatible: bool = ...,
tls_hostname: Optional[str] = ..., happy_eyeballs_delay: float = ...
) -> TLSStream:
...
# tls=True
@overload
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
tls: Literal[True], ssl_context: Optional[ssl.SSLContext] = ...,
tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ...,
happy_eyeballs_delay: float = ...
) -> TLSStream:
...
# tls=False
@overload
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
tls: Literal[False], ssl_context: Optional[ssl.SSLContext] = ...,
tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ...,
happy_eyeballs_delay: float = ...
) -> SocketStream:
...
# No TLS arguments
@overload
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
happy_eyeballs_delay: float = ...
) -> SocketStream:
...
async def connect_tcp(
remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = None,
tls: bool = False, ssl_context: Optional[ssl.SSLContext] = None,
tls_standard_compatible: bool = True, tls_hostname: Optional[str] = None,
happy_eyeballs_delay: float = 0.25
) -> Union[SocketStream, TLSStream]:
"""
Connect to a host using the TCP protocol.
This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555).
If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried
until one connection attempt succeeds. If the first attempt does not connect within 250
milliseconds, a second attempt is started using the next address in the list, and so on.
On IPv6 enabled systems, an IPv6 address (if available) is tried first.
When the connection has been established, a TLS handshake will be done if either
``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
:param remote_host: the IP address or host name to connect to
:param remote_port: port on the target host to connect to
:param local_host: the interface address or name to bind the socket to before connecting
:param tls: ``True`` to do a TLS handshake with the connected stream and return a
:class:`~anyio.streams.tls.TLSStream` instead
:param ssl_context: the SSL context object to use (if omitted, a default context is created)
:param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
the stream and requires that the server does this as well. Otherwise,
:exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
Some protocols, such as HTTP, require this option to be ``False``.
See :meth:`~ssl.SSLContext.wrap_socket` for details.
:param tls_hostname: host name to check the server certificate against (defaults to the value
of ``remote_host``)
:param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
:return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
:raises OSError: if the connection attempt fails
"""
# Placed here due to https://github.com/python/mypy/issues/7057
connected_stream: Optional[SocketStream] = None
async def try_connect(remote_host: str, event: Event) -> None:
nonlocal connected_stream
try:
stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
except OSError as exc:
oserrors.append(exc)
return
else:
if connected_stream is None:
connected_stream = stream
tg.cancel_scope.cancel()
else:
await stream.aclose()
finally:
event.set()
asynclib = get_asynclib()
local_address: Optional[IPSockAddrType] = None
family = socket.AF_UNSPEC
if local_host:
gai_res = await getaddrinfo(str(local_host), None)
family, *_, local_address = gai_res[0]
target_host = str(remote_host)
try:
addr_obj = ip_address(remote_host)
except ValueError:
# getaddrinfo() will raise an exception if name resolution fails
gai_res = await getaddrinfo(target_host, remote_port, family=family,
type=socket.SOCK_STREAM)
# Organize the list so that the first address is an IPv6 address (if available) and the
# second one is an IPv4 address. The rest can be in whatever order.
v6_found = v4_found = False
target_addrs: List[Tuple[socket.AddressFamily, str]] = []
for af, *rest, sa in gai_res:
if af == socket.AF_INET6 and not v6_found:
v6_found = True
target_addrs.insert(0, (af, sa[0]))
elif af == socket.AF_INET and not v4_found and v6_found:
v4_found = True
target_addrs.insert(1, (af, sa[0]))
else:
target_addrs.append((af, sa[0]))
else:
if isinstance(addr_obj, IPv6Address):
target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
else:
target_addrs = [(socket.AF_INET, addr_obj.compressed)]
oserrors: List[OSError] = []
async with create_task_group() as tg:
for i, (af, addr) in enumerate(target_addrs):
event = Event()
tg.start_soon(try_connect, addr, event)
with move_on_after(happy_eyeballs_delay):
await event.wait()
if connected_stream is None:
cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors)
raise OSError('All connection attempts failed') from cause
if tls or tls_hostname or ssl_context:
try:
return await TLSStream.wrap(connected_stream, server_side=False,
hostname=tls_hostname or str(remote_host),
ssl_context=ssl_context,
standard_compatible=tls_standard_compatible)
except BaseException:
await aclose_forcefully(connected_stream)
raise
return connected_stream
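# Illustrative usage sketch for connect_tcp() (editorial); the host and request
# are illustrative. Passing tls=True (or ssl_context/tls_hostname) would return
# a TLSStream instead.
async def _connect_demo() -> None:
    stream = await connect_tcp('example.com', 80)
    await stream.send(b'HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n')
    print(await stream.receive())
    await stream.aclose()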
async def connect_unix(path: Union[str, 'PathLike[str]']) -> UNIXSocketStream:
"""
Connect to the given UNIX socket.
Not available on Windows.
:param path: path to the socket
:return: a socket stream object
"""
path = str(Path(path))
return await get_asynclib().connect_unix(path)
async def create_tcp_listener(
*, local_host: Optional[IPAddressType] = None, local_port: int = 0,
family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, backlog: int = 65536,
reuse_port: bool = False
) -> MultiListener[SocketStream]:
"""
Create a TCP socket listener.
:param local_port: port number to listen on
:param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4
and IPv6 interfaces. To listen on all interfaces on a specific address family, use
``0.0.0.0`` for IPv4 or ``::`` for IPv6.
:param family: address family (used if ``local_host`` was omitted)
:param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
65536)
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a list of listener objects
"""
asynclib = get_asynclib()
backlog = min(backlog, 65536)
local_host = str(local_host) if local_host is not None else None
gai_res = await getaddrinfo(local_host, local_port, family=family, # type: ignore[arg-type]
type=socket.SOCK_STREAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
listeners: List[SocketListener] = []
try:
# The set() is here to work around a glibc bug:
# https://sourceware.org/bugzilla/show_bug.cgi?id=14969
for fam, *_, sockaddr in sorted(set(gai_res)):
raw_socket = socket.socket(fam)
raw_socket.setblocking(False)
# For Windows, enable exclusive address use. For others, enable address reuse.
if sys.platform == 'win32':
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
else:
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if reuse_port:
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
# If only IPv6 was requested, disable dual stack operation
if fam == socket.AF_INET6:
raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
raw_socket.bind(sockaddr)
raw_socket.listen(backlog)
listener = asynclib.TCPSocketListener(raw_socket)
listeners.append(listener)
except BaseException:
for listener in listeners:
await listener.aclose()
raise
return MultiListener(listeners)
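# Illustrative usage sketch (editorial): serve incoming connections with a
# handler; the echo handler and port below are illustrative.
async def _listener_demo() -> None:
    async def echo(stream: SocketStream) -> None:
        async with stream:
            async for chunk in stream:
                await stream.send(chunk)
    listener = await create_tcp_listener(local_port=9999)
    await listener.serve(echo)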
async def create_unix_listener(
path: Union[str, 'PathLike[str]'], *, mode: Optional[int] = None,
backlog: int = 65536) -> SocketListener:
"""
Create a UNIX socket listener.
Not available on Windows.
:param path: path of the socket
:param mode: permissions to set on the socket
:param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
65536)
:return: a listener object
.. versionchanged:: 3.0
If a socket already exists on the file system in the given path, it will be removed first.
"""
path_str = str(path)
path = Path(path)
if path.is_socket():
path.unlink()
backlog = min(backlog, 65536)
raw_socket = socket.socket(socket.AF_UNIX)
raw_socket.setblocking(False)
try:
await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True)
if mode is not None:
await to_thread.run_sync(chmod, path_str, mode, cancellable=True)
raw_socket.listen(backlog)
return get_asynclib().UNIXSocketListener(raw_socket)
except BaseException:
raw_socket.close()
raise
async def create_udp_socket(
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, *,
local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False
) -> UDPSocket:
"""
Create a UDP socket.
If ``local_port`` has been given, the socket will be bound to this port on the local machine,
making this socket suitable for providing UDP based services.
:param family: address family (``AF_INET`` or ``AF_INET6``) automatically determined from
``local_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a UDP socket
"""
if family is AddressFamily.AF_UNSPEC and not local_host:
raise ValueError('Either "family" or "local_host" must be given')
if local_host:
gai_res = await getaddrinfo(str(local_host), local_port, family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
elif family is AddressFamily.AF_INET6:
local_address = ('::', 0)
else:
local_address = ('0.0.0.0', 0)
return await get_asynclib().create_udp_socket(family, local_address, None, reuse_port)
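# Illustrative usage sketch (editorial): an unconnected UDP socket sends
# (payload, address) pairs; sendto() is a convenience alias for send().
# The addresses and port are illustrative.
async def _udp_demo() -> None:
    udp = await create_udp_socket(local_host='127.0.0.1')
    await udp.sendto(b'ping', '127.0.0.1', 9999)
    await udp.aclose()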
async def create_connected_udp_socket(
remote_host: IPAddressType, remote_port: int, *,
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False
) -> ConnectedUDPSocket:
"""
Create a connected UDP socket.
Connected UDP sockets can only communicate with the specified remote host/port, and any packets
sent from other sources are dropped.
:param remote_host: remote host to set as the default target
:param remote_port: port on the remote host to set as the default target
:param family: address family (``AF_INET`` or ``AF_INET6``) automatically determined from
``local_host`` or ``remote_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a connected UDP socket
"""
local_address = None
if local_host:
gai_res = await getaddrinfo(str(local_host), local_port, family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
gai_res = await getaddrinfo(str(remote_host), remote_port, family=family,
type=socket.SOCK_DGRAM)
family = cast(AnyIPAddressFamily, gai_res[0][0])
remote_address = gai_res[0][-1]
return await get_asynclib().create_udp_socket(family, local_address, remote_address,
reuse_port)
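# Illustrative usage sketch (editorial): a connected UDP socket exchanges bare
# payloads with its fixed peer; the address is illustrative.
async def _connected_udp_demo() -> None:
    udp = await create_connected_udp_socket('127.0.0.1', 9999)
    await udp.send(b'ping')
    await udp.aclose()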
async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *,
family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0,
proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType:
"""
Look up a numeric IP address given a host name.
Internationalized domain names are translated according to the (non-transitional) IDNA 2008
standard.
.. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
(host, port), unlike what :func:`socket.getaddrinfo` does.
:param host: host name
:param port: port number
:param family: socket family (``AF_INET``, ...)
:param type: socket type (``SOCK_STREAM``, ...)
:param proto: protocol number
:param flags: flags to pass to upstream ``getaddrinfo()``
:return: list of tuples containing (family, type, proto, canonname, sockaddr)
.. seealso:: :func:`socket.getaddrinfo`
"""
# Handle unicode hostnames
if isinstance(host, str):
try:
encoded_host = host.encode('ascii')
except UnicodeEncodeError:
import idna
encoded_host = idna.encode(host, uts46=True)
else:
encoded_host = host
gai_res = await get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type,
proto=proto, flags=flags)
return [(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
for family, type, proto, canonname, sockaddr in gai_res]
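# Editorial behaviour note: a non-ASCII host name is IDNA-encoded before the
# lookup, e.g. getaddrinfo('bücher.de', 80) queries b'xn--bcher-kva.de'.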
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]:
"""
Look up the host name of an IP address.
:param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
:param flags: flags to pass to upstream ``getnameinfo()``
:return: a tuple of (host name, service name)
.. seealso:: :func:`socket.getnameinfo`
"""
return get_asynclib().getnameinfo(sockaddr, flags)
def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
"""
Wait until the given socket has data to be read.
This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
(default on py3.8+).
.. warning:: Only use this on raw sockets that have not been wrapped by any higher level
constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become readable
"""
return get_asynclib().wait_socket_readable(sock)
def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
"""
Wait until the given socket can be written to.
This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
(default on py3.8+).
.. warning:: Only use this on raw sockets that have not been wrapped by any higher level
constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become writable
"""
return get_asynclib().wait_socket_writable(sock)
#
# Private API
#
def convert_ipv6_sockaddr(
sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]]
) -> Tuple[str, int]:
"""
Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.
If the scope ID is nonzero, it is added to the address, separated with ``%``.
Otherwise the flow id and scope id are simply cut off from the tuple.
Any other kinds of socket addresses are returned as-is.
:param sockaddr: the result of :meth:`~socket.socket.getsockname`
:return: the converted socket address
"""
# This is more complicated than it should be because of MyPy
if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr)
if scope_id:
# Add scope_id to the address
return f"{host}%{scope_id}", port
else:
return host, port
else:
return cast(Tuple[str, int], sockaddr)
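# Editorial behaviour sketch for convert_ipv6_sockaddr(), derived from the
# logic above (expected results shown as comments):
# convert_ipv6_sockaddr(('::1', 80, 0, 0))     -> ('::1', 80)
# convert_ipv6_sockaddr(('fe80::1', 80, 0, 3)) -> ('fe80::1%3', 80)
# convert_ipv6_sockaddr(('127.0.0.1', 80))     -> ('127.0.0.1', 80)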

View file

@ -0,0 +1,42 @@
import math
from typing import Any, Optional, Tuple, Type, TypeVar, overload
from ..streams.memory import (
MemoryObjectReceiveStream, MemoryObjectSendStream, MemoryObjectStreamState)
T_Item = TypeVar('T_Item')
@overload
def create_memory_object_stream(
max_buffer_size: float, item_type: Type[T_Item]
) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
...
@overload
def create_memory_object_stream(
max_buffer_size: float = 0
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
...
def create_memory_object_stream(
max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
"""
Create a memory object stream.
:param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
:param item_type: type of item, for marking the streams with the right generic type for
static typing (not used at run time)
:return: a tuple of (send stream, receive stream)
"""
if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
raise ValueError('max_buffer_size must be either an integer or math.inf')
if max_buffer_size < 0:
raise ValueError('max_buffer_size cannot be negative')
state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)
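# Illustrative usage sketch (editorial): a bounded stream; send() starts
# blocking once the buffer holds max_buffer_size items, until a receiver
# catches up.
async def _stream_demo() -> None:
    send, receive = create_memory_object_stream(1, item_type=str)
    await send.send('hello')
    assert await receive.receive() == 'hello'
    await send.aclose()
    await receive.aclose()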

View file

@ -0,0 +1,99 @@
from io import BytesIO
from os import PathLike
from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
from typing import AsyncIterable, List, Mapping, Optional, Sequence, Union, cast
from ..abc import Process
from ._eventloop import get_asynclib
from ._tasks import create_task_group
async def run_process(command: Union[str, Sequence[str]], *, input: Optional[bytes] = None,
stdout: int = PIPE, stderr: int = PIPE, check: bool = True,
cwd: Union[str, bytes, 'PathLike[str]', None] = None,
env: Optional[Mapping[str, str]] = None, start_new_session: bool = False,
) -> 'CompletedProcess[bytes]':
"""
Run an external command in a subprocess and wait until it completes.
.. seealso:: :func:`subprocess.run`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param input: bytes passed to the standard input of the subprocess
:param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
:data:`subprocess.STDOUT`
:param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
terminates with a return code other than 0
:param cwd: If not ``None``, change the working directory to this before running the command
:param env: if not ``None``, this mapping replaces the inherited environment variables from the
parent process
:param start_new_session: if ``True``, the setsid() system call will be made in the child
process prior to the execution of the subprocess (POSIX only)
:return: an object representing the completed process
:raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
nonzero return code
"""
async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
buffer = BytesIO()
async for chunk in stream:
buffer.write(chunk)
stream_contents[index] = buffer.getvalue()
async with await open_process(command, stdin=PIPE if input else DEVNULL, stdout=stdout,
stderr=stderr, cwd=cwd, env=env,
start_new_session=start_new_session) as process:
stream_contents: List[Optional[bytes]] = [None, None]
try:
async with create_task_group() as tg:
if process.stdout:
tg.start_soon(drain_stream, process.stdout, 0)
if process.stderr:
tg.start_soon(drain_stream, process.stderr, 1)
if process.stdin and input:
await process.stdin.send(input)
await process.stdin.aclose()
await process.wait()
except BaseException:
process.kill()
raise
output, errors = stream_contents
if check and process.returncode != 0:
raise CalledProcessError(cast(int, process.returncode), command, output, errors)
return CompletedProcess(command, cast(int, process.returncode), output, errors)
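# Illustrative usage sketch for run_process() (editorial); the command is
# illustrative.
async def _run_demo() -> None:
    result = await run_process(['echo', 'hello'])
    print(result.stdout)  # b'hello\n'
    # with check=True (the default), a nonzero exit raises CalledProcessError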
async def open_process(command: Union[str, Sequence[str]], *, stdin: int = PIPE,
stdout: int = PIPE, stderr: int = PIPE,
cwd: Union[str, bytes, 'PathLike[str]', None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False) -> Process:
"""
Start an external command in a subprocess.
.. seealso:: :class:`subprocess.Popen`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param stdin: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
:param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
:data:`subprocess.STDOUT`
:param cwd: If not ``None``, the working directory is changed before executing
:param env: If env is not ``None``, it must be a mapping that defines the environment
variables for the new process
:param start_new_session: if ``True``, the setsid() system call will be made in the child
process prior to the execution of the subprocess (POSIX only)
:return: an asynchronous process object
"""
shell = isinstance(command, str)
return await get_asynclib().open_process(command, shell=shell, stdin=stdin, stdout=stdout,
stderr=stderr, cwd=cwd, env=env,
start_new_session=start_new_session)

View file

@ -0,0 +1,566 @@
from collections import deque
from dataclasses import dataclass
from types import TracebackType
from typing import Deque, Optional, Tuple, Type
from warnings import warn
from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled
from ._compat import DeprecatedAwaitable
from ._eventloop import get_asynclib
from ._exceptions import BusyResourceError, WouldBlock
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task
@dataclass(frozen=True)
class EventStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
"""
tasks_waiting: int
@dataclass(frozen=True)
class CapacityLimiterStatistics:
"""
:ivar int borrowed_tokens: number of tokens currently borrowed by tasks
:ivar float total_tokens: total number of available tokens
:ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this
limiter
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or
:meth:`~.CapacityLimiter.acquire_on_behalf_of`
"""
borrowed_tokens: int
total_tokens: float
borrowers: Tuple[object, ...]
tasks_waiting: int
@dataclass(frozen=True)
class LockStatistics:
"""
:ivar bool locked: flag indicating if this lock is locked or not
:ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not
held by any task)
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
"""
locked: bool
owner: Optional[TaskInfo]
tasks_waiting: int
@dataclass(frozen=True)
class ConditionStatistics:
"""
:ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
:ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock`
"""
tasks_waiting: int
lock_statistics: LockStatistics
@dataclass(frozen=True)
class SemaphoreStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`
"""
tasks_waiting: int
class Event:
def __new__(cls) -> 'Event':
return get_asynclib().Event()
def set(self) -> DeprecatedAwaitable:
"""Set the flag, notifying all listeners."""
raise NotImplementedError
def is_set(self) -> bool:
"""Return ``True`` if the flag is set, ``False`` if not."""
raise NotImplementedError
async def wait(self) -> None:
"""
Wait until the flag has been set.
If the flag has already been set when this method is called, it returns immediately.
"""
raise NotImplementedError
def statistics(self) -> EventStatistics:
"""Return statistics about the current state of this event."""
raise NotImplementedError
class Lock:
_owner_task: Optional[TaskInfo] = None
def __init__(self) -> None:
self._waiters: Deque[Tuple[TaskInfo, Event]] = deque()
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
self.release()
async def acquire(self) -> None:
"""Acquire the lock."""
await checkpoint_if_cancelled()
try:
self.acquire_nowait()
except WouldBlock:
task = get_current_task()
event = Event()
token = task, event
self._waiters.append(token)
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(token)
elif self._owner_task == task:
self.release()
raise
assert self._owner_task == task
else:
try:
await cancel_shielded_checkpoint()
except BaseException:
self.release()
raise
def acquire_nowait(self) -> None:
"""
Acquire the lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
task = get_current_task()
if self._owner_task == task:
raise RuntimeError('Attempted to acquire an already held Lock')
if self._owner_task is not None:
raise WouldBlock
self._owner_task = task
def release(self) -> DeprecatedAwaitable:
"""Release the lock."""
if self._owner_task != get_current_task():
raise RuntimeError('The current task is not holding this lock')
if self._waiters:
self._owner_task, event = self._waiters.popleft()
event.set()
else:
del self._owner_task
return DeprecatedAwaitable(self.release)
def locked(self) -> bool:
"""Return True if the lock is currently held."""
return self._owner_task is not None
def statistics(self) -> LockStatistics:
"""
Return statistics about the current state of this lock.
.. versionadded:: 3.0
"""
return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
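# Illustrative usage sketch (editorial): Lock as an async context manager;
# __aenter__/__aexit__ call acquire()/release() as defined above.
async def _lock_demo(lock: Lock) -> None:
    async with lock:
        ...  # critical section; re-acquiring here would raise RuntimeError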
class Condition:
_owner_task: Optional[TaskInfo] = None
def __init__(self, lock: Optional[Lock] = None):
self._lock = lock or Lock()
self._waiters: Deque[Event] = deque()
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
self.release()
def _check_acquired(self) -> None:
if self._owner_task != get_current_task():
raise RuntimeError('The current task is not holding the underlying lock')
async def acquire(self) -> None:
"""Acquire the underlying lock."""
await self._lock.acquire()
self._owner_task = get_current_task()
def acquire_nowait(self) -> None:
"""
Acquire the underlying lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
self._lock.acquire_nowait()
self._owner_task = get_current_task()
def release(self) -> DeprecatedAwaitable:
"""Release the underlying lock."""
self._lock.release()
return DeprecatedAwaitable(self.release)
def locked(self) -> bool:
"""Return True if the lock is set."""
return self._lock.locked()
def notify(self, n: int = 1) -> None:
"""Notify exactly n listeners."""
self._check_acquired()
for _ in range(n):
try:
event = self._waiters.popleft()
except IndexError:
break
event.set()
def notify_all(self) -> None:
"""Notify all the listeners."""
self._check_acquired()
for event in self._waiters:
event.set()
self._waiters.clear()
async def wait(self) -> None:
"""Wait for a notification."""
await checkpoint()
event = Event()
self._waiters.append(event)
self.release()
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(event)
raise
finally:
with CancelScope(shield=True):
await self.acquire()
def statistics(self) -> ConditionStatistics:
"""
Return statistics about the current state of this condition.
.. versionadded:: 3.0
"""
return ConditionStatistics(len(self._waiters), self._lock.statistics())
class Semaphore:
def __init__(self, initial_value: int, *, max_value: Optional[int] = None):
if not isinstance(initial_value, int):
raise TypeError('initial_value must be an integer')
if initial_value < 0:
raise ValueError('initial_value must be >= 0')
if max_value is not None:
if not isinstance(max_value, int):
raise TypeError('max_value must be an integer or None')
if max_value < initial_value:
raise ValueError('max_value must be equal to or higher than initial_value')
self._value = initial_value
self._max_value = max_value
self._waiters: Deque[Event] = deque()
async def __aenter__(self) -> 'Semaphore':
await self.acquire()
return self
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
self.release()
async def acquire(self) -> None:
"""Decrement the semaphore value, blocking if necessary."""
await checkpoint_if_cancelled()
try:
self.acquire_nowait()
except WouldBlock:
event = Event()
self._waiters.append(event)
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(event)
else:
self.release()
raise
else:
try:
await cancel_shielded_checkpoint()
except BaseException:
self.release()
raise
def acquire_nowait(self) -> None:
"""
Decrement the semaphore value, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
if self._value == 0:
raise WouldBlock
self._value -= 1
def release(self) -> DeprecatedAwaitable:
"""Increment the semaphore value."""
if self._max_value is not None and self._value == self._max_value:
raise ValueError('semaphore released too many times')
if self._waiters:
self._waiters.popleft().set()
else:
self._value += 1
return DeprecatedAwaitable(self.release)
@property
def value(self) -> int:
"""The current value of the semaphore."""
return self._value
@property
def max_value(self) -> Optional[int]:
"""The maximum value of the semaphore."""
return self._max_value
def statistics(self) -> SemaphoreStatistics:
"""
Return statistics about the current state of this semaphore.
.. versionadded:: 3.0
"""
return SemaphoreStatistics(len(self._waiters))
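# Illustrative usage sketch (editorial): a bounded semaphore admitting two
# concurrent holders; releasing beyond max_value raises ValueError, per
# release() above.
async def _semaphore_demo() -> None:
    sem = Semaphore(2, max_value=2)
    async with sem:
        assert sem.value == 1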
class CapacityLimiter:
def __new__(cls, total_tokens: float) -> 'CapacityLimiter':
return get_asynclib().CapacityLimiter(total_tokens)
async def __aenter__(self) -> None:
raise NotImplementedError
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
raise NotImplementedError
@property
def total_tokens(self) -> float:
"""
The total number of tokens available for borrowing.
This is a read-write property. If the total number of tokens is increased, the
proportionate number of tasks waiting on this limiter will be granted their tokens.
.. versionchanged:: 3.0
The property is now writable.
"""
raise NotImplementedError
@total_tokens.setter
def total_tokens(self, value: float) -> None:
raise NotImplementedError
async def set_total_tokens(self, value: float) -> None:
warn('CapacityLimiter.set_total_tokens has been deprecated. Set the value of the '
'"total_tokens" attribute directly.', DeprecationWarning)
self.total_tokens = value
@property
def borrowed_tokens(self) -> int:
"""The number of tokens that have currently been borrowed."""
raise NotImplementedError
@property
def available_tokens(self) -> float:
"""The number of tokens currently available to be borrowed"""
raise NotImplementedError
def acquire_nowait(self) -> DeprecatedAwaitable:
"""
Acquire a token for the current task without waiting for one to become available.
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
"""
Acquire a token without waiting for one to become available.
:param borrower: the entity borrowing a token
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
async def acquire(self) -> None:
"""
Acquire a token for the current task, waiting if necessary for one to become available.
"""
raise NotImplementedError
async def acquire_on_behalf_of(self, borrower: object) -> None:
"""
Acquire a token, waiting if necessary for one to become available.
:param borrower: the entity borrowing a token
"""
raise NotImplementedError
def release(self) -> None:
"""
Release the token held by the current task.
:raises RuntimeError: if the current task has not borrowed a token from this limiter.
"""
raise NotImplementedError
def release_on_behalf_of(self, borrower: object) -> None:
"""
Release the token held by the given borrower.
:raises RuntimeError: if the borrower has not borrowed a token from this limiter.
"""
raise NotImplementedError
def statistics(self) -> CapacityLimiterStatistics:
"""
Return statistics about the current state of this limiter.
.. versionadded:: 3.0
"""
raise NotImplementedError
def create_lock() -> Lock:
"""
Create an asynchronous lock.
:return: a lock object
.. deprecated:: 3.0
Use :class:`~Lock` directly.
"""
warn('create_lock() is deprecated -- use Lock() directly', DeprecationWarning)
return Lock()
def create_condition(lock: Optional[Lock] = None) -> Condition:
"""
Create an asynchronous condition.
:param lock: the lock to base the condition object on
:return: a condition object
.. deprecated:: 3.0
Use :class:`~Condition` directly.
"""
warn('create_condition() is deprecated -- use Condition() directly', DeprecationWarning)
return Condition(lock=lock)
def create_event() -> Event:
"""
Create an asynchronous event object.
:return: an event object
.. deprecated:: 3.0
Use :class:`~Event` directly.
"""
warn('create_event() is deprecated -- use Event() directly', DeprecationWarning)
return get_asynclib().Event()
def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore:
"""
Create an asynchronous semaphore.
:param value: the semaphore's initial value
:param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the
semaphore's value would exceed this number
:return: a semaphore object
.. deprecated:: 3.0
Use :class:`~Semaphore` directly.
"""
warn('create_semaphore() is deprecated -- use Semaphore() directly', DeprecationWarning)
return Semaphore(value, max_value=max_value)
def create_capacity_limiter(total_tokens: float) -> CapacityLimiter:
"""
Create a capacity limiter.
:param total_tokens: the total number of tokens available for borrowing (can be an integer or
:data:`math.inf`)
:return: a capacity limiter object
.. deprecated:: 3.0
Use :class:`~CapacityLimiter` directly.
"""
warn('create_capacity_limiter() is deprecated -- use CapacityLimiter() directly',
DeprecationWarning)
return get_asynclib().CapacityLimiter(total_tokens)
class ResourceGuard:
__slots__ = 'action', '_guarded'
def __init__(self, action: str):
self.action = action
self._guarded = False
def __enter__(self) -> None:
if self._guarded:
raise BusyResourceError(self.action)
self._guarded = True
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
self._guarded = False
return None

View file

@ -0,0 +1,158 @@
import math
from types import TracebackType
from typing import Optional, Type
from warnings import warn
from ..abc._tasks import TaskGroup, TaskStatus
from ._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, DeprecatedAwaitableFloat
from ._eventloop import get_asynclib
class _IgnoredTaskStatus(TaskStatus):
def started(self, value: object = None) -> None:
pass
TASK_STATUS_IGNORED = _IgnoredTaskStatus()
class CancelScope(DeprecatedAsyncContextManager['CancelScope']):
"""
Wraps a unit of work that can be made separately cancellable.
:param deadline: The time (clock value) when this scope is cancelled automatically
:param shield: ``True`` to shield the cancel scope from external cancellation
"""
def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> 'CancelScope':
return get_asynclib().CancelScope(shield=shield, deadline=deadline)
def cancel(self) -> DeprecatedAwaitable:
"""Cancel this scope immediately."""
raise NotImplementedError
@property
def deadline(self) -> float:
"""
The time (clock value) when this scope is cancelled automatically.
Will be ``float('inf')`` if no timeout has been set.
"""
raise NotImplementedError
@deadline.setter
def deadline(self, value: float) -> None:
raise NotImplementedError
@property
def cancel_called(self) -> bool:
"""``True`` if :meth:`cancel` has been called."""
raise NotImplementedError
@property
def shield(self) -> bool:
"""
``True`` if this scope is shielded from external cancellation.
While a scope is shielded, it will not receive cancellations from outside.
"""
raise NotImplementedError
@shield.setter
def shield(self, value: bool) -> None:
raise NotImplementedError
def __enter__(self) -> 'CancelScope':
raise NotImplementedError
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
raise NotImplementedError
def open_cancel_scope(*, shield: bool = False) -> CancelScope:
"""
Open a cancel scope.
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
.. deprecated:: 3.0
Use :class:`~CancelScope` directly.
"""
warn('open_cancel_scope() is deprecated -- use CancelScope() directly', DeprecationWarning)
return get_asynclib().CancelScope(shield=shield)
class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
def __init__(self, cancel_scope: CancelScope):
self._cancel_scope = cancel_scope
def __enter__(self) -> CancelScope:
return self._cancel_scope.__enter__()
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
if self._cancel_scope.cancel_called:
raise TimeoutError
return retval
def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager:
"""
Create a context manager which raises a :class:`TimeoutError` if it does not finish in time.
:param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a context manager that yields a cancel scope
:rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\]
"""
deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf
cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
return FailAfterContextManager(cancel_scope)
def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope:
"""
Create a cancel scope with a deadline that expires after the given delay.
:param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
to disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
"""
deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf
return get_asynclib().CancelScope(deadline=deadline, shield=shield)
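# Illustrative usage sketch (editorial) contrasting the two helpers above:
# fail_after() raises TimeoutError on expiry, while move_on_after() exits the
# block silently.
async def _timeout_demo() -> None:
    from anyio import sleep  # illustrative import; sleep is part of anyio's public API
    with move_on_after(0.1) as scope:
        await sleep(1)
    assert scope.cancel_called  # the deadline expired and cancelled the block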
def current_effective_deadline() -> DeprecatedAwaitableFloat:
"""
Return the nearest deadline among all the cancel scopes effective for the current task.
:return: a clock value from the event loop's internal clock (``float('inf')`` if there is no
deadline in effect)
:rtype: float
"""
return DeprecatedAwaitableFloat(get_asynclib().current_effective_deadline(),
current_effective_deadline)
def create_task_group() -> 'TaskGroup':
"""
Create a task group.
:return: a task group
"""
return get_asynclib().TaskGroup()

View file

@ -0,0 +1,75 @@
from typing import Any, Awaitable, Generator, Optional, Union
from ._compat import DeprecatedAwaitableList, _warn_deprecation
from ._eventloop import get_asynclib
class TaskInfo:
"""
Represents an asynchronous task.
:ivar int id: the unique identifier of the task
:ivar parent_id: the identifier of the parent task, if any
:vartype parent_id: Optional[int]
:ivar str name: the description of the task (if any)
:ivar ~collections.abc.Coroutine coro: the coroutine object of the task
"""
__slots__ = '_name', 'id', 'parent_id', 'name', 'coro'
def __init__(self, id: int, parent_id: Optional[int], name: Optional[str],
coro: Union[Generator, Awaitable[Any]]):
func = get_current_task
self._name = f'{func.__module__}.{func.__qualname__}'
self.id: int = id
self.parent_id: Optional[int] = parent_id
self.name: Optional[str] = name
self.coro: Union[Generator, Awaitable[Any]] = coro
def __eq__(self, other: object) -> bool:
if isinstance(other, TaskInfo):
return self.id == other.id
return NotImplemented
def __hash__(self) -> int:
return hash(self.id)
def __repr__(self) -> str:
return f'{self.__class__.__name__}(id={self.id!r}, name={self.name!r})'
def __await__(self) -> Generator[None, None, "TaskInfo"]:
_warn_deprecation(self)
if False:
yield  # never executed; its presence makes this method a generator
return self
def _unwrap(self) -> 'TaskInfo':
return self
def get_current_task() -> TaskInfo:
"""
Return the current task.
:return: a representation of the current task
"""
return get_asynclib().get_current_task()
def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]:
"""
Return a list of running tasks in the current event loop.
:return: a list of task info objects
"""
tasks = get_asynclib().get_running_tasks()
return DeprecatedAwaitableList(tasks, func=get_running_tasks)
async def wait_all_tasks_blocked() -> None:
"""Wait until all other tasks are waiting for something."""
await get_asynclib().wait_all_tasks_blocked()
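# Illustrative usage sketch (editorial): TaskInfo compares and hashes by task
# id, so the current task can be located among the running tasks.
async def _tasks_demo() -> None:
    current = get_current_task()
    assert current in get_running_tasks()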

View file

@ -0,0 +1,79 @@
import sys
from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload
from ._exceptions import TypedAttributeLookupError
if sys.version_info >= (3, 8):
from typing import final
else:
from typing_extensions import final
T_Attr = TypeVar('T_Attr')
T_Default = TypeVar('T_Default')
undefined = object()
def typed_attribute() -> Any:
"""Return a unique object, used to mark typed attributes."""
return object()
class TypedAttributeSet:
"""
Superclass for typed attribute collections.
Checks that every public attribute of every subclass has a type annotation.
"""
def __init_subclass__(cls) -> None:
annotations: Dict[str, Any] = getattr(cls, '__annotations__', {})
for attrname in dir(cls):
if not attrname.startswith('_') and attrname not in annotations:
raise TypeError(f'Attribute {attrname!r} is missing its type annotation')
super().__init_subclass__()
class TypedAttributeProvider:
"""Base class for classes that wish to provide typed extra attributes."""
@property
def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
"""
A mapping of the extra attributes to callables that return the corresponding values.
If the provider wraps another provider, the attributes from that wrapper should also be
included in the returned mapping (but the wrapper may override the callables from the
wrapped instance).
"""
return {}
@overload
def extra(self, attribute: T_Attr) -> T_Attr:
...
@overload
def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]:
...
@final
def extra(self, attribute: Any, default: object = undefined) -> object:
"""
extra(attribute, default=undefined)
Return the value of the given typed extra attribute.
:param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for
:param default: the value that should be returned if no value is found for the attribute
:raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was
given
"""
try:
return self.extra_attributes[attribute]()
except KeyError:
if default is undefined:
raise TypedAttributeLookupError('Attribute not found') from None
else:
return default
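# Illustrative usage sketch (editorial) of the typed-attribute machinery above;
# the attribute set and provider below are illustrative, not part of anyio.
class _DemoAttributes(TypedAttributeSet):
    flavor: str = typed_attribute()

class _DemoProvider(TypedAttributeProvider):
    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {_DemoAttributes.flavor: lambda: 'vanilla'}

# _DemoProvider().extra(_DemoAttributes.flavor)  -> 'vanilla'
# _DemoProvider().extra(object(), 'fallback')    -> 'fallback'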

View file

@ -0,0 +1,37 @@
__all__ = ('AsyncResource', 'IPAddressType', 'IPSockAddrType', 'SocketAttribute', 'SocketStream',
'SocketListener', 'UDPSocket', 'UNIXSocketStream', 'UDPPacketType',
'ConnectedUDPSocket', 'UnreliableObjectReceiveStream', 'UnreliableObjectSendStream',
'UnreliableObjectStream', 'ObjectReceiveStream', 'ObjectSendStream', 'ObjectStream',
'ByteReceiveStream', 'ByteSendStream', 'ByteStream', 'AnyUnreliableByteReceiveStream',
'AnyUnreliableByteSendStream', 'AnyUnreliableByteStream', 'AnyByteReceiveStream',
'AnyByteSendStream', 'AnyByteStream', 'Listener', 'Process', 'Event',
'Condition', 'Lock', 'Semaphore', 'CapacityLimiter', 'CancelScope', 'TaskGroup',
'TaskStatus', 'TestRunner', 'BlockingPortal')
from typing import Any
from ._resources import AsyncResource
from ._sockets import (
ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketAttribute, SocketListener,
SocketStream, UDPPacketType, UDPSocket, UNIXSocketStream)
from ._streams import (
AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, AnyUnreliableByteReceiveStream,
AnyUnreliableByteSendStream, AnyUnreliableByteStream, ByteReceiveStream, ByteSendStream,
ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, ObjectStream,
UnreliableObjectReceiveStream, UnreliableObjectSendStream, UnreliableObjectStream)
from ._subprocesses import Process
from ._tasks import TaskGroup, TaskStatus
from ._testing import TestRunner
# Re-exported here, for backwards compatibility
# isort: off
from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore
from .._core._tasks import CancelScope
from ..from_thread import BlockingPortal
# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
if getattr(value, '__module__', '').startswith('anyio.abc.'):
value.__module__ = __name__

View file

@ -0,0 +1,26 @@
from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import Optional, Type, TypeVar
T = TypeVar("T")
class AsyncResource(metaclass=ABCMeta):
"""
Abstract base class for all closeable asynchronous resources.
Works as an asynchronous context manager which returns the instance itself on enter, and calls
:meth:`aclose` on exit.
"""
async def __aenter__(self: T) -> T:
return self
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
await self.aclose()
@abstractmethod
async def aclose(self) -> None:
"""Close the resource."""

View file

@ -0,0 +1,156 @@
import socket
from abc import abstractmethod
from io import IOBase
from ipaddress import IPv4Address, IPv6Address
from socket import AddressFamily
from types import TracebackType
from typing import (
Any, AsyncContextManager, Callable, Collection, Dict, List, Mapping, Optional, Tuple, Type,
TypeVar, Union)
from .._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute
from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream
from ._tasks import TaskGroup
IPAddressType = Union[str, IPv4Address, IPv6Address]
IPSockAddrType = Tuple[str, int]
SockAddrType = Union[IPSockAddrType, str]
UDPPacketType = Tuple[bytes, IPSockAddrType]
T_Retval = TypeVar('T_Retval')
class _NullAsyncContextManager:
async def __aenter__(self) -> None:
pass
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return None
class SocketAttribute(TypedAttributeSet):
#: the address family of the underlying socket
family: AddressFamily = typed_attribute()
#: the local socket address of the underlying socket
local_address: SockAddrType = typed_attribute()
#: for IP addresses, the local port the underlying socket is bound to
local_port: int = typed_attribute()
#: the underlying stdlib socket object
raw_socket: socket.socket = typed_attribute()
#: the remote address the underlying socket is connected to
remote_address: SockAddrType = typed_attribute()
#: for IP addresses, the remote port the underlying socket is connected to
remote_port: int = typed_attribute()
class _SocketProvider(TypedAttributeProvider):
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
from .._core._sockets import convert_ipv6_sockaddr as convert
attributes: Dict[Any, Callable[[], Any]] = {
SocketAttribute.family: lambda: self._raw_socket.family,
SocketAttribute.local_address: lambda: convert(self._raw_socket.getsockname()),
SocketAttribute.raw_socket: lambda: self._raw_socket
}
try:
peername: Optional[Tuple[str, int]] = convert(self._raw_socket.getpeername())
except OSError:
peername = None
# Provide the remote address for connected sockets
if peername is not None:
attributes[SocketAttribute.remote_address] = lambda: peername
# Provide local and remote ports for IP based sockets
if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
attributes[SocketAttribute.local_port] = lambda: self._raw_socket.getsockname()[1]
if peername is not None:
remote_port = peername[1]
attributes[SocketAttribute.remote_port] = lambda: remote_port
return attributes
@property
@abstractmethod
def _raw_socket(self) -> socket.socket:
pass
class SocketStream(ByteStream, _SocketProvider):
"""
Transports bytes over a socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
class UNIXSocketStream(SocketStream):
@abstractmethod
async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None:
"""
Send file descriptors along with a message to the peer.
:param message: a non-empty bytestring
:param fds: a collection of files (either numeric file descriptors or open file or socket
objects)
"""
@abstractmethod
async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
"""
Receive file descriptors along with a message from the peer.
:param msglen: length of the message to expect from the peer
:param maxfds: maximum number of file descriptors to expect from the peer
:return: a tuple of (message, file descriptors)
"""
class SocketListener(Listener[SocketStream], _SocketProvider):
"""
Listens to incoming socket connections.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
@abstractmethod
async def accept(self) -> SocketStream:
"""Accept an incoming connection."""
async def serve(self, handler: Callable[[T_Stream], Any],
task_group: Optional[TaskGroup] = None) -> None:
from .. import create_task_group
context_manager: AsyncContextManager
if task_group is None:
task_group = context_manager = create_task_group()
else:
# Can be replaced with AsyncExitStack once on py3.7+
context_manager = _NullAsyncContextManager()
async with context_manager:
while True:
stream = await self.accept()
task_group.start_soon(handler, stream)
class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
"""
Represents an unconnected UDP socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
async def sendto(self, data: bytes, host: str, port: int) -> None:
"""Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
return await self.send((data, (host, port)))
class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
"""
Represents a connected UDP socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""

View file

@ -0,0 +1,187 @@
from abc import abstractmethod
from typing import Any, Callable, Generic, Optional, TypeVar, Union
from .._core._exceptions import EndOfStream
from .._core._typedattr import TypedAttributeProvider
from ._resources import AsyncResource
from ._tasks import TaskGroup
T_Item = TypeVar('T_Item')
T_Stream = TypeVar('T_Stream')
class UnreliableObjectReceiveStream(Generic[T_Item], AsyncResource, TypedAttributeProvider):
"""
An interface for receiving objects.
This interface makes no guarantees that the received messages arrive in the order in which they
were sent, or that no messages are missed.
Asynchronously iterating over objects of this type will yield objects matching the given type
parameter.
"""
def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]":
return self
async def __anext__(self) -> T_Item:
try:
return await self.receive()
except EndOfStream:
raise StopAsyncIteration
@abstractmethod
async def receive(self) -> T_Item:
"""
Receive the next item.
:raises ~anyio.ClosedResourceError: if the receive stream has been explicitly
closed
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
due to external causes
"""
class UnreliableObjectSendStream(Generic[T_Item], AsyncResource, TypedAttributeProvider):
"""
An interface for sending objects.
This interface makes no guarantees that the messages sent will reach the recipient(s) in the
same order in which they were sent, or at all.
"""
@abstractmethod
async def send(self, item: T_Item) -> None:
"""
Send an item to the peer(s).
:param item: the item to send
:raises ~anyio.ClosedResourceError: if the send stream has been explicitly
closed
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
due to external causes
"""
class UnreliableObjectStream(UnreliableObjectReceiveStream[T_Item],
UnreliableObjectSendStream[T_Item]):
"""
A bidirectional message stream which does not guarantee the order or reliability of message
delivery.
"""
class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]):
"""
A receive message stream which guarantees that messages are received in the same order in
which they were sent, and that no messages are missed.
"""
class ObjectSendStream(UnreliableObjectSendStream[T_Item]):
"""
A send message stream which guarantees that messages are delivered in the same order in which
they were sent, without missing any messages in the middle.
"""
class ObjectStream(ObjectReceiveStream[T_Item], ObjectSendStream[T_Item],
UnreliableObjectStream[T_Item]):
"""
A bidirectional message stream which guarantees the order and reliability of message delivery.
"""
@abstractmethod
async def send_eof(self) -> None:
"""
Send an end-of-file indication to the peer.
You should not try to send any further data to this stream after calling this method.
This method is idempotent (does nothing on successive calls).
"""
class ByteReceiveStream(AsyncResource, TypedAttributeProvider):
"""
An interface for receiving bytes from a single peer.
Iterating this byte stream will yield a byte string of arbitrary length, but no more than
65536 bytes.
"""
def __aiter__(self) -> 'ByteReceiveStream':
return self
async def __anext__(self) -> bytes:
try:
return await self.receive()
except EndOfStream:
raise StopAsyncIteration
@abstractmethod
async def receive(self, max_bytes: int = 65536) -> bytes:
"""
Receive at most ``max_bytes`` bytes from the peer.
.. note:: Implementors of this interface should not return an empty :class:`bytes` object,
and users should ignore them.
:param max_bytes: maximum number of bytes to receive
:return: the received bytes
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
"""
class ByteSendStream(AsyncResource, TypedAttributeProvider):
"""An interface for sending bytes to a single peer."""
@abstractmethod
async def send(self, item: bytes) -> None:
"""
Send the given bytes to the peer.
:param item: the bytes to send
"""
class ByteStream(ByteReceiveStream, ByteSendStream):
"""A bidirectional byte stream."""
@abstractmethod
async def send_eof(self) -> None:
"""
Send an end-of-file indication to the peer.
You should not try to send any further data to this stream after calling this method.
This method is idempotent (does nothing on successive calls).
"""
#: Type alias for all unreliable bytes-oriented receive streams.
AnyUnreliableByteReceiveStream = Union[UnreliableObjectReceiveStream[bytes], ByteReceiveStream]
#: Type alias for all unreliable bytes-oriented send streams.
AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream]
#: Type alias for all unreliable bytes-oriented streams.
AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream]
#: Type alias for all bytes-oriented receive streams.
AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream]
#: Type alias for all bytes-oriented send streams.
AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream]
#: Type alias for all bytes-oriented streams.
AnyByteStream = Union[ObjectStream[bytes], ByteStream]
class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider):
"""An interface for objects that let you accept incoming connections."""
@abstractmethod
async def serve(self, handler: Callable[[T_Stream], Any],
task_group: Optional[TaskGroup] = None) -> None:
"""
Accept incoming connections as they come in and start tasks to handle them.
:param handler: a callable that will be used to handle each accepted connection
:param task_group: the task group that will be used to start tasks for handling each
accepted connection (if omitted, an ad-hoc task group will be created)
"""

@@ -0,0 +1,78 @@
from abc import abstractmethod
from signal import Signals
from typing import Optional
from ._resources import AsyncResource
from ._streams import ByteReceiveStream, ByteSendStream
class Process(AsyncResource):
"""An asynchronous version of :class:`subprocess.Popen`."""
@abstractmethod
async def wait(self) -> int:
"""
Wait until the process exits.
:return: the exit code of the process
"""
@abstractmethod
def terminate(self) -> None:
"""
Terminates the process, gracefully if possible.
On Windows, this calls ``TerminateProcess()``.
On POSIX systems, this sends ``SIGTERM`` to the process.
.. seealso:: :meth:`subprocess.Popen.terminate`
"""
@abstractmethod
def kill(self) -> None:
"""
Kills the process.
On Windows, this calls ``TerminateProcess()``.
On POSIX systems, this sends ``SIGKILL`` to the process.
.. seealso:: :meth:`subprocess.Popen.kill`
"""
@abstractmethod
def send_signal(self, signal: Signals) -> None:
"""
Send a signal to the subprocess.
.. seealso:: :meth:`subprocess.Popen.send_signal`
:param signal: the signal number (e.g. :data:`signal.SIGHUP`)
"""
@property
@abstractmethod
def pid(self) -> int:
"""The process ID of the process."""
@property
@abstractmethod
def returncode(self) -> Optional[int]:
"""
The return code of the process. If the process has not yet terminated, this will be
``None``.
"""
@property
@abstractmethod
def stdin(self) -> Optional[ByteSendStream]:
"""The stream for the standard input of the process."""
@property
@abstractmethod
def stdout(self) -> Optional[ByteReceiveStream]:
"""The stream for the standard output of the process."""
@property
@abstractmethod
def stderr(self) -> Optional[ByteReceiveStream]:
"""The stream for the standard error output of the process."""

@@ -0,0 +1,87 @@
import typing
from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import Any, Callable, Coroutine, Optional, Type, TypeVar
from warnings import warn
if typing.TYPE_CHECKING:
from anyio._core._tasks import CancelScope
T_Retval = TypeVar('T_Retval')
class TaskStatus(metaclass=ABCMeta):
@abstractmethod
def started(self, value: object = None) -> None:
"""
Signal that the task has started.
:param value: object passed back to the starter of the task
"""
class TaskGroup(metaclass=ABCMeta):
"""
Groups several asynchronous tasks together.
:ivar cancel_scope: the cancel scope inherited by all child tasks
:vartype cancel_scope: CancelScope
"""
cancel_scope: 'CancelScope'
async def spawn(self, func: Callable[..., Coroutine[Any, Any, Any]],
*args: object, name: object = None) -> None:
"""
Start a new task in this task group.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
.. deprecated:: 3.0
Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you
can keep using this until AnyIO 4.
"""
warn('spawn() is deprecated -- use start_soon() (without the "await") instead',
DeprecationWarning)
self.start_soon(func, *args, name=name)
@abstractmethod
def start_soon(self, func: Callable[..., Coroutine[Any, Any, Any]],
*args: object, name: object = None) -> None:
"""
Start a new task in this task group.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
.. versionadded:: 3.0
"""
@abstractmethod
async def start(self, func: Callable[..., Coroutine[Any, Any, Any]],
*args: object, name: object = None) -> object:
"""
Start a new task and wait until it signals for readiness.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
:return: the value passed to ``task_status.started()``
:raises RuntimeError: if the task finishes without calling ``task_status.started()``
.. versionadded:: 3.0
"""
@abstractmethod
async def __aenter__(self) -> 'TaskGroup':
"""Enter the task group context and allow starting new tasks."""
@abstractmethod
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
"""Exit the task group context waiting for all tasks to finish."""

@@ -0,0 +1,37 @@
import types
from abc import ABCMeta, abstractmethod
from typing import Any, Awaitable, Callable, Dict, Optional, Type, TypeVar
_T = TypeVar("_T")
class TestRunner(metaclass=ABCMeta):
"""
Encapsulates a running event loop. Every call made through this object will use the same event
loop.
"""
def __enter__(self) -> 'TestRunner':
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[types.TracebackType]) -> Optional[bool]:
self.close()
return None
@abstractmethod
def close(self) -> None:
"""Close the event loop."""
@abstractmethod
def call(self, func: Callable[..., Awaitable[_T]],
             *args: object, **kwargs: Any) -> _T:
"""
Call the given function within the backend's event loop.
:param func: a callable returning an awaitable
:param args: positional arguments to call ``func`` with
:param kwargs: keyword arguments to call ``func`` with
:return: the return value of ``func``
"""

@@ -0,0 +1,416 @@
import threading
from asyncio import iscoroutine
from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
from contextlib import AbstractContextManager, contextmanager
from types import TracebackType
from typing import (
Any, AsyncContextManager, Callable, ContextManager, Coroutine, Dict, Generator, Iterable,
Optional, Tuple, Type, TypeVar, Union, cast, overload)
from warnings import warn
from ._core import _eventloop
from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals
from ._core._synchronization import Event
from ._core._tasks import CancelScope, create_task_group
from .abc._tasks import TaskStatus
T_Retval = TypeVar('T_Retval')
T_co = TypeVar('T_co')
def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval:
"""
Call a coroutine function from a worker thread.
:param func: a coroutine function
:param args: positional arguments for the callable
:return: the return value of the coroutine function
"""
try:
asynclib = threadlocals.current_async_module
except AttributeError:
raise RuntimeError('This function can only be run from an AnyIO worker thread')
return asynclib.run_async_from_thread(func, *args)
def run_async_from_thread(func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object) -> T_Retval:
warn('run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead',
DeprecationWarning)
return run(func, *args)
def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval:
"""
Call a function in the event loop thread from a worker thread.
:param func: a callable
:param args: positional arguments for the callable
:return: the return value of the callable
"""
try:
asynclib = threadlocals.current_async_module
except AttributeError:
raise RuntimeError('This function can only be run from an AnyIO worker thread')
return asynclib.run_sync_from_thread(func, *args)
def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval:
warn('run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead',
DeprecationWarning)
return run_sync(func, *args)
class _BlockingAsyncContextManager(AbstractContextManager):
_enter_future: Future
_exit_future: Future
_exit_event: Event
_exit_exc_info: Tuple[Optional[Type[BaseException]], Optional[BaseException],
Optional[TracebackType]] = (None, None, None)
def __init__(self, async_cm: AsyncContextManager[T_co], portal: 'BlockingPortal'):
self._async_cm = async_cm
self._portal = portal
async def run_async_cm(self) -> Optional[bool]:
try:
self._exit_event = Event()
value = await self._async_cm.__aenter__()
except BaseException as exc:
self._enter_future.set_exception(exc)
raise
else:
self._enter_future.set_result(value)
try:
# Wait for the sync context manager to exit.
# This next statement can raise `get_cancelled_exc_class()` if
# something went wrong in a task group in this async context
# manager.
await self._exit_event.wait()
finally:
# In case of cancellation, it could be that we end up here before
# `_BlockingAsyncContextManager.__exit__` is called, and an
# `_exit_exc_info` has been set.
result = await self._async_cm.__aexit__(*self._exit_exc_info)
return result
def __enter__(self) -> T_co:
self._enter_future = Future()
self._exit_future = self._portal.start_task_soon(self.run_async_cm)
cm = self._enter_future.result()
return cast(T_co, cm)
def __exit__(self, __exc_type: Optional[Type[BaseException]],
__exc_value: Optional[BaseException],
__traceback: Optional[TracebackType]) -> Optional[bool]:
self._exit_exc_info = __exc_type, __exc_value, __traceback
self._portal.call(self._exit_event.set)
return self._exit_future.result()
class _BlockingPortalTaskStatus(TaskStatus):
def __init__(self, future: Future):
self._future = future
def started(self, value: object = None) -> None:
self._future.set_result(value)
class BlockingPortal:
"""An object that lets external threads run code in an asynchronous event loop."""
def __new__(cls) -> 'BlockingPortal':
return get_asynclib().BlockingPortal()
def __init__(self) -> None:
self._event_loop_thread_id: Optional[int] = threading.get_ident()
self._stop_event = Event()
self._task_group = create_task_group()
self._cancelled_exc_class = get_cancelled_exc_class()
async def __aenter__(self) -> 'BlockingPortal':
await self._task_group.__aenter__()
return self
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
await self.stop()
return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
def _check_running(self) -> None:
if self._event_loop_thread_id is None:
raise RuntimeError('This portal is not running')
if self._event_loop_thread_id == threading.get_ident():
raise RuntimeError('This method cannot be called from the event loop thread')
async def sleep_until_stopped(self) -> None:
"""Sleep until :meth:`stop` is called."""
await self._stop_event.wait()
async def stop(self, cancel_remaining: bool = False) -> None:
"""
Signal the portal to shut down.
This marks the portal as no longer accepting new calls and exits from
:meth:`sleep_until_stopped`.
:param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them
finish before returning
"""
self._event_loop_thread_id = None
self._stop_event.set()
if cancel_remaining:
self._task_group.cancel_scope.cancel()
async def _call_func(self, func: Callable, args: tuple, kwargs: Dict[str, Any],
future: Future) -> None:
def callback(f: Future) -> None:
if f.cancelled() and self._event_loop_thread_id not in (None, threading.get_ident()):
self.call(scope.cancel)
try:
retval = func(*args, **kwargs)
if iscoroutine(retval):
with CancelScope() as scope:
if future.cancelled():
scope.cancel()
else:
future.add_done_callback(callback)
retval = await retval
except self._cancelled_exc_class:
future.cancel()
except BaseException as exc:
if not future.cancelled():
future.set_exception(exc)
# Let base exceptions fall through
if not isinstance(exc, Exception):
raise
else:
if not future.cancelled():
future.set_result(retval)
finally:
scope = None # type: ignore[assignment]
def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any],
name: object, future: Future) -> None:
"""
Spawn a new task using the given callable.
Implementors must ensure that the future is resolved when the task finishes.
:param func: a callable
:param args: positional arguments to be passed to the callable
:param kwargs: keyword arguments to be passed to the callable
:param name: name of the task (will be coerced to a string if not ``None``)
:param future: a future that will resolve to the return value of the callable, or the
exception raised during its execution
"""
raise NotImplementedError
@overload
def call(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval:
...
@overload
def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval:
...
def call(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object) -> T_Retval:
"""
Call the given function in the event loop thread.
If the callable returns a coroutine object, it is awaited on.
:param func: any callable
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
"""
return cast(T_Retval, self.start_task_soon(func, *args).result())
@overload
def spawn_task(self, func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object, name: object = None) -> "Future[T_Retval]":
...
@overload
def spawn_task(self, func: Callable[..., T_Retval],
*args: object, name: object = None) -> "Future[T_Retval]": ...
def spawn_task(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object, name: object = None) -> "Future[T_Retval]":
"""
Start a task in the portal's task group.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a future that resolves with the return value of the callable if the task completes
successfully, or with the exception raised in the task
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
.. versionadded:: 2.1
.. deprecated:: 3.0
Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you
can keep using this until AnyIO 4.
"""
warn('spawn_task() is deprecated -- use start_task_soon() instead', DeprecationWarning)
return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type]
@overload
def start_task_soon(self, func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object, name: object = None) -> "Future[T_Retval]":
...
@overload
def start_task_soon(self, func: Callable[..., T_Retval],
*args: object, name: object = None) -> "Future[T_Retval]": ...
def start_task_soon(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object, name: object = None) -> "Future[T_Retval]":
"""
Start a task in the portal's task group.
The task will be run inside a cancel scope which can be cancelled by cancelling the
returned future.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a future that resolves with the return value of the callable if the task completes
successfully, or with the exception raised in the task
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
.. versionadded:: 3.0
"""
self._check_running()
f: Future = Future()
self._spawn_task_from_thread(func, args, {}, name, f)
return f
def start_task(self, func: Callable[..., Coroutine[Any, Any, Any]], *args: object,
name: object = None) -> Tuple['Future[Any]', Any]:
"""
Start a task in the portal's task group and wait until it signals for readiness.
This method works the same way as :meth:`TaskGroup.start`.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a tuple of (future, task_status_value) where the ``task_status_value`` is the
value passed to ``task_status.started()`` from within the target function
.. versionadded:: 3.0
"""
def task_done(future: Future) -> None:
if not task_status_future.done():
if future.cancelled():
task_status_future.cancel()
elif future.exception():
task_status_future.set_exception(future.exception())
else:
exc = RuntimeError('Task exited without calling task_status.started()')
task_status_future.set_exception(exc)
self._check_running()
task_status_future: Future = Future()
task_status = _BlockingPortalTaskStatus(task_status_future)
f: Future = Future()
f.add_done_callback(task_done)
self._spawn_task_from_thread(func, args, {'task_status': task_status}, name, f)
return f, task_status_future.result()
def wrap_async_context_manager(self, cm: AsyncContextManager[T_co]) -> ContextManager[T_co]:
"""
Wrap an async context manager as a synchronous context manager via this portal.
Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the
middle until the synchronous context manager exits.
:param cm: an asynchronous context manager
:return: a synchronous context manager
.. versionadded:: 2.1
"""
return _BlockingAsyncContextManager(cm, self)
def create_blocking_portal() -> BlockingPortal:
"""
Create a portal for running functions in the event loop thread from external threads.
Use this function in asynchronous code when you need to allow external threads access to the
event loop where your asynchronous code is currently running.
.. deprecated:: 3.0
Use :class:`.BlockingPortal` directly.
"""
warn('create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() '
'directly', DeprecationWarning)
return BlockingPortal()
@contextmanager
def start_blocking_portal(
backend: str = 'asyncio',
backend_options: Optional[Dict[str, Any]] = None) -> Generator[BlockingPortal, Any, None]:
"""
Start a new event loop in a new thread and run a blocking portal in its main task.
The parameters are the same as for :func:`~anyio.run`.
:param backend: name of the backend
:param backend_options: backend options
:return: a context manager that yields a blocking portal
.. versionchanged:: 3.0
Usage as a context manager is now required.
"""
async def run_portal() -> None:
async with BlockingPortal() as portal_:
if future.set_running_or_notify_cancel():
future.set_result(portal_)
await portal_.sleep_until_stopped()
future: Future[BlockingPortal] = Future()
with ThreadPoolExecutor(1) as executor:
run_future = executor.submit(_eventloop.run, run_portal, backend=backend,
backend_options=backend_options)
try:
wait(cast(Iterable[Future], [run_future, future]), return_when=FIRST_COMPLETED)
except BaseException:
future.cancel()
run_future.cancel()
raise
if future.done():
portal = future.result()
try:
yield portal
except BaseException:
portal.call(portal.stop, True)
raise
portal.call(portal.stop, False)
run_future.result()
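# Usage sketch: driving async code from a plain (non-event-loop) thread.
def _portal_example() -> None:
    import anyio
    with start_blocking_portal() as portal:
        portal.call(anyio.sleep, 0.1)  # run in the loop and wait for the result
        future = portal.start_task_soon(anyio.sleep, 0.1)  # schedule, get a Future
        future.result()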

@@ -0,0 +1,160 @@
import enum
import sys
from dataclasses import dataclass
from typing import Any, Dict, Generic, Set, TypeVar, Union, overload
from weakref import WeakKeyDictionary
from ._core._eventloop import get_asynclib
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
T = TypeVar('T')
D = TypeVar('D')
async def checkpoint() -> None:
"""
Check for cancellation and allow the scheduler to switch to another task.
Equivalent to (but more efficient than)::
await checkpoint_if_cancelled()
await cancel_shielded_checkpoint()
.. versionadded:: 3.0
"""
await get_asynclib().checkpoint()
async def checkpoint_if_cancelled() -> None:
"""
Enter a checkpoint if the enclosing cancel scope has been cancelled.
This does not allow the scheduler to switch to a different task.
.. versionadded:: 3.0
"""
await get_asynclib().checkpoint_if_cancelled()
async def cancel_shielded_checkpoint() -> None:
"""
Allow the scheduler to switch to another task but without checking for cancellation.
Equivalent to (but potentially more efficient than)::
with CancelScope(shield=True):
await checkpoint()
.. versionadded:: 3.0
"""
await get_asynclib().cancel_shielded_checkpoint()
def current_token() -> object:
"""Return a backend specific token object that can be used to get back to the event loop."""
return get_asynclib().current_token()
_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]]
_token_wrappers: Dict[Any, '_TokenWrapper'] = {}
@dataclass(frozen=True)
class _TokenWrapper:
__slots__ = '_token', '__weakref__'
_token: object
class _NoValueSet(enum.Enum):
NO_VALUE_SET = enum.auto()
class RunvarToken(Generic[T]):
__slots__ = '_var', '_value', '_redeemed'
def __init__(self, var: 'RunVar[T]', value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]):
self._var = var
self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value
self._redeemed = False
class RunVar(Generic[T]):
"""Like a :class:`~contextvars.ContextVar`, expect scoped to the running event loop."""
__slots__ = '_name', '_default'
NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
_token_wrappers: Set[_TokenWrapper] = set()
def __init__(self, name: str,
default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET):
self._name = name
self._default = default
@property
def _current_vars(self) -> Dict[str, T]:
token = current_token()
while True:
try:
return _run_vars[token]
except TypeError:
                # Happens when the token isn't weakly referenceable (TrioToken).
# This workaround does mean that some memory will leak on Trio until the problem
# is fixed on their end.
token = _TokenWrapper(token)
self._token_wrappers.add(token)
except KeyError:
run_vars = _run_vars[token] = {}
return run_vars
@overload
def get(self, default: D) -> Union[T, D]: ...
@overload
def get(self) -> T: ...
def get(
self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET
) -> Union[T, D]:
try:
return self._current_vars[self._name]
except KeyError:
if default is not RunVar.NO_VALUE_SET:
return default
elif self._default is not RunVar.NO_VALUE_SET:
return self._default
raise LookupError(f'Run variable "{self._name}" has no value and no default set')
def set(self, value: T) -> RunvarToken[T]:
current_vars = self._current_vars
token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
current_vars[self._name] = value
return token
def reset(self, token: RunvarToken[T]) -> None:
if token._var is not self:
raise ValueError('This token does not belong to this RunVar')
if token._redeemed:
raise ValueError('This token has already been used')
if token._value is _NoValueSet.NO_VALUE_SET:
try:
del self._current_vars[self._name]
except KeyError:
pass
else:
self._current_vars[self._name] = token._value
token._redeemed = True
def __repr__(self) -> str:
return f'<RunVar name={self._name!r}>'
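# Usage sketch: a RunVar holds one value per running event loop, unlike a
# ContextVar, which follows the current context.
_request_counter = RunVar[int]('_request_counter', default=0)
async def _bump_counter() -> int:
    value = _request_counter.get() + 1
    _request_counter.set(value)
    return value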

@@ -0,0 +1,152 @@
from contextlib import contextmanager
from inspect import isasyncgenfunction, iscoroutinefunction
from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, Tuple, cast
import pytest
import sniffio
from ._core._eventloop import get_all_backends, get_asynclib
from .abc import TestRunner
if TYPE_CHECKING:
from _pytest.config import Config
_current_runner: Optional[TestRunner] = None
def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]:
if isinstance(backend, str):
return backend, {}
elif isinstance(backend, tuple) and len(backend) == 2:
if isinstance(backend[0], str) and isinstance(backend[1], dict):
return cast(Tuple[str, Dict[str, Any]], backend)
raise TypeError('anyio_backend must be either a string or tuple of (string, dict)')
@contextmanager
def get_runner(backend_name: str, backend_options: Dict[str, Any]) -> Iterator[TestRunner]:
global _current_runner
if _current_runner:
yield _current_runner
return
asynclib = get_asynclib(backend_name)
token = None
if sniffio.current_async_library_cvar.get(None) is None:
# Since we're in control of the event loop, we can cache the name of the async library
token = sniffio.current_async_library_cvar.set(backend_name)
try:
backend_options = backend_options or {}
with asynclib.TestRunner(**backend_options) as runner:
_current_runner = runner
yield runner
finally:
_current_runner = None
if token:
sniffio.current_async_library_cvar.reset(token)
def pytest_configure(config: "Config") -> None:
config.addinivalue_line('markers', 'anyio: mark the (coroutine function) test to be run '
'asynchronously via anyio.')
def pytest_fixture_setup(fixturedef: Any, request: Any) -> None:
def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def]
backend_name, backend_options = extract_backend_and_options(anyio_backend)
if has_backend_arg:
kwargs['anyio_backend'] = anyio_backend
with get_runner(backend_name, backend_options) as runner:
if isasyncgenfunction(func):
gen = func(*args, **kwargs)
try:
value = runner.call(gen.asend, None)
except StopAsyncIteration:
raise RuntimeError('Async generator did not yield')
yield value
try:
runner.call(gen.asend, None)
except StopAsyncIteration:
pass
else:
runner.call(gen.aclose)
raise RuntimeError('Async generator fixture did not stop')
else:
yield runner.call(func, *args, **kwargs)
# Only apply this to coroutine functions and async generator functions in requests that involve
# the anyio_backend fixture
func = fixturedef.func
if isasyncgenfunction(func) or iscoroutinefunction(func):
if 'anyio_backend' in request.fixturenames:
has_backend_arg = 'anyio_backend' in fixturedef.argnames
fixturedef.func = wrapper
if not has_backend_arg:
fixturedef.argnames += ('anyio_backend',)
@pytest.hookimpl(tryfirst=True)
def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None:
if collector.istestfunction(obj, name):
inner_func = obj.hypothesis.inner_test if hasattr(obj, 'hypothesis') else obj
if iscoroutinefunction(inner_func):
marker = collector.get_closest_marker('anyio')
own_markers = getattr(obj, 'pytestmark', ())
if marker or any(marker.name == 'anyio' for marker in own_markers):
pytest.mark.usefixtures('anyio_backend')(obj)
@pytest.hookimpl(tryfirst=True)
def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]:
def run_with_hypothesis(**kwargs: Any) -> None:
with get_runner(backend_name, backend_options) as runner:
runner.call(original_func, **kwargs)
backend = pyfuncitem.funcargs.get('anyio_backend')
if backend:
backend_name, backend_options = extract_backend_and_options(backend)
if hasattr(pyfuncitem.obj, 'hypothesis'):
# Wrap the inner test function unless it's already wrapped
original_func = pyfuncitem.obj.hypothesis.inner_test
if original_func.__qualname__ != run_with_hypothesis.__qualname__:
if iscoroutinefunction(original_func):
pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis
return None
if iscoroutinefunction(pyfuncitem.obj):
funcargs = pyfuncitem.funcargs
testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
with get_runner(backend_name, backend_options) as runner:
runner.call(pyfuncitem.obj, **testargs)
return True
return None
@pytest.fixture(params=get_all_backends())
def anyio_backend(request: Any) -> Any:
return request.param
@pytest.fixture
def anyio_backend_name(anyio_backend: Any) -> str:
if isinstance(anyio_backend, str):
return anyio_backend
else:
return anyio_backend[0]
@pytest.fixture
def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]:
if isinstance(anyio_backend, str):
return {}
else:
return anyio_backend[1]
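# Usage sketch (hypothetical test module, not part of this package): a
# coroutine test opts in via the marker and runs once per backend returned by
# get_all_backends():
#
#     import anyio
#     import pytest
#
#     @pytest.mark.anyio
#     async def test_sleep(anyio_backend_name):
#         await anyio.sleep(0)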

@@ -0,0 +1,116 @@
from dataclasses import dataclass, field
from typing import Any, Callable, Mapping
from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead
from ..abc import AnyByteReceiveStream, ByteReceiveStream
@dataclass(eq=False)
class BufferedByteReceiveStream(ByteReceiveStream):
"""
    Wraps any bytes-based receive stream and uses an internal buffer to provide extra receiving
    capabilities, such as reading an exact number of bytes or reading until a delimiter is found.
"""
receive_stream: AnyByteReceiveStream
_buffer: bytearray = field(init=False, default_factory=bytearray)
_closed: bool = field(init=False, default=False)
async def aclose(self) -> None:
await self.receive_stream.aclose()
self._closed = True
@property
def buffer(self) -> bytes:
"""The bytes currently in the buffer."""
return bytes(self._buffer)
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.receive_stream.extra_attributes
async def receive(self, max_bytes: int = 65536) -> bytes:
if self._closed:
raise ClosedResourceError
if self._buffer:
chunk = bytes(self._buffer[:max_bytes])
del self._buffer[:max_bytes]
return chunk
elif isinstance(self.receive_stream, ByteReceiveStream):
return await self.receive_stream.receive(max_bytes)
else:
# With a bytes-oriented object stream, we need to handle any surplus bytes we get from
# the receive() call
chunk = await self.receive_stream.receive()
if len(chunk) > max_bytes:
# Save the surplus bytes in the buffer
self._buffer.extend(chunk[max_bytes:])
return chunk[:max_bytes]
else:
return chunk
async def receive_exactly(self, nbytes: int) -> bytes:
"""
Read exactly the given amount of bytes from the stream.
:param nbytes: the number of bytes to read
:return: the bytes read
:raises ~anyio.IncompleteRead: if the stream was closed before the requested
amount of bytes could be read from the stream
"""
while True:
remaining = nbytes - len(self._buffer)
if remaining <= 0:
retval = self._buffer[:nbytes]
del self._buffer[:nbytes]
return bytes(retval)
try:
if isinstance(self.receive_stream, ByteReceiveStream):
chunk = await self.receive_stream.receive(remaining)
else:
chunk = await self.receive_stream.receive()
except EndOfStream as exc:
raise IncompleteRead from exc
self._buffer.extend(chunk)
async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
"""
Read from the stream until the delimiter is found or max_bytes have been read.
:param delimiter: the marker to look for in the stream
:param max_bytes: maximum number of bytes that will be read before raising
:exc:`~anyio.DelimiterNotFound`
:return: the bytes read (not including the delimiter)
:raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
was found
:raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
bytes read up to the maximum allowed
"""
delimiter_size = len(delimiter)
offset = 0
while True:
# Check if the delimiter can be found in the current buffer
index = self._buffer.find(delimiter, offset)
if index >= 0:
found = self._buffer[:index]
                del self._buffer[:index + len(delimiter)]
return bytes(found)
# Check if the buffer is already at or over the limit
if len(self._buffer) >= max_bytes:
raise DelimiterNotFound(max_bytes)
# Read more data into the buffer from the socket
try:
data = await self.receive_stream.receive()
except EndOfStream as exc:
raise IncompleteRead from exc
# Move the offset forward and add the new data to the buffer
offset = max(len(self._buffer) - delimiter_size + 1, 0)
self._buffer.extend(data)
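# Usage sketch: line-oriented reading on top of any byte stream.
async def _read_line(stream: AnyByteReceiveStream) -> bytes:
    buffered = BufferedByteReceiveStream(stream)
    # Read until CRLF, refusing to buffer more than 64 KiB while searching
    return await buffered.receive_until(b'\r\n', 65536)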

@@ -0,0 +1,139 @@
from io import SEEK_SET, UnsupportedOperation
from os import PathLike
from pathlib import Path
from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast
from .. import (
BrokenResourceError, ClosedResourceError, EndOfStream, TypedAttributeSet, to_thread,
typed_attribute)
from ..abc import ByteReceiveStream, ByteSendStream
class FileStreamAttribute(TypedAttributeSet):
#: the open file descriptor
file: BinaryIO = typed_attribute()
#: the path of the file on the file system, if available (file must be a real file)
path: Path = typed_attribute()
#: the file number, if available (file must be a real file or a TTY)
fileno: int = typed_attribute()
class _BaseFileStream:
def __init__(self, file: BinaryIO):
self._file = file
async def aclose(self) -> None:
await to_thread.run_sync(self._file.close)
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
attributes: Dict[Any, Callable[[], Any]] = {
FileStreamAttribute.file: lambda: self._file,
}
if hasattr(self._file, 'name'):
attributes[FileStreamAttribute.path] = lambda: Path(self._file.name)
try:
self._file.fileno()
except UnsupportedOperation:
pass
else:
attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno()
return attributes
class FileReadStream(_BaseFileStream, ByteReceiveStream):
"""
A byte stream that reads from a file in the file system.
:param file: a file that has been opened for reading in binary mode
.. versionadded:: 3.0
"""
@classmethod
async def from_path(cls, path: Union[str, 'PathLike[str]']) -> 'FileReadStream':
"""
Create a file read stream by opening the given file.
:param path: path of the file to read from
"""
file = await to_thread.run_sync(Path(path).open, 'rb')
return cls(cast(BinaryIO, file))
async def receive(self, max_bytes: int = 65536) -> bytes:
try:
data = await to_thread.run_sync(self._file.read, max_bytes)
except ValueError:
raise ClosedResourceError from None
except OSError as exc:
raise BrokenResourceError from exc
if data:
return data
else:
raise EndOfStream
async def seek(self, position: int, whence: int = SEEK_SET) -> int:
"""
Seek the file to the given position.
.. seealso:: :meth:`io.IOBase.seek`
.. note:: Not all file descriptors are seekable.
:param position: position to seek the file to
:param whence: controls how ``position`` is interpreted
:return: the new absolute position
:raises OSError: if the file is not seekable
"""
return await to_thread.run_sync(self._file.seek, position, whence)
async def tell(self) -> int:
"""
Return the current stream position.
.. note:: Not all file descriptors are seekable.
:return: the current absolute position
:raises OSError: if the file is not seekable
"""
return await to_thread.run_sync(self._file.tell)
class FileWriteStream(_BaseFileStream, ByteSendStream):
"""
A byte stream that writes to a file in the file system.
:param file: a file that has been opened for writing in binary mode
.. versionadded:: 3.0
"""
@classmethod
async def from_path(cls, path: Union[str, 'PathLike[str]'],
append: bool = False) -> 'FileWriteStream':
"""
Create a file write stream by opening the given file for writing.
:param path: path of the file to write to
:param append: if ``True``, open the file for appending; if ``False``, any existing file
at the given path will be truncated
"""
mode = 'ab' if append else 'wb'
file = await to_thread.run_sync(Path(path).open, mode)
return cls(cast(BinaryIO, file))
async def send(self, item: bytes) -> None:
try:
await to_thread.run_sync(self._file.write, item)
except ValueError:
raise ClosedResourceError from None
except OSError as exc:
raise BrokenResourceError from exc
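# Usage sketch: copying a file through the two stream classes above.
async def _copy_file(src: str, dst: str) -> None:
    async with await FileReadStream.from_path(src) as source, \
            await FileWriteStream.from_path(dst) as sink:
        try:
            while True:
                await sink.send(await source.receive())
        except EndOfStream:  # raised once the source file is exhausted
            pass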

@@ -0,0 +1,124 @@
from collections import OrderedDict, deque
from dataclasses import dataclass, field
from types import TracebackType
from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar
from .. import (
BrokenResourceError, ClosedResourceError, EndOfStream, WouldBlock, get_cancelled_exc_class)
from .._core._compat import DeprecatedAwaitable
from ..abc import Event, ObjectReceiveStream, ObjectSendStream
from ..lowlevel import checkpoint
T_Item = TypeVar('T_Item')
class MemoryObjectStreamStatistics(NamedTuple):
current_buffer_used: int #: number of items stored in the buffer
#: maximum number of items that can be stored on this stream (or :data:`math.inf`)
max_buffer_size: float
open_send_streams: int #: number of unclosed clones of the send stream
open_receive_streams: int #: number of unclosed clones of the receive stream
tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send`
#: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive`
tasks_waiting_receive: int
@dataclass(eq=False)
class MemoryObjectStreamState(Generic[T_Item]):
max_buffer_size: float = field()
buffer: Deque[T_Item] = field(init=False, default_factory=deque)
open_send_channels: int = field(init=False, default=0)
open_receive_channels: int = field(init=False, default=0)
waiting_receivers: 'OrderedDict[Event, List[T_Item]]' = field(init=False,
default_factory=OrderedDict)
waiting_senders: 'OrderedDict[Event, T_Item]' = field(init=False, default_factory=OrderedDict)
def statistics(self) -> MemoryObjectStreamStatistics:
return MemoryObjectStreamStatistics(
len(self.buffer), self.max_buffer_size, self.open_send_channels,
self.open_receive_channels, len(self.waiting_senders), len(self.waiting_receivers))
@dataclass(eq=False)
class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]):
_state: MemoryObjectStreamState[T_Item]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:
self._state.open_receive_channels += 1
def receive_nowait(self) -> T_Item:
"""
Receive the next item if it can be done without waiting.
:return: the received item
:raises ~anyio.ClosedResourceError: if this send stream has been closed
:raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
closed from the sending end
:raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
waiting to send
"""
if self._closed:
raise ClosedResourceError
if self._state.waiting_senders:
# Get the item from the next sender
send_event, item = self._state.waiting_senders.popitem(last=False)
self._state.buffer.append(item)
send_event.set()
if self._state.buffer:
return self._state.buffer.popleft()
elif not self._state.open_send_channels:
raise EndOfStream
raise WouldBlock
async def receive(self) -> T_Item:
await checkpoint()
try:
return self.receive_nowait()
except WouldBlock:
# Add ourselves in the queue
receive_event = Event()
container: List[T_Item] = []
self._state.waiting_receivers[receive_event] = container
try:
await receive_event.wait()
except get_cancelled_exc_class():
# Ignore the immediate cancellation if we already received an item, so as not to
# lose it
if not container:
raise
finally:
self._state.waiting_receivers.pop(receive_event, None)
if container:
return container[0]
else:
raise EndOfStream
def clone(self) -> 'MemoryObjectReceiveStream[T_Item]':
"""
Create a clone of this receive stream.
Each clone can be closed separately. Only when all clones have been closed will the
receiving end of the memory stream be considered closed by the sending ends.
:return: the cloned stream
"""
if self._closed:
raise ClosedResourceError
return MemoryObjectReceiveStream(_state=self._state)
def close(self) -> None:
"""
Close the stream.
This works the exact same way as :meth:`aclose`, but is provided as a special case for the
benefit of synchronous callbacks.
"""
if not self._closed:
self._closed = True
self._state.open_receive_channels -= 1
if self._state.open_receive_channels == 0:
send_events = list(self._state.waiting_senders.keys())
for event in send_events:
event.set()
async def aclose(self) -> None:
self.close()
def statistics(self) -> MemoryObjectStreamStatistics:
"""
Return statistics about the current state of this stream.
.. versionadded:: 3.0
"""
return self._state.statistics()
def __enter__(self) -> 'MemoryObjectReceiveStream[T_Item]':
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
self.close()
@dataclass(eq=False)
class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]):
_state: MemoryObjectStreamState[T_Item]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:
self._state.open_send_channels += 1
def send_nowait(self, item: T_Item) -> DeprecatedAwaitable:
"""
Send an item immediately if it can be done without waiting.
:param item: the item to send
:raises ~anyio.ClosedResourceError: if this send stream has been closed
:raises ~anyio.BrokenResourceError: if the stream has been closed from the
receiving end
:raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
to receive
"""
if self._closed:
raise ClosedResourceError
if not self._state.open_receive_channels:
raise BrokenResourceError
if self._state.waiting_receivers:
receive_event, container = self._state.waiting_receivers.popitem(last=False)
container.append(item)
receive_event.set()
elif len(self._state.buffer) < self._state.max_buffer_size:
self._state.buffer.append(item)
else:
raise WouldBlock
return DeprecatedAwaitable(self.send_nowait)
async def send(self, item: T_Item) -> None:
await checkpoint()
try:
self.send_nowait(item)
except WouldBlock:
# Wait until there's someone on the receiving end
send_event = Event()
self._state.waiting_senders[send_event] = item
try:
await send_event.wait()
except BaseException:
self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type]
raise
if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type]
raise BrokenResourceError
def clone(self) -> 'MemoryObjectSendStream[T_Item]':
"""
Create a clone of this send stream.
Each clone can be closed separately. Only when all clones have been closed will the
sending end of the memory stream be considered closed by the receiving ends.
:return: the cloned stream
"""
if self._closed:
raise ClosedResourceError
return MemoryObjectSendStream(_state=self._state)
def close(self) -> None:
"""
Close the stream.
This works the exact same way as :meth:`aclose`, but is provided as a special case for the
benefit of synchronous callbacks.
"""
if not self._closed:
self._closed = True
self._state.open_send_channels -= 1
if self._state.open_send_channels == 0:
receive_events = list(self._state.waiting_receivers.keys())
self._state.waiting_receivers.clear()
for event in receive_events:
event.set()
async def aclose(self) -> None:
self.close()
def statistics(self) -> MemoryObjectStreamStatistics:
"""
Return statistics about the current state of this stream.
.. versionadded:: 3.0
"""
return self._state.statistics()
def __enter__(self) -> 'MemoryObjectSendStream[T_Item]':
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
self.close()
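# Usage sketch: anyio.create_memory_object_stream(), defined elsewhere in this
# package, builds the shared state and returns a (send, receive) pair of the
# classes above:
async def _memory_stream_example() -> None:
    import anyio
    send, receive = anyio.create_memory_object_stream(max_buffer_size=1)
    async with send, receive:
        await send.send('hello')
        assert await receive.receive() == 'hello'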

@@ -0,0 +1,256 @@
from dataclasses import dataclass
from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar
from ..abc import (
ByteReceiveStream, ByteSendStream, ByteStream, Listener, ObjectReceiveStream, ObjectSendStream,
ObjectStream, TaskGroup)
T_Item = TypeVar('T_Item')
T_Stream = TypeVar('T_Stream')
@dataclass(eq=False)
class StapledByteStream(ByteStream):
"""
Combines two byte streams into a single, bidirectional byte stream.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param ByteSendStream send_stream: the sending byte stream
:param ByteReceiveStream receive_stream: the receiving byte stream
"""
send_stream: ByteSendStream
receive_stream: ByteReceiveStream
async def receive(self, max_bytes: int = 65536) -> bytes:
return await self.receive_stream.receive(max_bytes)
async def send(self, item: bytes) -> None:
await self.send_stream.send(item)
async def send_eof(self) -> None:
await self.send_stream.aclose()
async def aclose(self) -> None:
await self.send_stream.aclose()
await self.receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes}
@dataclass(eq=False)
class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
"""
Combines two object streams into a single, bidirectional object stream.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param ObjectSendStream send_stream: the sending object stream
:param ObjectReceiveStream receive_stream: the receiving object stream
"""
send_stream: ObjectSendStream[T_Item]
receive_stream: ObjectReceiveStream[T_Item]
async def receive(self) -> T_Item:
return await self.receive_stream.receive()
async def send(self, item: T_Item) -> None:
await self.send_stream.send(item)
async def send_eof(self) -> None:
await self.send_stream.aclose()
async def aclose(self) -> None:
await self.send_stream.aclose()
await self.receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes}
@dataclass(eq=False)
class MultiListener(Generic[T_Stream], Listener[T_Stream]):
"""
Combines multiple listeners into one, serving connections from all of them at once.
Any MultiListeners in the given collection of listeners will have their listeners moved into
this one.
Extra attributes are provided from each listener, with each successive listener overriding any
conflicting attributes from the previous one.
:param listeners: listeners to serve
:type listeners: Sequence[Listener[T_Stream]]
"""
listeners: Sequence[Listener[T_Stream]]
def __post_init__(self) -> None:
listeners: List[Listener[T_Stream]] = []
for listener in self.listeners:
if isinstance(listener, MultiListener):
listeners.extend(listener.listeners)
del listener.listeners[:] # type: ignore[attr-defined]
else:
listeners.append(listener)
self.listeners = listeners
async def serve(self, handler: Callable[[T_Stream], Any],
task_group: Optional[TaskGroup] = None) -> None:
from .. import create_task_group
async with create_task_group() as tg:
for listener in self.listeners:
tg.start_soon(listener.serve, handler, task_group)
async def aclose(self) -> None:
for listener in self.listeners:
await listener.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
attributes: dict = {}
for listener in self.listeners:
attributes.update(listener.extra_attributes)
return attributes
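# Usage sketch: a bidirectional in-memory pipe assembled from two memory
# object stream pairs (create_memory_object_stream() is defined elsewhere in
# this package):
async def _stapled_example() -> None:
    from anyio import create_memory_object_stream
    send_a, receive_a = create_memory_object_stream(max_buffer_size=1)
    send_b, receive_b = create_memory_object_stream(max_buffer_size=1)
    side_one = StapledObjectStream(send_a, receive_b)
    side_two = StapledObjectStream(send_b, receive_a)
    await side_one.send('ping')
    assert await side_two.receive() == 'ping'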

@@ -0,0 +1,130 @@
import codecs
from dataclasses import InitVar, dataclass, field
from typing import Any, Callable, Mapping, Tuple
from ..abc import (
AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, ObjectReceiveStream, ObjectSendStream,
ObjectStream)
@dataclass(eq=False)
class TextReceiveStream(ObjectReceiveStream[str]):
"""
Stream wrapper that decodes bytes to strings using the given encoding.
Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely
received unicode characters as soon as they come in.
:param transport_stream: any bytes-based receive stream
:param encoding: character encoding to use for decoding bytes to strings (defaults to
``utf-8``)
:param errors: handling scheme for decoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteReceiveStream
encoding: InitVar[str] = 'utf-8'
errors: InitVar[str] = 'strict'
_decoder: codecs.IncrementalDecoder = field(init=False)
def __post_init__(self, encoding: str, errors: str) -> None:
decoder_class = codecs.getincrementaldecoder(encoding)
self._decoder = decoder_class(errors=errors)
async def receive(self) -> str:
while True:
chunk = await self.transport_stream.receive()
decoded = self._decoder.decode(chunk)
if decoded:
return decoded
async def aclose(self) -> None:
await self.transport_stream.aclose()
self._decoder.reset()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.transport_stream.extra_attributes
@dataclass(eq=False)
class TextSendStream(ObjectSendStream[str]):
"""
Sends strings to the wrapped stream as bytes using the given encoding.
:param AnyByteSendStream transport_stream: any bytes-based send stream
:param str encoding: character encoding to use for encoding strings to bytes (defaults to
``utf-8``)
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteSendStream
encoding: InitVar[str] = 'utf-8'
errors: str = 'strict'
_encoder: Callable[..., Tuple[bytes, int]] = field(init=False)
def __post_init__(self, encoding: str) -> None:
self._encoder = codecs.getencoder(encoding)
async def send(self, item: str) -> None:
encoded = self._encoder(item, self.errors)[0]
await self.transport_stream.send(encoded)
async def aclose(self) -> None:
await self.transport_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.transport_stream.extra_attributes
@dataclass(eq=False)
class TextStream(ObjectStream[str]):
"""
A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on
send.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param AnyByteStream transport_stream: any bytes-based stream
:param str encoding: character encoding to use for encoding/decoding strings to/from bytes
(defaults to ``utf-8``)
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteStream
encoding: InitVar[str] = 'utf-8'
errors: InitVar[str] = 'strict'
_receive_stream: TextReceiveStream = field(init=False)
_send_stream: TextSendStream = field(init=False)
def __post_init__(self, encoding: str, errors: str) -> None:
self._receive_stream = TextReceiveStream(self.transport_stream, encoding=encoding,
errors=errors)
self._send_stream = TextSendStream(self.transport_stream, encoding=encoding, errors=errors)
async def receive(self) -> str:
return await self._receive_stream.receive()
async def send(self, item: str) -> None:
await self._send_stream.send(item)
async def send_eof(self) -> None:
await self.transport_stream.send_eof()
async def aclose(self) -> None:
await self._send_stream.aclose()
await self._receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {**self._send_stream.extra_attributes, **self._receive_stream.extra_attributes}
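# Usage sketch: giving any byte stream a str-based interface.
async def _text_example(transport: AnyByteStream) -> None:
    text_stream = TextStream(transport, encoding='utf-8')
    await text_stream.send('héllo\n')  # encoded to bytes on the way out
    print(await text_stream.receive())  # decoded incrementally on the way in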

@@ -0,0 +1,281 @@
import logging
import re
import ssl
from dataclasses import dataclass
from functools import wraps
from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union
from .. import BrokenResourceError, EndOfStream, aclose_forcefully, get_cancelled_exc_class
from .._core._typedattr import TypedAttributeSet, typed_attribute
from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup
T_Retval = TypeVar('T_Retval')
_PCTRTT = Tuple[Tuple[str, str], ...]
_PCTRTTT = Tuple[_PCTRTT, ...]
class TLSAttribute(TypedAttributeSet):
"""Contains Transport Layer Security related attributes."""
#: the selected ALPN protocol
alpn_protocol: Optional[str] = typed_attribute()
#: the channel binding for type ``tls-unique``
channel_binding_tls_unique: bytes = typed_attribute()
#: the selected cipher
cipher: Tuple[str, str, int] = typed_attribute()
#: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more
#: information)
peer_certificate: Optional[Dict[str, Union[str, _PCTRTTT, _PCTRTT]]] = typed_attribute()
#: the peer certificate in binary form
peer_certificate_binary: Optional[bytes] = typed_attribute()
#: ``True`` if this is the server side of the connection
server_side: bool = typed_attribute()
#: ciphers shared between both ends of the TLS connection
shared_ciphers: List[Tuple[str, str, int]] = typed_attribute()
#: the :class:`~ssl.SSLObject` used for encryption
ssl_object: ssl.SSLObject = typed_attribute()
#: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being
#: closed
standard_compatible: bool = typed_attribute()
#: the TLS protocol version (e.g. ``TLSv1.2``)
tls_version: str = typed_attribute()
@dataclass(eq=False)
class TLSStream(ByteStream):
"""
A stream wrapper that encrypts all sent data and decrypts received data.
This class has no public initializer; use :meth:`wrap` instead.
All extra attributes from :class:`~TLSAttribute` are supported.
:var AnyByteStream transport_stream: the wrapped stream
"""
transport_stream: AnyByteStream
standard_compatible: bool
_ssl_object: ssl.SSLObject
_read_bio: ssl.MemoryBIO
_write_bio: ssl.MemoryBIO
@classmethod
async def wrap(cls, transport_stream: AnyByteStream, *, server_side: Optional[bool] = None,
hostname: Optional[str] = None, ssl_context: Optional[ssl.SSLContext] = None,
standard_compatible: bool = True) -> 'TLSStream':
"""
Wrap an existing stream with Transport Layer Security.
This performs a TLS handshake with the peer.
:param transport_stream: a bytes-transporting stream to wrap
:param server_side: ``True`` if this is the server side of the connection, ``False`` if
this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been
            provided, ``True`` otherwise). Used only to create a default context when an explicit
context has not been provided.
:param hostname: host name of the peer (if host name checking is desired)
:param ssl_context: the SSLContext object to use (if not provided, a secure default will be
created)
:param standard_compatible: if ``False``, skip the closing handshake when closing the
connection, and don't raise an exception if the peer does the same
:raises ~ssl.SSLError: if the TLS handshake fails
"""
if server_side is None:
server_side = not hostname
if not ssl_context:
purpose = ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
ssl_context = ssl.create_default_context(purpose)
# Re-enable detection of unexpected EOFs if it was disabled by Python
if hasattr(ssl, 'OP_IGNORE_UNEXPECTED_EOF'):
ssl_context.options ^= ssl.OP_IGNORE_UNEXPECTED_EOF # type: ignore[attr-defined]
bio_in = ssl.MemoryBIO()
bio_out = ssl.MemoryBIO()
ssl_object = ssl_context.wrap_bio(bio_in, bio_out, server_side=server_side,
server_hostname=hostname)
wrapper = cls(transport_stream=transport_stream,
standard_compatible=standard_compatible, _ssl_object=ssl_object,
_read_bio=bio_in, _write_bio=bio_out)
await wrapper._call_sslobject_method(ssl_object.do_handshake)
return wrapper
async def _call_sslobject_method(
self, func: Callable[..., T_Retval], *args: object
) -> T_Retval:
while True:
try:
result = func(*args)
except ssl.SSLWantReadError:
try:
# Flush any pending writes first
if self._write_bio.pending:
await self.transport_stream.send(self._write_bio.read())
data = await self.transport_stream.receive()
except EndOfStream:
self._read_bio.write_eof()
except OSError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
raise BrokenResourceError from exc
else:
self._read_bio.write(data)
except ssl.SSLWantWriteError:
await self.transport_stream.send(self._write_bio.read())
except ssl.SSLSyscallError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
raise BrokenResourceError from exc
except ssl.SSLError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
if (isinstance(exc, ssl.SSLEOFError)
or 'UNEXPECTED_EOF_WHILE_READING' in exc.strerror):
if self.standard_compatible:
raise BrokenResourceError from exc
else:
raise EndOfStream from None
raise
else:
# Flush any pending writes first
if self._write_bio.pending:
await self.transport_stream.send(self._write_bio.read())
return result
async def unwrap(self) -> Tuple[AnyByteStream, bytes]:
"""
Does the TLS closing handshake.
:return: a tuple of (wrapped byte stream, bytes left in the read buffer)
"""
await self._call_sslobject_method(self._ssl_object.unwrap)
self._read_bio.write_eof()
self._write_bio.write_eof()
return self.transport_stream, self._read_bio.read()
async def aclose(self) -> None:
if self.standard_compatible:
try:
await self.unwrap()
except BaseException:
await aclose_forcefully(self.transport_stream)
raise
await self.transport_stream.aclose()
async def receive(self, max_bytes: int = 65536) -> bytes:
data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
if not data:
raise EndOfStream
return data
async def send(self, item: bytes) -> None:
await self._call_sslobject_method(self._ssl_object.write, item)
async def send_eof(self) -> None:
tls_version = self.extra(TLSAttribute.tls_version)
match = re.match(r'TLSv(\d+)(?:\.(\d+))?', tls_version)
if match:
major, minor = int(match.group(1)), int(match.group(2) or 0)
if (major, minor) < (1, 3):
raise NotImplementedError(f'send_eof() requires at least TLSv1.3; current '
f'session uses {tls_version}')
raise NotImplementedError('send_eof() has not yet been implemented for TLS streams')
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
**self.transport_stream.extra_attributes,
TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding,
TLSAttribute.cipher: self._ssl_object.cipher,
TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(True),
TLSAttribute.server_side: lambda: self._ssl_object.server_side,
TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(),
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
TLSAttribute.ssl_object: lambda: self._ssl_object,
TLSAttribute.tls_version: self._ssl_object.version
}
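As a usage note, ``wrap`` is the public entry point for the class above. A minimal client-side sketch (the host name and port are placeholders, assuming some reachable TLS server):

```python
import anyio
from anyio.streams.tls import TLSAttribute, TLSStream

async def main() -> None:
    # Establish a plain TCP stream, then upgrade it with a TLS handshake
    tcp_stream = await anyio.connect_tcp('example.com', 443)
    tls_stream = await TLSStream.wrap(tcp_stream, hostname='example.com')
    print(tls_stream.extra(TLSAttribute.tls_version))  # e.g. 'TLSv1.3'
    await tls_stream.send(b'HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n')
    print(await tls_stream.receive())
    await tls_stream.aclose()

anyio.run(main)
```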
@dataclass(eq=False)
class TLSListener(Listener[TLSStream]):
"""
A convenience listener that wraps another listener and auto-negotiates a TLS session on every
accepted connection.
If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
called to do whatever post-mortem processing is deemed necessary.
Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.
:param Listener listener: the listener to wrap
:param ssl_context: the SSL context object
:param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
:param handshake_timeout: time limit for the TLS handshake
(passed to :func:`~anyio.fail_after`)
"""
listener: Listener[Any]
ssl_context: ssl.SSLContext
standard_compatible: bool = True
handshake_timeout: float = 30
@staticmethod
async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
f"""
Handle an exception raised during the TLS handshake.
This method does 3 things:
#. Forcefully closes the original stream
#. Logs the exception (unless it was a cancellation exception) using the ``{__name__}``
logger
#. Reraises the exception if it was a base exception or a cancellation exception
:param exc: the exception
:param stream: the original stream
"""
await aclose_forcefully(stream)
# Log all except cancellation exceptions
if not isinstance(exc, get_cancelled_exc_class()):
logging.getLogger(__name__).exception('Error during TLS handshake')
# Only reraise base exceptions and cancellation exceptions
if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
raise
async def serve(self, handler: Callable[[TLSStream], Any],
task_group: Optional[TaskGroup] = None) -> None:
@wraps(handler)
async def handler_wrapper(stream: AnyByteStream) -> None:
from .. import fail_after
try:
with fail_after(self.handshake_timeout):
wrapped_stream = await TLSStream.wrap(
stream, ssl_context=self.ssl_context,
standard_compatible=self.standard_compatible)
except BaseException as exc:
await self.handle_handshake_error(exc, stream)
else:
await handler(wrapped_stream)
await self.listener.serve(handler_wrapper, task_group)
async def aclose(self) -> None:
await self.listener.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
}
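A hedged server-side sketch of the listener above (the certificate paths and port are placeholders, not part of this module):

```python
import ssl
import anyio
from anyio.streams.tls import TLSListener, TLSStream

async def handle(stream: TLSStream) -> None:
    # The TLS handshake has already been negotiated by the listener
    await stream.send(b'hello\n')
    await stream.aclose()

async def main() -> None:
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain('cert.pem', 'key.pem')  # placeholder paths
    tcp_listener = await anyio.create_tcp_listener(local_port=8443)
    await TLSListener(tcp_listener, context).serve(handle)

anyio.run(main)
```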

@ -0,0 +1,229 @@
import os
import pickle
import subprocess
import sys
from collections import deque
from importlib.util import module_from_spec, spec_from_file_location
from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast
from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class
from ._core._exceptions import BrokenWorkerProcess
from ._core._subprocesses import open_process
from ._core._synchronization import CapacityLimiter
from ._core._tasks import CancelScope, fail_after
from .abc import ByteReceiveStream, ByteSendStream, Process
from .lowlevel import RunVar, checkpoint_if_cancelled
from .streams.buffered import BufferedByteReceiveStream
WORKER_MAX_IDLE_TIME = 300 # 5 minutes
T_Retval = TypeVar('T_Retval')
_process_pool_workers: RunVar[Set[Process]] = RunVar('_process_pool_workers')
_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar(
'_process_pool_idle_workers')
_default_process_limiter: RunVar[CapacityLimiter] = RunVar('_default_process_limiter')
async def run_sync(
func: Callable[..., T_Retval], *args: object, cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None) -> T_Retval:
"""
Call the given function with the given arguments in a worker process.
If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
the worker process running it will be abruptly terminated using SIGKILL (or
    ``TerminateProcess()`` on Windows).
:param func: a callable
:param args: positional arguments for the callable
:param cancellable: ``True`` to allow cancellation of the operation while it's running
    :param limiter: capacity limiter to use to limit the total number of processes running
(if omitted, the default limiter is used)
:return: an awaitable that yields the return value of the function.
"""
async def send_raw_command(pickled_cmd: bytes) -> object:
try:
await stdin.send(pickled_cmd)
response = await buffered.receive_until(b'\n', 50)
status, length = response.split(b' ')
if status not in (b'RETURN', b'EXCEPTION'):
raise RuntimeError(f'Worker process returned unexpected response: {response!r}')
pickled_response = await buffered.receive_exactly(int(length))
except BaseException as exc:
workers.discard(process)
try:
process.kill()
with CancelScope(shield=True):
await process.aclose()
except ProcessLookupError:
pass
if isinstance(exc, get_cancelled_exc_class()):
raise
else:
raise BrokenWorkerProcess from exc
retval = pickle.loads(pickled_response)
if status == b'EXCEPTION':
assert isinstance(retval, BaseException)
raise retval
else:
return retval
# First pickle the request before trying to reserve a worker process
await checkpoint_if_cancelled()
request = pickle.dumps(('run', func, args), protocol=pickle.HIGHEST_PROTOCOL)
# If this is the first run in this event loop thread, set up the necessary variables
try:
workers = _process_pool_workers.get()
idle_workers = _process_pool_idle_workers.get()
except LookupError:
workers = set()
idle_workers = deque()
_process_pool_workers.set(workers)
_process_pool_idle_workers.set(idle_workers)
get_asynclib().setup_process_pool_exit_at_shutdown(workers)
async with (limiter or current_default_process_limiter()):
# Pop processes from the pool (starting from the most recently used) until we find one that
# hasn't exited yet
process: Process
while idle_workers:
process, idle_since = idle_workers.pop()
if process.returncode is None:
stdin = cast(ByteSendStream, process.stdin)
buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout))
# Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or
# longer
now = current_time()
killed_processes: List[Process] = []
while idle_workers:
if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME:
break
process, idle_since = idle_workers.popleft()
process.kill()
workers.remove(process)
killed_processes.append(process)
with CancelScope(shield=True):
for process in killed_processes:
await process.aclose()
break
workers.remove(process)
else:
command = [sys.executable, '-u', '-m', __name__]
process = await open_process(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
try:
stdin = cast(ByteSendStream, process.stdin)
buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout))
with fail_after(20):
message = await buffered.receive(6)
if message != b'READY\n':
raise BrokenWorkerProcess(
f'Worker process returned unexpected response: {message!r}')
main_module_path = getattr(sys.modules['__main__'], '__file__', None)
pickled = pickle.dumps(('init', sys.path, main_module_path),
protocol=pickle.HIGHEST_PROTOCOL)
await send_raw_command(pickled)
except (BrokenWorkerProcess, get_cancelled_exc_class()):
raise
except BaseException as exc:
process.kill()
raise BrokenWorkerProcess('Error during worker process initialization') from exc
workers.add(process)
with CancelScope(shield=not cancellable):
try:
return cast(T_Retval, await send_raw_command(request))
finally:
if process in workers:
idle_workers.append((process, current_time()))
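A minimal usage sketch for ``run_sync`` above (the CPU-bound function is illustrative):

```python
import anyio
import anyio.to_process

def cpu_bound(n: int) -> int:
    return sum(i * i for i in range(n))

async def main() -> None:
    # Runs in a pooled worker process; the event loop stays responsive
    print(await anyio.to_process.run_sync(cpu_bound, 1_000_000))

anyio.run(main)
```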
def current_default_process_limiter() -> CapacityLimiter:
"""
Return the capacity limiter that is used by default to limit the number of worker processes.
:return: a capacity limiter object
"""
try:
return _default_process_limiter.get()
except LookupError:
limiter = CapacityLimiter(os.cpu_count() or 2)
_default_process_limiter.set(limiter)
return limiter
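Passing an explicit limiter overrides the default created above; a sketch bounding the pool to two worker processes:

```python
import anyio
import anyio.to_process

def work(n: int) -> int:
    return n * n

async def main() -> None:
    limiter = anyio.CapacityLimiter(2)  # at most two worker processes at once

    async def job(n: int) -> None:
        print(await anyio.to_process.run_sync(work, n, limiter=limiter))

    async with anyio.create_task_group() as tg:
        for n in range(8):
            tg.start_soon(job, n)

anyio.run(main)
```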
def process_worker() -> None:
# Redirect standard streams to os.devnull so that user code won't interfere with the
# parent-worker communication
stdin = sys.stdin
stdout = sys.stdout
sys.stdin = open(os.devnull)
sys.stdout = open(os.devnull, 'w')
stdout.buffer.write(b'READY\n')
while True:
retval = exception = None
try:
command, *args = pickle.load(stdin.buffer)
except EOFError:
return
except BaseException as exc:
exception = exc
else:
if command == 'run':
func, args = args
try:
retval = func(*args)
except BaseException as exc:
exception = exc
elif command == 'init':
main_module_path: Optional[str]
sys.path, main_module_path = args
del sys.modules['__main__']
if main_module_path:
# Load the parent's main module but as __mp_main__ instead of __main__
# (like multiprocessing does) to avoid infinite recursion
try:
spec = spec_from_file_location('__mp_main__', main_module_path)
if spec and spec.loader:
main = module_from_spec(spec)
spec.loader.exec_module(main)
sys.modules['__main__'] = main
except BaseException as exc:
exception = exc
try:
if exception is not None:
status = b'EXCEPTION'
pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL)
else:
status = b'RETURN'
pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL)
except BaseException as exc:
exception = exc
status = b'EXCEPTION'
pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL)
stdout.buffer.write(b'%s %d\n' % (status, len(pickled)))
stdout.buffer.write(pickled)
# Respect SIGTERM
if isinstance(exception, SystemExit):
raise exception
if __name__ == '__main__':
process_worker()

@ -0,0 +1,54 @@
from typing import Callable, Optional, TypeVar
from warnings import warn
from ._core._eventloop import get_asynclib
from .abc import CapacityLimiter
T_Retval = TypeVar('T_Retval')
async def run_sync(
func: Callable[..., T_Retval], *args: object, cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None) -> T_Retval:
"""
Call the given function with the given arguments in a worker thread.
If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
the thread will still run its course but its return value (or any raised exception) will be
ignored.
:param func: a callable
:param args: positional arguments for the callable
:param cancellable: ``True`` to allow cancellation of the operation
    :param limiter: capacity limiter to use to limit the total number of threads running
(if omitted, the default limiter is used)
:return: an awaitable that yields the return value of the function.
"""
return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable,
limiter=limiter)
async def run_sync_in_worker_thread(
func: Callable[..., T_Retval], *args: object, cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None) -> T_Retval:
warn('run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead',
DeprecationWarning)
return await run_sync(func, *args, cancellable=cancellable, limiter=limiter)
def current_default_thread_limiter() -> CapacityLimiter:
"""
Return the capacity limiter that is used by default to limit the number of concurrent threads.
:return: a capacity limiter object
"""
return get_asynclib().current_default_thread_limiter()
def current_default_worker_thread_limiter() -> CapacityLimiter:
warn('current_default_worker_thread_limiter() has been deprecated, '
'use anyio.to_thread.current_default_thread_limiter() instead',
DeprecationWarning)
return current_default_thread_limiter()
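A minimal sketch of the thread helpers above:

```python
import time
import anyio
import anyio.to_thread

async def main() -> None:
    # The blocking sleep runs in a worker thread; the event loop stays free
    await anyio.to_thread.run_sync(time.sleep, 1)
    limiter = anyio.to_thread.current_default_thread_limiter()
    print(limiter.total_tokens)  # default cap on concurrent worker threads

anyio.run(main)
```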

@ -0,0 +1,21 @@
MIT License
Copyright (c) [year] [fullname]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,145 @@
Metadata-Version: 2.1
Name: apischema
Version: 0.16.6
Summary: JSON (de)serialization, *GraphQL* and JSON schema generation using Python typing.
Home-page: https://github.com/wyfo/apischema
Author: Joseph Perez
Author-email: joperez@hotmail.fr
License: MIT
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.6
Description-Content-Type: text/markdown
License-File: LICENSE.txt
Requires-Dist: dataclasses (==0.7) ; python_version < "3.7"
Provides-Extra: examples
Requires-Dist: graphql-core (>=3.0.0) ; extra == 'examples'
Requires-Dist: attrs ; extra == 'examples'
Requires-Dist: docstring-parser ; extra == 'examples'
Requires-Dist: bson ; extra == 'examples'
Requires-Dist: orjson ; extra == 'examples'
Requires-Dist: pydantic ; extra == 'examples'
Requires-Dist: pytest ; extra == 'examples'
Requires-Dist: sqlalchemy ; extra == 'examples'
Provides-Extra: graphql
Requires-Dist: graphql-core (>=3.0.0) ; extra == 'graphql'
# apischema
JSON (de)serialization, *GraphQL* and JSON schema generation using Python typing.
*apischema* makes your life easier when dealing with API data.
## Documentation
[https://wyfo.github.io/apischema/](https://wyfo.github.io/apischema/)
## Install
```shell
pip install apischema
```
It requires only Python 3.6+ (plus the dataclasses [official backport](https://pypi.org/project/dataclasses/) on Python 3.6 only)
*PyPy3* is fully supported.
## Why another library?
(If you wonder how this differs from the *pydantic* library, see the [dedicated section of the documentation](https://wyfo.github.io/apischema/difference_with_pydantic/) — there are many differences.)
This library fulfills the following goals:
- stay as close as possible to the standard library (dataclasses, typing, etc.) — as a consequence we do not need plugins for editors/linters/etc.;
- be adaptable, provide tools to support any types (ORM, etc.);
- avoid dynamic things like using raw strings for attribute names - play nicely with your IDE.
No known alternative achieves all of this, and apischema is also [faster](https://wyfo.github.io/apischema/performance_and_benchmark) than all of them.
On top of that, because APIs are not only JSON, *apischema* is also a complete *GraphQL* library.
## Example
```python
from collections.abc import Collection
from dataclasses import dataclass, field
from uuid import UUID, uuid4
from graphql import print_schema
from pytest import raises
from apischema import ValidationError, deserialize, serialize
from apischema.graphql import graphql_schema
from apischema.json_schema import deserialization_schema
# Define a schema with standard dataclasses
@dataclass
class Resource:
id: UUID
name: str
tags: set[str] = field(default_factory=set)
# Get some data
uuid = uuid4()
data = {"id": str(uuid), "name": "wyfo", "tags": ["some_tag"]}
# Deserialize data
resource = deserialize(Resource, data)
assert resource == Resource(uuid, "wyfo", {"some_tag"})
# Serialize objects
assert serialize(Resource, resource) == data
# Validate during deserialization
with raises(ValidationError) as err: # pytest checks exception is raised
deserialize(Resource, {"id": "42", "name": "wyfo"})
assert err.value.errors == [
{"loc": ["id"], "msg": "badly formed hexadecimal UUID string"}
]
# Generate JSON Schema
assert deserialization_schema(Resource) == {
"$schema": "http://json-schema.org/draft/2020-12/schema#",
"type": "object",
"properties": {
"id": {"type": "string", "format": "uuid"},
"name": {"type": "string"},
"tags": {"type": "array", "items": {"type": "string"}, "uniqueItems": True},
},
"required": ["id", "name"],
"additionalProperties": False,
}
# Define GraphQL operations
def resources(tags: Collection[str] | None = None) -> Collection[Resource] | None:
...
# Generate GraphQL schema
schema = graphql_schema(query=[resources], id_types={UUID})
schema_str = """\
type Query {
resources(tags: [String!]): [Resource!]
}
type Resource {
id: ID!
name: String!
tags: [String!]!
}
"""
assert print_schema(schema) == schema_str
```
*apischema* works out of the box with your data model.
[*Let's start the apischema tour.*](https://wyfo.github.io/apischema/)
## Changelog
See [releases](https://github.com/wyfo/apischema/releases)

@ -0,0 +1,133 @@
apischema-0.16.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
apischema-0.16.6.dist-info/LICENSE.txt,sha256=ACwmltkrXIz5VsEQcrqljq-fat6ZXAMepjXGoe40KtE,1069
apischema-0.16.6.dist-info/METADATA,sha256=e25BznRCYVpAF92b5HEEZwFX47s_2PYY7BEo9bbCxFE,4727
apischema-0.16.6.dist-info/RECORD,,
apischema-0.16.6.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
apischema-0.16.6.dist-info/top_level.txt,sha256=zW0-6yTDoOL1hAiW09vIidai_o1RmEB0JfImev9wq44,10
apischema/__init__.py,sha256=CdbV9VkcY0ioAvF_nn9A3p5nn271HMtPoHU0Nb3OF-8,2404
apischema/__pycache__/__init__.cpython-39.pyc,,
apischema/__pycache__/aliases.cpython-39.pyc,,
apischema/__pycache__/cache.cpython-39.pyc,,
apischema/__pycache__/dataclasses.cpython-39.pyc,,
apischema/__pycache__/dependencies.cpython-39.pyc,,
apischema/__pycache__/dependent_required.cpython-39.pyc,,
apischema/__pycache__/fields.cpython-39.pyc,,
apischema/__pycache__/methods.cpython-39.pyc,,
apischema/__pycache__/ordering.cpython-39.pyc,,
apischema/__pycache__/recursion.cpython-39.pyc,,
apischema/__pycache__/settings.cpython-39.pyc,,
apischema/__pycache__/skip.cpython-39.pyc,,
apischema/__pycache__/std_types.cpython-39.pyc,,
apischema/__pycache__/tagged_unions.cpython-39.pyc,,
apischema/__pycache__/type_names.cpython-39.pyc,,
apischema/__pycache__/types.cpython-39.pyc,,
apischema/__pycache__/typing.cpython-39.pyc,,
apischema/__pycache__/utils.cpython-39.pyc,,
apischema/__pycache__/visitor.cpython-39.pyc,,
apischema/aliases.py,sha256=YnOhFCrDYWtQUMS6LInQBBLKgxCKmMOVn120tWWT7_M,1363
apischema/cache.py,sha256=Zwes3fW_MDZ_J0bQdFG6awnQh6k3Tsdr3zm1F_LeCaE,1219
apischema/conversions/__init__.py,sha256=f54zZOD_Wq5jzdgr15kKUxzrXhqRxkCGfvu5b5cfd7U,1014
apischema/conversions/__pycache__/__init__.cpython-39.pyc,,
apischema/conversions/__pycache__/conversions.cpython-39.pyc,,
apischema/conversions/__pycache__/converters.cpython-39.pyc,,
apischema/conversions/__pycache__/dataclass_models.cpython-39.pyc,,
apischema/conversions/__pycache__/utils.cpython-39.pyc,,
apischema/conversions/__pycache__/visitor.cpython-39.pyc,,
apischema/conversions/__pycache__/wrappers.cpython-39.pyc,,
apischema/conversions/conversions.py,sha256=JydDimFXomRFehkWDnSI20YiWyHWRuGyeH_HqAPBsBs,3467
apischema/conversions/converters.py,sha256=vWcMYUsfj3y05rp3kzs-_VNYsyzKbLOkBfNisE3EEgU,6094
apischema/conversions/dataclass_models.py,sha256=LVPCdRIDnh196XnQTFFFHPoqCNWf8UsUNDUmEu8wFC8,2975
apischema/conversions/utils.py,sha256=f6Yg0PNqSeQmmLllZGr_Ki21c_8b4pUWw-t4buzBUx0,2944
apischema/conversions/visitor.py,sha256=CkA8YXTI0bkmam8-Tqayy6yy8qUrDK2Oe-2N26PKvQI,8471
apischema/conversions/wrappers.py,sha256=-9kDqjFpYUOUYTqwBUrEi2tIIPDsMDX4K6ai_i6mQLE,2227
apischema/dataclasses.py,sha256=440GM4lKn1mHc7g_U9L7KgyCtvX8akgY8xm-zQgoQ58,887
apischema/dependencies.py,sha256=DgwQCOPe8dyrF_RoGbbNQxTCwP7jNdM3ylFB-BVLLvE,2110
apischema/dependent_required.py,sha256=Yp2lRSTYrU_LbwgW7GKJlO4-2Mw97raROrAYMxaGTZE,338
apischema/deserialization/__init__.py,sha256=_1gwEMVXkilWu-Z_otIKTTqXQYxRKBgt7kode0MjlDQ,35164
apischema/deserialization/__pycache__/__init__.cpython-39.pyc,,
apischema/deserialization/__pycache__/coercion.cpython-39.pyc,,
apischema/deserialization/__pycache__/flattened.cpython-39.pyc,,
apischema/deserialization/coercion.py,sha256=2JtCLPBdh5ufJA-1hHP47S3-V41CWg-eim9HOiG-xJg,1467
apischema/deserialization/flattened.py,sha256=ApD-W0rH74dQaEpD0j59jvcYb4QJj9HgoCdFfxvd7B8,1721
apischema/fields.py,sha256=jb5sOnR2upTDIPKEqxpnt4AHfdy0zpsbJ_pu_10t5w8,4434
apischema/graphql/__init__.py,sha256=n5xycpEUJSx_QoIyRstB6x-ecXl-2fCiGENeeprSzlI,498
apischema/graphql/__pycache__/__init__.cpython-39.pyc,,
apischema/graphql/__pycache__/interfaces.cpython-39.pyc,,
apischema/graphql/__pycache__/resolvers.cpython-39.pyc,,
apischema/graphql/__pycache__/schema.cpython-39.pyc,,
apischema/graphql/interfaces.py,sha256=kJPLb96ZtRU1pdevjvgTmFIuwGsrQXXQmSI1vG6YCmU,367
apischema/graphql/relay/__init__.py,sha256=9c1O2xI4yfOyBUOD0NwjwfENB_Gl4wTX_lGVntiNM3s,404
apischema/graphql/relay/__pycache__/__init__.cpython-39.pyc,,
apischema/graphql/relay/__pycache__/connections.cpython-39.pyc,,
apischema/graphql/relay/__pycache__/global_identification.cpython-39.pyc,,
apischema/graphql/relay/__pycache__/mutations.cpython-39.pyc,,
apischema/graphql/relay/__pycache__/utils.cpython-39.pyc,,
apischema/graphql/relay/connections.py,sha256=yftPDvLqbfsQwq2yyumTMhWuKbdKwai0d6dN_Io3BLE,2687
apischema/graphql/relay/global_identification.py,sha256=K6hMG4ryQAKfgMG3m1nQwQmTtJ48b4lmUUF4BgtG_9c,4562
apischema/graphql/relay/mutations.py,sha256=wvBg-3Lubm34GsZwVHULR1eNQ1QXp0P64ouSOTrGrYk,5268
apischema/graphql/relay/utils.py,sha256=zHeb4rcrGjDcqB-0k1LfF50JrO7GKHHzC_V8PQZsGOM,240
apischema/graphql/resolvers.py,sha256=q9qFdYcCWSf03i8f2vHizuXyHDCjPL5Ud_AD892Z6kQ,10632
apischema/graphql/schema.py,sha256=VcU06dyIRk_asJ-9RkoKDuHlTiEoygRacjFiVBq6llc,36811
apischema/json_schema/__init__.py,sha256=85iCyTQitcrSJCt5eCWE47IWIWF_3cFgFY5PAhHAYVU,287
apischema/json_schema/__pycache__/__init__.cpython-39.pyc,,
apischema/json_schema/__pycache__/conversions_resolver.cpython-39.pyc,,
apischema/json_schema/__pycache__/patterns.cpython-39.pyc,,
apischema/json_schema/__pycache__/refs.cpython-39.pyc,,
apischema/json_schema/__pycache__/schema.cpython-39.pyc,,
apischema/json_schema/__pycache__/types.cpython-39.pyc,,
apischema/json_schema/__pycache__/versions.cpython-39.pyc,,
apischema/json_schema/conversions_resolver.py,sha256=e7Yp6lTA48Fm1WzhD2I1ui8VMdMsVeiT9JlseBDESio,4374
apischema/json_schema/patterns.py,sha256=MqjaFdifkkTmKDb41V1OJe604A3CR-mIKIdWsyxiHgg,789
apischema/json_schema/refs.py,sha256=64NNhftG6LExoLWyk4oVOrATLtd8VJqWsYCqd7jPw9A,4657
apischema/json_schema/schema.py,sha256=2k67nXYm74vQgahQZWnYtxLPbRsmThLR46Ql6DKUi_k,25164
apischema/json_schema/types.py,sha256=9QbY8pyLdlCEztf1GruzUWeHaDyawLV5loxWbmo1PnM,3380
apischema/json_schema/versions.py,sha256=yYnjlysqycyMaZsiXxGczukvyDI1OlumqudFdn5BqtY,4012
apischema/metadata/__init__.py,sha256=i6Kxq4bHag4LYqMt1JeRE2bqGL4TTwzgfhbfOuoB1HA,1146
apischema/metadata/__pycache__/__init__.cpython-39.pyc,,
apischema/metadata/__pycache__/implem.cpython-39.pyc,,
apischema/metadata/__pycache__/keys.cpython-39.pyc,,
apischema/metadata/implem.py,sha256=w9PI1N9DZZ-rU7K_Vpu94n6fhXaYEtvJikqoJQ-wgU0,2872
apischema/metadata/keys.py,sha256=N-BfR9OS_ZG-szx76nyre3JrBZQhdeckujnQMae44sI,704
apischema/methods.py,sha256=9DRxvjU4b2pglUivJfKW4LMrlfGxybOyfciQNuq-pMY,4335
apischema/objects/__init__.py,sha256=ZuxNNqyIVgE4-k7pSpG-K_Cckt3gLR7OZliVpcXf_B0,379
apischema/objects/__pycache__/__init__.cpython-39.pyc,,
apischema/objects/__pycache__/conversions.cpython-39.pyc,,
apischema/objects/__pycache__/fields.cpython-39.pyc,,
apischema/objects/__pycache__/getters.cpython-39.pyc,,
apischema/objects/__pycache__/visitor.cpython-39.pyc,,
apischema/objects/conversions.py,sha256=AIzGrE-9WgeLPKzWoZ1wqTqq5vKDmu73cM4DPvXgAas,5892
apischema/objects/fields.py,sha256=Jwxeyngy4jghzvJlKgCiUrxqmDNeLJwjyWRI-kNWHdc,7878
apischema/objects/getters.py,sha256=buHbqEzMO4fjnxsUV4kaJNQxQDR44ftWqpiPbGUvsMY,4153
apischema/objects/visitor.py,sha256=cyEKI4JOoANI91apXDC6Uk-sG7FJbrhD_sKybfkjaZQ,5308
apischema/ordering.py,sha256=YvtfOZ7O1UXc9v4GTrNUMcKHgsIeQuUr2PDV9Rf8T8Q,3845
apischema/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apischema/recursion.py,sha256=MXDbGoBdXXpgvPc2K0zX3_CuJV0nPKzn0LvON9RxvbE,5313
apischema/schemas/__init__.py,sha256=FHbk2aJBC2eTIs8F-i7muN8vYjYQEXotnXDn0Iov2gA,4065
apischema/schemas/__pycache__/__init__.cpython-39.pyc,,
apischema/schemas/__pycache__/annotations.cpython-39.pyc,,
apischema/schemas/__pycache__/constraints.cpython-39.pyc,,
apischema/schemas/annotations.py,sha256=MXKI6qTTVJNYNQdqMbXGdcwo2VaMUdQKAZLdjFi_x6Y,1301
apischema/schemas/constraints.py,sha256=z87DW_X40Xm4lz5vbzE_-uE-UjH9dijYSBjggqz_VKQ,5076
apischema/serialization/__init__.py,sha256=xzSyDV-oOL0IS_LAPeW_2WooW2-rldjuebD8iFFuyis,20905
apischema/serialization/__pycache__/__init__.cpython-39.pyc,,
apischema/serialization/__pycache__/serialized_methods.cpython-39.pyc,,
apischema/serialization/serialized_methods.py,sha256=_Yq9M9IptwIodVG02uXkQ13BUWYquqgNRWDTn9SUAuk,5189
apischema/settings.py,sha256=_jUMcTiTGF8i4HRvyrqs53x68rTCmdOSipdvkVXloLU,3110
apischema/skip.py,sha256=IavIlMoyGnIqcKhfqJ--gqcuQ1V89WCx1E-PgD2PPDg,428
apischema/std_types.py,sha256=oZ7WQnPlbJV1tfbAeUQm1n4c9D9V31GyT7g-ZMLfGyE,2755
apischema/tagged_unions.py,sha256=T1ANMdYsUv8V-c3fb6Y7cxp9g1zmCtVm08BNrjsizPk,5236
apischema/type_names.py,sha256=PNZN9QIYniW1npbx6KHZ58PFz4efQbLL0kWYXsY5W10,3333
apischema/types.py,sha256=tPhy3Tf6ByWdgcZiD_l8VnZnKTVpH_kfsbpZ7Gvvb24,2491
apischema/typing.py,sha256=UMVCgZbTaYc4zF66jZHv8j-GD1z65DyHbOHrY-g9QYg,9735
apischema/utils.py,sha256=MzHdhCcM_gEoeDRxh6noO3OtqaXNdRZRr1fG-W6srqU,12431
apischema/validation/__init__.py,sha256=gAtawUe-29vQi3KwDqDoGrCWZhbipGwvVVu1NDrHp8U,291
apischema/validation/__pycache__/__init__.cpython-39.pyc,,
apischema/validation/__pycache__/dependencies.cpython-39.pyc,,
apischema/validation/__pycache__/errors.cpython-39.pyc,,
apischema/validation/__pycache__/mock.cpython-39.pyc,,
apischema/validation/__pycache__/validators.cpython-39.pyc,,
apischema/validation/dependencies.py,sha256=AIbs-9M_43RjNGGgVUJ_SnDQHa4plCnepQj6aGO07q0,2015
apischema/validation/errors.py,sha256=mPOnZOsnPPwuqv-T5quj5FRkMB6I-ESZwqYGu_Y3kMA,4251
apischema/validation/mock.py,sha256=gi8QBpmnKHcBAfZ_APeqSaGHpDUoinJwkHMdUY8vjtQ,1861
apischema/validation/validators.py,sha256=myA0c2D0FViWWgbVHXdczSnXFZo7ld1GSgfWqKP-klU,6738
apischema/visitor.py,sha256=mV3hfDOlcM1iyjEbMpaAuQvHLat0ZVPT9HXMHF2t9J0,7258

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.0)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1 @@
apischema

@ -0,0 +1,99 @@
__all__ = [
"PassThroughOptions",
"Undefined",
"UndefinedType",
"Unsupported",
"ValidationError",
"alias",
"dependent_required",
"deserialization_method",
"deserialize",
"deserializer",
"identity",
"order",
"properties",
"schema",
"schema_ref",
"serialization_default",
"serialization_method",
"serialize",
"serialized",
"serializer",
"settings",
"type_name",
"validator",
]
import warnings
from . import ( # noqa: F401
cache,
conversions,
dataclasses,
fields,
json_schema,
metadata,
objects,
skip,
tagged_unions,
validation,
)
from .aliases import alias
from .conversions import deserializer, serializer
from .dependencies import dependent_required
from .deserialization import deserialization_method, deserialize
from .metadata import properties
from .ordering import order
from .schemas import schema
from .serialization import (
PassThroughOptions,
serialization_default,
serialization_method,
serialize,
)
from .serialization.serialized_methods import serialized
from .settings import settings
from .type_names import schema_ref, type_name
from .types import Undefined, UndefinedType
from .utils import identity
from .validation import ValidationError, validator
from .visitor import Unsupported
try:
import graphql as _gql
if _gql.__version__.startswith("2."):
warnings.warn(
f"graphql-core version {_gql.__version__} is incompatible with apischema;\n"
"GraphQL schema generation is thus not available."
)
else:
from . import graphql # noqa: F401
__all__.append("graphql")
del _gql
except ImportError:
pass
def __getattr__(name):
if name == "graphql":
raise AttributeError(
"GraphQL feature requires graphql-core library\n"
"Run `pip install apischema[graphql]` to install it"
)
if name == "skip":
warnings.warn("apischema.skip module is deprecated")
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
def register_default_conversions():
"""Handle standard library + internal types"""
from . import std_types # noqa: F401
deserializer(ValidationError.from_errors)
serializer(ValidationError.errors)
register_default_conversions()
del register_default_conversions

@ -0,0 +1,53 @@
from typing import Callable, MutableMapping, TypeVar, overload
from apischema.cache import CacheAwareDict
from apischema.types import Metadata, MetadataImplem
Aliaser = Callable[[str], str]
Cls = TypeVar("Cls", bound=type)
_class_aliasers: MutableMapping[type, Aliaser] = CacheAwareDict({})
get_class_aliaser = _class_aliasers.get
@overload
def alias(alias_: str, *, override: bool = True) -> Metadata:
...
@overload
def alias(override: bool) -> Metadata:
...
@overload
def alias(aliaser: Aliaser) -> Callable[[Cls], Cls]:
...
def alias(arg=None, *, override: bool = True): # type: ignore
"""Field alias or class aliaser
:param alias_: alias of the field
:param override: alias can be overridden by a class aliaser
    :param aliaser: compute the alias of each (overridable) field of the decorated class
"""
from apischema.metadata.keys import ALIAS_METADATA, ALIAS_NO_OVERRIDE_METADATA
if callable(arg):
def aliaser(cls: Cls) -> Cls:
_class_aliasers[cls] = arg
return cls
return aliaser
else:
metadata = MetadataImplem()
if arg is not None:
metadata[ALIAS_METADATA] = arg
if not override:
metadata[ALIAS_NO_OVERRIDE_METADATA] = True
if not metadata:
raise NotImplementedError
return metadata
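A hedged sketch of the overloads above, using hypothetical dataclasses:

```python
from dataclasses import dataclass, field
from apischema import alias, deserialize, serialize

@dataclass
class Resource:
    my_id: str = field(metadata=alias("id"))  # (de)serialized under the key "id"

assert serialize(Resource, Resource("42")) == {"id": "42"}
assert deserialize(Resource, {"id": "42"}) == Resource("42")

@alias(lambda s: s.upper())  # class aliaser applied to every overridable field
@dataclass
class Shouting:
    some_field: str

assert serialize(Shouting, Shouting("x")) == {"SOME_FIELD": "x"}
```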

@ -0,0 +1,54 @@
__all__ = ["cache", "reset", "set_size"]
import sys
from functools import lru_cache
from typing import Callable, Iterator, MutableMapping, TypeVar, cast
from apischema.utils import type_dict_wrapper
_cached: list = []
Func = TypeVar("Func", bound=Callable)
def cache(func: Func) -> Func:
cached = cast(Func, lru_cache()(func))
_cached.append(cached)
return cached
def reset():
for cached in _cached:
cached.cache_clear()
def set_size(size: int):
for cached in _cached:
wrapped = cached.__wrapped__
setattr(
sys.modules[wrapped.__module__], wrapped.__name__, lru_cache(size)(wrapped)
)
K = TypeVar("K")
V = TypeVar("V")
class CacheAwareDict(MutableMapping[K, V]):
def __init__(self, wrapped: MutableMapping[K, V]):
self.wrapped = type_dict_wrapper(wrapped)
def __getitem__(self, key: K) -> V:
return self.wrapped[key]
def __setitem__(self, key: K, value: V):
self.wrapped[key] = value
reset()
def __delitem__(self, key: K):
del self.wrapped[key]
def __len__(self) -> int:
return len(self.wrapped)
def __iter__(self) -> Iterator[K]:
return iter(self.wrapped)
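A short sketch of the public helpers above (``CacheAwareDict`` is the internal piece that calls ``reset()`` whenever a new entry is registered):

```python
import apischema

apischema.cache.reset()       # clear every lru_cache registered via @cache
apischema.cache.set_size(64)  # rebuild the cached functions with a bounded size
```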

@ -0,0 +1,43 @@
__all__ = [
"AnyConversion",
"Conversion",
"LazyConversion",
"as_names",
"as_str",
"dataclass_input_wrapper",
"deserializer",
"reset_deserializers",
"reset_serializer",
"serializer",
]
import sys
import warnings
from .conversions import AnyConversion, Conversion, LazyConversion
from .converters import (
as_names,
as_str,
deserializer,
reset_deserializers,
reset_serializer,
serializer,
)
from .wrappers import dataclass_input_wrapper
if sys.version_info >= (3, 7):
def __getattr__(name):
if name == "identity":
from apischema.utils import identity # noqa: F811
warnings.warn(
"apischema.conversions.identity is deprecated, "
"use apischema.identity instead",
DeprecationWarning,
)
return identity
raise AttributeError(f"module {__name__} has no attribute {name}")
else:
from apischema.utils import identity # noqa: F401

@ -0,0 +1,111 @@
from dataclasses import dataclass
from functools import lru_cache
from typing import (
Any,
Callable,
Collection,
Dict,
Generic,
List,
NewType,
Optional,
TYPE_CHECKING,
Tuple,
TypeVar,
Union,
)
from apischema.conversions.utils import Converter, converter_types
from apischema.dataclasses import replace
from apischema.methods import is_method, method_class, method_wrapper
from apischema.types import AnyType
from apischema.typing import is_type_var
from apischema.utils import deprecate_kwargs, identity
if TYPE_CHECKING:
pass
ConvOrProp = TypeVar("ConvOrProp", Converter, property)
@dataclass(frozen=True)
class Conversion(Generic[ConvOrProp]):
converter: ConvOrProp
source: AnyType = None
target: AnyType = None
sub_conversion: Optional["AnyConversion"] = None
inherited: Optional[bool] = None
deprecate_kwargs({"sub_conversions": "sub_conversion"})(Conversion)
@dataclass(frozen=True)
class LazyConversion:
get: Callable[[], Optional["AnyConversion"]]
def __post_init__(self):
object.__setattr__(self, "get", lru_cache(1)(self.get))
@property
def inherited(self) -> Optional[bool]:
conversion = self.get() # type: ignore
return isinstance(conversion, Conversion) and conversion.inherited
ConvOrFunc = Union[Conversion, Converter, property, LazyConversion]
AnyConversion = Union[ConvOrFunc, Tuple[ConvOrFunc, ...]]
DefaultConversion = Callable[[AnyType], Optional[AnyConversion]]
ResolvedConversion = NewType("ResolvedConversion", Conversion[Converter])
ResolvedConversions = Tuple[ResolvedConversion, ...] # Tuple in order to be hashable
def resolve_conversion(
conversion: Union[Converter, property, Conversion], namespace: Dict[str, Any] = None
) -> ResolvedConversion:
if not isinstance(conversion, Conversion):
conversion = Conversion(conversion)
if is_method(conversion.converter):
if conversion.source is None:
conversion = replace(conversion, source=method_class(conversion.converter))
conversion = replace(conversion, converter=method_wrapper(conversion.converter))
assert not isinstance(conversion.converter, property)
source, target = converter_types(
conversion.converter, conversion.source, conversion.target, namespace
)
return ResolvedConversion(replace(conversion, source=source, target=target))
def resolve_any_conversion(conversion: Optional[AnyConversion]) -> ResolvedConversions:
if not conversion:
return ()
result: List[ResolvedConversion] = []
for conv in conversion if isinstance(conversion, Collection) else [conversion]:
if isinstance(conv, LazyConversion):
result.extend(resolve_any_conversion(conv.get())) # type: ignore
else:
result.append(resolve_conversion(conv))
return tuple(result)
def handle_identity_conversion(
conversion: ResolvedConversion, tp: AnyType
) -> ResolvedConversion:
if (
is_identity(conversion)
and conversion.source == conversion.target
and is_type_var(conversion.source)
):
return ResolvedConversion(replace(conversion, source=tp, target=tp))
else:
return conversion
def is_identity(conversion: ResolvedConversion) -> bool:
return (
conversion.converter == identity
and conversion.source == conversion.target
and conversion.sub_conversion is None
)
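A sketch of registering an explicit ``Conversion`` pair for a custom type; the ``Blob`` class and the base64 encoding are hypothetical:

```python
from base64 import b64decode, b64encode
from apischema import deserialize, deserializer, serialize, serializer
from apischema.conversions import Conversion

class Blob:
    def __init__(self, data: bytes):
        self.data = data

# source/target given explicitly, so no annotations are needed on the lambdas
deserializer(Conversion(lambda s: Blob(b64decode(s)), source=str, target=Blob))
serializer(Conversion(lambda b: b64encode(b.data).decode(), source=Blob, target=str))

blob = deserialize(Blob, "aGVsbG8=")        # str -> Blob
assert serialize(Blob, blob) == "aGVsbG8="  # Blob -> str
```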

@ -0,0 +1,208 @@
import sys
from collections import defaultdict
from enum import Enum
from functools import partial
from types import new_class
from typing import (
Callable,
List,
MutableMapping,
Optional,
TYPE_CHECKING,
Type,
TypeVar,
Union,
overload,
)
from apischema.cache import CacheAwareDict
from apischema.conversions import LazyConversion
from apischema.conversions.conversions import (
AnyConversion,
ConvOrFunc,
Conversion,
resolve_conversion,
)
from apischema.conversions.utils import Converter, is_convertible
from apischema.methods import MethodOrProperty, MethodWrapper, is_method, method_class
from apischema.type_names import type_name
from apischema.types import AnyType
from apischema.typing import is_type_var
from apischema.utils import get_args2, get_origin_or_type, stop_signature_abuse
if TYPE_CHECKING:
pass
_deserializers: MutableMapping[AnyType, List[ConvOrFunc]] = CacheAwareDict(
defaultdict(list)
)
_serializers: MutableMapping[AnyType, ConvOrFunc] = CacheAwareDict({})
Deserializer = TypeVar(
"Deserializer", bound=Union[Callable, Conversion, staticmethod, type]
)
Serializer = TypeVar("Serializer", bound=Union[Callable, Conversion, property, type])
default_deserialization: Callable[[type], Optional[AnyConversion]]
# defaultdict.get is not hashable in 3.7
if sys.version_info < (3, 8):
def default_deserialization(tp):
return _deserializers.get(tp)
else:
default_deserialization = _deserializers.get # type: ignore
def default_serialization(tp: Type) -> Optional[AnyConversion]:
for sub_cls in getattr(tp, "__mro__", [tp]):
if sub_cls in _serializers:
conversion = _serializers[sub_cls]
if (
sub_cls == tp
or not isinstance(conversion, (Conversion, LazyConversion))
or conversion.inherited in (None, True)
):
return conversion
else:
return None
def check_converter_type(tp: AnyType) -> AnyType:
origin = get_origin_or_type(tp)
if not is_convertible(tp):
raise TypeError(f"{origin} is not convertible")
if not all(map(is_type_var, get_args2(tp))):
raise TypeError("Generic conversion doesn't support specialization")
return origin
def _add_deserializer(conversion: ConvOrFunc, target: AnyType):
target = check_converter_type(target)
if conversion not in _deserializers[target]:
_deserializers[target].append(conversion)
class DeserializerDescriptor(MethodWrapper[staticmethod]):
def __set_name__(self, owner, name):
super().__set_name__(owner, name)
method = self._method.__get__(None, object)
resolved = resolve_conversion(method, {owner.__name__: owner})
_add_deserializer(method, resolved.target)
@overload
def deserializer(deserializer: Deserializer) -> Deserializer:
...
@overload
def deserializer(
*, lazy: Callable[[], Union[Converter, Conversion]], target: Type
) -> None:
...
def deserializer(
deserializer: Deserializer = None,
*,
lazy: Callable[[], Union[Converter, Conversion]] = None,
target: Type = None,
):
if deserializer is not None:
if isinstance(deserializer, staticmethod):
return DeserializerDescriptor(deserializer)
elif isinstance(deserializer, LazyConversion):
stop_signature_abuse()
else:
resolved = resolve_conversion(deserializer) # type: ignore
_add_deserializer(deserializer, resolved.target) # type: ignore
return deserializer
elif lazy is not None and target is not None:
_add_deserializer(LazyConversion(lazy), target)
else:
stop_signature_abuse()
def _add_serializer(conversion: ConvOrFunc, source: AnyType):
source = check_converter_type(source)
_serializers[source] = conversion
class SerializerDescriptor(MethodWrapper[MethodOrProperty]):
def __set_name__(self, owner, name):
super().__set_name__(owner, name)
_add_serializer(self._method, source=owner)
@overload
def serializer(serializer: Serializer) -> Serializer:
...
@overload
def serializer(
*, lazy: Callable[[], Union[Converter, Conversion]], source: Type
) -> Callable[[Serializer], Serializer]:
...
def serializer(
serializer: Serializer = None,
*,
lazy: Callable[[], Union[Converter, Conversion]] = None,
source: Type = None,
):
if serializer is not None:
if is_method(serializer) and method_class(serializer) is None: # type: ignore
return SerializerDescriptor(serializer) # type: ignore
elif isinstance(serializer, LazyConversion):
stop_signature_abuse()
else:
resolved = resolve_conversion(serializer)
_add_serializer(serializer, resolved.source)
return serializer
elif lazy is not None and source is not None:
_add_serializer(LazyConversion(lazy), source)
else:
stop_signature_abuse()
def reset_deserializers(cls: Type):
_deserializers.pop(cls, ...)
def reset_serializer(cls: Type):
_serializers.pop(cls, ...)
Cls = TypeVar("Cls", bound=type)
def as_str(cls: Cls) -> Cls:
deserializer(Conversion(cls, source=str))
serializer(Conversion(str, source=cls))
return cls
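``as_str`` above wires both directions at once; a sketch with a hypothetical ``Tag`` class:

```python
from apischema import deserialize, serialize
from apischema.conversions import as_str

@as_str
class Tag:
    def __init__(self, value: str):
        self.value = value

    def __str__(self) -> str:
        return self.value

tag = deserialize(Tag, "urgent")        # str -> Tag via Tag("urgent")
assert serialize(Tag, tag) == "urgent"  # Tag -> str via str(tag)
```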
EnumCls = TypeVar("EnumCls", bound=Type[Enum])
def as_names(cls: EnumCls, aliaser: Callable[[str], str] = lambda s: s) -> EnumCls:
    # Enum requires calling the namespace's __setitem__ for each member
def exec_body(namespace: dict):
for elt in cls: # type: ignore
namespace[elt.name] = aliaser(elt.name)
if not issubclass(cls, Enum):
raise TypeError("as_names must be called with Enum subclass")
name_cls = type_name(None)(
new_class(cls.__name__, (str, Enum), exec_body=exec_body)
)
deserializer(Conversion(partial(getattr, cls), source=name_cls, target=cls))
def get_name(obj):
return getattr(name_cls, obj.name)
serializer(Conversion(get_name, source=cls, target=name_cls))
return cls
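``as_names`` above makes an Enum round-trip by member name instead of value; a sketch with a hypothetical enum:

```python
from enum import Enum
from apischema import deserialize, serialize
from apischema.conversions import as_names

@as_names
class Color(Enum):
    RED = 1
    GREEN = 2

assert serialize(Color, Color.RED) == "RED"       # by name, not by value 1
assert deserialize(Color, "GREEN") is Color.GREEN
```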

@ -0,0 +1,92 @@
import warnings
from dataclasses import dataclass
from types import new_class
from typing import Callable, Optional, TYPE_CHECKING, Tuple, Type, Union
from apischema.conversions import Conversion
from apischema.conversions.conversions import ResolvedConversion
from apischema.dataclasses import replace
from apischema.utils import PREFIX, identity
if TYPE_CHECKING:
from apischema.deserialization.coercion import Coerce
Model = Union[Type, Callable[[], Type]]
def check_model(origin: Type, model: Type):
if not isinstance(model, type):
raise TypeError("Dataclass model must be a dataclass")
if getattr(origin, "__parameters__", ()) != getattr(model, "__parameters__", ()):
raise TypeError("Dataclass model must have the same generic parameters")
MODEL_ORIGIN_ATTR = f"{PREFIX}model_origin"
DATACLASS_ATTR = "_dataclass"
@dataclass(frozen=True)
class DataclassModel:
origin: Type
model: Model
fields_only: bool
@property
def dataclass(self) -> Type:
if not hasattr(self, "_dataclass"):
origin = self.origin
if isinstance(self.model, type):
assert check_model(origin, self.model) is None
model = self.model
else:
model = self.model()
check_model(origin, model)
namespace = {"__new__": lambda _, *args, **kwargs: origin(*args, **kwargs)}
if not self.fields_only:
namespace[MODEL_ORIGIN_ATTR] = origin
cls = new_class(
model.__name__, (model,), exec_body=lambda ns: ns.update(namespace)
)
object.__setattr__(self, "_dataclass", cls)
return getattr(self, "_dataclass")
def dataclass_model(
origin: Type,
model: Model,
*,
fields_only: bool = False,
additional_properties: Optional[bool] = None,
coercion: Optional["Coerce"] = None,
fall_back_on_default: Optional[bool] = None,
exclude_unset: Optional[bool] = None,
) -> Tuple[Conversion, Conversion]:
warnings.warn(
"dataclass_model is deprecated, use set_object_fields instead",
DeprecationWarning,
)
if isinstance(model, type):
check_model(origin, model)
model_type = DataclassModel(origin, model, fields_only)
return Conversion(identity, source=model_type, target=origin), Conversion(
identity, source=origin, target=model_type
)
def has_model_origin(cls: Type) -> bool:
return hasattr(cls, MODEL_ORIGIN_ATTR)
def get_model_origin(cls: Type) -> Type:
return getattr(cls, MODEL_ORIGIN_ATTR)
def handle_dataclass_model(conversion: ResolvedConversion) -> ResolvedConversion:
conv: Conversion = conversion
if isinstance(conv.source, DataclassModel):
conv = replace(conv, source=conv.source.dataclass)
if isinstance(conv.target, DataclassModel):
conv = replace(conv, target=conv.target.dataclass)
return ResolvedConversion(conv)

@ -0,0 +1,80 @@
from inspect import Parameter, signature
from typing import Any, Callable, Dict, Generic, Optional, Tuple, Type, cast
from apischema.types import AnyType
from apischema.typing import (
get_type_hints,
is_annotated,
is_literal,
is_new_type,
is_type,
is_union,
)
from apischema.utils import get_origin_or_type
Converter = Callable[[Any], Any]
def converter_types(
converter: Converter,
source: Optional[AnyType] = None,
target: Optional[AnyType] = None,
namespace: Dict[str, Any] = None,
) -> Tuple[AnyType, AnyType]:
try:
        # before 3.9, Generic.__new__ perturbs the signature of types
if (
isinstance(converter, type)
and converter.__new__ is Generic.__new__ is not object.__new__
and converter.__init__ is not object.__init__ # type: ignore
):
parameters = list(signature(converter.__init__).parameters.values())[1:] # type: ignore
else:
parameters = list(signature(converter).parameters.values())
except ValueError: # builtin types
if target is None and is_type(converter):
target = cast(Type[Any], converter)
if source is None:
raise TypeError("Converter source is unknown") from None
else:
if not parameters:
raise TypeError("converter must have at least one parameter")
first_param, *other_params = parameters
for p in other_params:
if p.default is Parameter.empty and p.kind not in (
Parameter.VAR_POSITIONAL,
Parameter.VAR_KEYWORD,
):
raise TypeError(
"converter must have at most one parameter without default"
)
if source is not None and target is not None:
return source, target
types = get_type_hints(converter, None, namespace, include_extras=True)
if not types and is_type(converter):
types = get_type_hints(
converter.__new__, None, namespace, include_extras=True
) or get_type_hints(
converter.__init__, None, namespace, include_extras=True # type: ignore
)
if source is None:
try:
source = types.pop(first_param.name)
except KeyError:
raise TypeError("converter source is unknown") from None
if target is None:
if is_type(converter):
target = cast(Type, converter)
else:
try:
target = types.pop("return")
except KeyError:
raise TypeError("converter target is unknown") from None
return source, target
def is_convertible(tp: AnyType) -> bool:
origin = get_origin_or_type(tp)
return is_new_type(tp) or (
is_type(origin) and not (is_literal(tp) or is_annotated(tp) or is_union(origin))
)
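A small sketch of the annotation inference performed by ``converter_types`` above:

```python
from apischema.conversions.utils import converter_types

def int_to_str(x: int) -> str:
    return str(x)

# source inferred from the first parameter, target from the return annotation
assert converter_types(int_to_str) == (int, str)
```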

@ -0,0 +1,245 @@
from contextlib import contextmanager, suppress
from dataclasses import replace
from functools import lru_cache
from types import new_class
from typing import (
Any,
Collection,
Generic,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from apischema.conversions import LazyConversion
from apischema.conversions.conversions import (
AnyConversion,
DefaultConversion,
ResolvedConversion,
ResolvedConversions,
handle_identity_conversion,
is_identity,
resolve_any_conversion,
)
from apischema.conversions.utils import is_convertible
from apischema.metadata.implem import ConversionMetadata
from apischema.metadata.keys import CONVERSION_METADATA
from apischema.type_names import type_name
from apischema.types import AnyType
from apischema.typing import get_args, is_type_var
from apischema.utils import (
context_setter,
get_args2,
get_origin_or_type,
has_type_vars,
is_subclass,
substitute_type_vars,
subtyping_substitution,
)
from apischema.visitor import Result, Unsupported, Visitor
Deserialization = ResolvedConversions
Serialization = ResolvedConversion
Conv = TypeVar("Conv")
class ConversionsVisitor(Visitor[Result], Generic[Conv, Result]):
def __init__(self, default_conversion: DefaultConversion):
self.default_conversion = default_conversion
self._conversion: Optional[AnyConversion] = None
def _has_conversion(
self, tp: AnyType, conversion: Optional[AnyConversion]
) -> Tuple[bool, Optional[Conv]]:
raise NotImplementedError
def _annotated_conversion(
self, annotation: ConversionMetadata
) -> Optional[AnyConversion]:
raise NotImplementedError
def annotated(self, tp: AnyType, annotations: Sequence[Any]) -> Result:
for annotation in reversed(annotations):
if isinstance(annotation, Mapping) and CONVERSION_METADATA in annotation:
with self._replace_conversion(
self._annotated_conversion(annotation[CONVERSION_METADATA])
):
return super().annotated(tp, annotations)
return super().annotated(tp, annotations)
def _union_results(self, alternatives: Iterable[AnyType]) -> Sequence[Result]:
results = []
for alt in alternatives:
with suppress(Unsupported):
results.append(self.visit(alt))
if not results:
raise Unsupported(Union[tuple(alternatives)])
return results
def _visited_union(self, results: Sequence[Result]) -> Result:
raise NotImplementedError
def union(self, alternatives: Sequence[AnyType]) -> Result:
return self._visited_union(self._union_results(alternatives))
@contextmanager
def _replace_conversion(self, conversion: Optional[AnyConversion]):
with context_setter(self):
self._conversion = resolve_any_conversion(conversion) or None
yield
def visit_with_conv(
self, tp: AnyType, conversion: Optional[AnyConversion]
) -> Result:
with self._replace_conversion(conversion):
return self.visit(tp)
def _visit_conversion(
self,
tp: AnyType,
conversion: Conv,
dynamic: bool,
next_conversion: Optional[AnyConversion],
) -> Result:
raise NotImplementedError
def visit_conversion(
self,
tp: AnyType,
conversion: Optional[Conv],
dynamic: bool,
next_conversion: Optional[AnyConversion] = None,
) -> Result:
if conversion is not None:
return self._visit_conversion(tp, conversion, dynamic, next_conversion)
else:
with self._replace_conversion(next_conversion):
return super().visit(tp)
def visit(self, tp: AnyType) -> Result:
if not is_convertible(tp):
return self.visit_conversion(tp, None, False, self._conversion)
dynamic, conversion = self._has_conversion(tp, self._conversion)
if not dynamic:
_, conversion = self._has_conversion(
tp, self.default_conversion(get_origin_or_type(tp)) # type: ignore
)
next_conversion = None
if not dynamic and is_subclass(tp, Collection):
next_conversion = self._conversion
return self.visit_conversion(tp, conversion, dynamic, next_conversion)
def sub_conversion(
conversion: ResolvedConversion, next_conversion: Optional[AnyConversion]
) -> Optional[AnyConversion]:
return (
LazyConversion(lambda: conversion.sub_conversion),
LazyConversion(lambda: next_conversion),
)
@lru_cache(maxsize=0)
def self_deserialization_wrapper(cls: Type) -> Type:
wrapper = new_class(
f"{cls.__name__}SelfDeserializer",
(cls[cls.__parameters__] if has_type_vars(cls) else cls,),
exec_body=lambda ns: ns.update(
{"__new__": lambda _, *args, **kwargs: cls(*args, **kwargs)}
),
)
return type_name(None)(wrapper)
class DeserializationVisitor(ConversionsVisitor[Deserialization, Result]):
@staticmethod
def _has_conversion(
tp: AnyType, conversion: Optional[AnyConversion]
) -> Tuple[bool, Optional[Deserialization]]:
identity_conv, result = False, []
for conv in resolve_any_conversion(conversion):
conv = handle_identity_conversion(conv, tp)
if is_subclass(conv.target, tp):
if is_identity(conv):
if identity_conv:
continue
identity_conv = True
wrapper: AnyType = self_deserialization_wrapper(
get_origin_or_type(tp)
)
if get_args(tp):
wrapper = wrapper[get_args(tp)]
conv = ResolvedConversion(replace(conv, source=wrapper))
if is_type_var(conv.source) or any(
map(is_type_var, get_args2(conv.source))
):
_, substitution = subtyping_substitution(tp, conv.target)
conv = replace(
conv, source=substitute_type_vars(conv.source, substitution)
)
result.append(ResolvedConversion(replace(conv, target=tp)))
if identity_conv and len(result) == 1:
return True, None
else:
return bool(result), tuple(result) or None
def _annotated_conversion(
self, annotation: ConversionMetadata
) -> Optional[AnyConversion]:
return annotation.deserialization
def _visit_conversion(
self,
tp: AnyType,
conversion: Deserialization,
dynamic: bool,
next_conversion: Optional[AnyConversion],
) -> Result:
results = [
self.visit_with_conv(conv.source, sub_conversion(conv, next_conversion))
for conv in conversion
]
return self._visited_union(results)
class SerializationVisitor(ConversionsVisitor[Serialization, Result]):
@staticmethod
def _has_conversion(
tp: AnyType, conversion: Optional[AnyConversion]
) -> Tuple[bool, Optional[Serialization]]:
for conv in resolve_any_conversion(conversion):
conv = handle_identity_conversion(conv, tp)
if is_subclass(tp, conv.source):
if is_identity(conv):
return True, None
if is_type_var(conv.target) or any(
map(is_type_var, get_args2(conv.target))
):
substitution, _ = subtyping_substitution(conv.source, tp)
conv = replace(
conv, target=substitute_type_vars(conv.target, substitution)
)
return True, ResolvedConversion(replace(conv, source=tp))
else:
return False, None
def _annotated_conversion(
self, annotation: ConversionMetadata
) -> Optional[AnyConversion]:
return annotation.serialization
def _visit_conversion(
self,
tp: AnyType,
conversion: Serialization,
dynamic: bool,
next_conversion: Optional[AnyConversion],
) -> Result:
return self.visit_with_conv(
conversion.target, sub_conversion(conversion, next_conversion)
)

@ -0,0 +1,55 @@
import warnings
from dataclasses import MISSING, field as field_, make_dataclass
from inspect import Parameter, iscoroutinefunction, signature
from typing import Any, Callable, Mapping, Tuple, Type
from apischema.metadata import properties
from apischema.typing import get_type_hints
from apischema.utils import to_camel_case
def dataclass_input_wrapper(
func: Callable, parameters_metadata: Mapping[str, Mapping] = None
) -> Tuple[Callable, Type]:
warnings.warn(
"dataclass_input_wrapper is deprecated, use object_deserialization instead",
DeprecationWarning,
)
parameters_metadata = parameters_metadata or {}
types = get_type_hints(func, include_extras=True)
fields = []
params, kwargs_param = [], None
for param_name, param in signature(func).parameters.items():
if param.kind is Parameter.POSITIONAL_ONLY:
raise TypeError("Positional only parameters are not supported")
field_type = types.get(param_name, Any)
if param.kind in {Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY}:
default = MISSING if param.default is Parameter.empty else param.default
field = field_(
default=default, metadata=parameters_metadata.get(param_name)
)
fields.append((param_name, field_type, field))
params.append(param_name)
if param.kind == Parameter.VAR_KEYWORD:
field = field_(default_factory=dict, metadata=properties)
fields.append((param_name, Mapping[str, field_type], field)) # type: ignore
kwargs_param = param_name
input_cls = make_dataclass(to_camel_case(func.__name__), fields)
def wrapper(input):
kwargs = {name: getattr(input, name) for name in params}
if kwargs_param:
kwargs.update(getattr(input, kwargs_param))
return func(**kwargs)
if iscoroutinefunction(func):
wrapped = wrapper
async def wrapper(input):
return await wrapped(input)
wrapper.__annotations__["input"] = input_cls
if "return" in func.__annotations__:
wrapper.__annotations__["return"] = func.__annotations__["return"]
return wrapper, input_cls
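# A minimal usage sketch (editorial): the deprecated wrapper generates a dataclass
# from a function's parameters; create_user is an illustrative name.
if __name__ == "__main__":
    def create_user(name: str, age: int = 18) -> str:
        return f"{name} ({age})"

    # emits a DeprecationWarning pointing to object_deserialization
    wrapper, input_cls = dataclass_input_wrapper(create_user)
    assert wrapper(input_cls(name="Alice", age=30)) == "Alice (30)"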


@@ -0,0 +1,26 @@
# flake8: noqa
from dataclasses import *
def _replace(__obj, **changes):
from apischema.fields import FIELDS_SET_ATTR, fields_set, set_fields
from dataclasses import replace as replace_, _FIELDS, _FIELD_INITVAR # type: ignore
# Fix https://bugs.python.org/issue36470
assert is_dataclass(__obj)
for name, field in getattr(__obj, _FIELDS).items():
if field._field_type == _FIELD_INITVAR and name not in changes: # type: ignore
if field.default is not MISSING:
changes[name] = field.default
elif field.default_factory is not MISSING:
changes[name] = field.default_factory()
result = replace_(__obj, **changes)
if hasattr(__obj, FIELDS_SET_ATTR):
set_fields(result, *fields_set(__obj), *changes, overwrite=True)
return result
globals()[replace.__name__] = _replace
del _replace
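# A hedged sketch of the fix: unlike stdlib replace(), the patched version falls
# back on an InitVar's default instead of requiring it to be passed again.
if __name__ == "__main__":
    @dataclass
    class Scaled:
        x: int
        scale: InitVar[int] = 2
        y: int = field(init=False)

        def __post_init__(self, scale: int):
            self.y = self.x * scale

    # stdlib replace() would raise "InitVar 'scale' must be specified" here
    assert replace(Scaled(1), x=3).y == 6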


@@ -0,0 +1,75 @@
from collections import defaultdict
from dataclasses import dataclass
from typing import (
AbstractSet,
Any,
Collection,
Dict,
List,
Mapping,
MutableMapping,
Set,
Tuple,
overload,
)
from apischema.cache import CacheAwareDict
from apischema.objects.fields import check_field_or_name, get_field_name
_dependent_requireds: MutableMapping[
type, List[Tuple[Any, Collection[Any]]]
] = CacheAwareDict(defaultdict(list))
DependentRequired = Mapping[str, AbstractSet[str]]
def get_dependent_required(cls: type) -> DependentRequired:
result: Dict[str, Set[str]] = defaultdict(set)
for sub_cls in cls.__mro__:
for field, required in _dependent_requireds[sub_cls]:
result[get_field_name(field)].update(map(get_field_name, required))
return result
@dataclass
class DependentRequiredDescriptor:
fields: Mapping[Any, Collection[Any]]
groups: Collection[Collection[Any]]
def __set_name__(self, owner, name):
setattr(owner, name, None)
dependent_required(self.fields, *self.groups, owner=owner)
@overload
def dependent_required(
fields: Mapping[Any, Collection[Any]], *groups: Collection[Any], owner: type = None
):
...
@overload
def dependent_required(*groups: Collection[Any], owner: type = None):
...
def dependent_required(*groups: Collection[Any], owner: type = None): # type: ignore
if not groups:
return
fields: Mapping[Any, Collection[Any]] = {}
if isinstance(groups[0], Mapping):
fields, *groups = groups # type: ignore
if owner is None:
return DependentRequiredDescriptor(fields, groups)
else:
dep_req = _dependent_requireds[owner]
for field, required in fields.items():
dep_req.append((field, required))
check_field_or_name(field)
for req in required:
check_field_or_name(req)
for group in map(list, groups):
for i, field in enumerate(group):
check_field_or_name(field)
dep_req.append((field, [group[:i], group[i:]]))
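# A minimal usage sketch (editorial): require "billing_address" whenever
# "card_number" is provided, then read the registered dependencies back.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class Payment:
        name: str
        card_number: str = ""
        billing_address: str = ""

    dependent_required({"card_number": ["billing_address"]}, owner=Payment)
    assert get_dependent_required(Payment) == {"card_number": {"billing_address"}}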


@@ -0,0 +1,12 @@
import warnings
from apischema.dependencies import dependent_required
def DependentRequired(*args, **kwargs):
warnings.warn(
"apischema.dependent_required.DependentRequired is deprecated,"
" use apischema.dependent_required instead",
DeprecationWarning,
)
return dependent_required(*args, **kwargs)


@@ -0,0 +1,924 @@
from collections import defaultdict
from dataclasses import dataclass, replace
from enum import Enum
from functools import lru_cache
from typing import (
AbstractSet,
Any,
Callable,
Collection,
Dict,
List,
Mapping,
Optional,
Pattern,
Sequence,
Set,
Tuple,
Type,
TypeVar,
overload,
)
from apischema.aliases import Aliaser
from apischema.cache import cache
from apischema.conversions.conversions import AnyConversion, DefaultConversion
from apischema.conversions.visitor import (
Deserialization,
DeserializationVisitor,
sub_conversion,
)
from apischema.dependencies import get_dependent_required
from apischema.deserialization.coercion import Coerce, Coercer
from apischema.deserialization.flattened import get_deserialization_flattened_aliases
from apischema.json_schema.patterns import infer_pattern
from apischema.json_schema.types import bad_type
from apischema.metadata.implem import ValidatorsMetadata
from apischema.metadata.keys import SCHEMA_METADATA, VALIDATORS_METADATA
from apischema.objects import ObjectField
from apischema.objects.fields import FieldKind
from apischema.objects.visitor import DeserializationObjectVisitor
from apischema.recursion import RecursiveConversionsVisitor
from apischema.schemas import Schema, get_schema
from apischema.schemas.constraints import Check, Constraints, merge_constraints
from apischema.types import AnyType, NoneType
from apischema.typing import get_args, get_origin
from apischema.utils import (
Lazy,
PREFIX,
deprecate_kwargs,
get_origin_or_type,
literal_values,
opt_or,
)
from apischema.validation import get_validators
from apischema.validation.errors import ErrorKey, ValidationError, merge_errors
from apischema.validation.mock import ValidatorMock
from apischema.validation.validators import Validator, validate
from apischema.visitor import Unsupported
MISSING_PROPERTY = "missing property"
UNEXPECTED_PROPERTY = "unexpected property"
NOT_NONE = object()
INIT_VARS_ATTR = f"{PREFIX}_init_vars"
T = TypeVar("T")
DeserializationMethod = Callable[[Any], T]
Factory = Callable[[Optional[Constraints], Sequence[Validator]], DeserializationMethod]
@dataclass(frozen=True)
class DeserializationMethodFactory:
factory: Factory
cls: Optional[type] = None
constraints: Optional[Constraints] = None
validators: Tuple[Validator, ...] = ()
def merge(
self, constraints: Optional[Constraints], validators: Sequence[Validator] = ()
) -> "DeserializationMethodFactory":
if constraints is None and not validators:
return self
return replace(
self,
constraints=merge_constraints(self.constraints, constraints),
validators=(*validators, *self.validators),
)
@property # type: ignore
@lru_cache()
def method(self) -> DeserializationMethod:
return self.factory(self.constraints, self.validators) # type: ignore
def get_constraints(schema: Optional[Schema]) -> Optional[Constraints]:
return schema.constraints if schema is not None else None
def get_constraint_checks(
constraints: Optional[Constraints], cls: type
) -> Collection[Tuple[Check, Any, str]]:
return () if constraints is None else constraints.checks_by_type[cls]
class DeserializationMethodVisitor(
RecursiveConversionsVisitor[Deserialization, DeserializationMethodFactory],
DeserializationVisitor[DeserializationMethodFactory],
DeserializationObjectVisitor[DeserializationMethodFactory],
):
def __init__(
self,
additional_properties: bool,
aliaser: Aliaser,
coercer: Optional[Coercer],
default_conversion: DefaultConversion,
fall_back_on_default: bool,
):
super().__init__(default_conversion)
self.additional_properties = additional_properties
self.aliaser = aliaser
self.coercer = coercer
self.fall_back_on_default = fall_back_on_default
def _recursive_result(
self, lazy: Lazy[DeserializationMethodFactory]
) -> DeserializationMethodFactory:
def factory(
constraints: Optional[Constraints], validators: Sequence[Validator]
) -> DeserializationMethod:
rec_method = None
def method(data: Any) -> Any:
nonlocal rec_method
if rec_method is None:
rec_method = lazy().merge(constraints, validators).method
return rec_method(data)
return method
return DeserializationMethodFactory(factory)
def visit_not_recursive(self, tp: AnyType) -> DeserializationMethodFactory:
return deserialization_method_factory(
tp,
self.additional_properties,
self.aliaser,
self.coercer,
self._conversion,
self.default_conversion,
self.fall_back_on_default,
)
def annotated(
self, tp: AnyType, annotations: Sequence[Any]
) -> DeserializationMethodFactory:
factory = super().annotated(tp, annotations)
for annotation in reversed(annotations):
if isinstance(annotation, Mapping):
factory = factory.merge(
get_constraints(annotation.get(SCHEMA_METADATA)),
annotation.get(
VALIDATORS_METADATA, ValidatorsMetadata(())
).validators,
)
return factory
def _factory(
self, factory: Factory, cls: Optional[type] = None, validation: bool = True
) -> DeserializationMethodFactory:
def wrapper(
constraints: Optional[Constraints], validators: Sequence[Validator]
) -> DeserializationMethod:
method: DeserializationMethod
if validation and validators:
wrapped, aliaser = factory(constraints, ()), self.aliaser
def method(data: Any) -> Any:
result = wrapped(data)
validate(result, validators, aliaser=aliaser)
return result
else:
method = factory(constraints, validators)
if self.coercer is not None and cls is not None:
coercer = self.coercer
def wrapper(data: Any) -> Any:
assert cls is not None
return method(coercer(cls, data))
return wrapper
else:
return method
return DeserializationMethodFactory(wrapper, cls)
def any(self) -> DeserializationMethodFactory:
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
checks = None if constraints is None else constraints.checks_by_type
def method(data: Any) -> Any:
if checks is not None:
if data.__class__ in checks:
errors = [
err
for check, attr, err in checks[data.__class__]
if check(data, attr)
]
if errors:
raise ValidationError(errors)
return data
return method
return self._factory(factory)
def collection(
self, cls: Type[Collection], value_type: AnyType
) -> DeserializationMethodFactory:
value_factory = self.visit(value_type)
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
deserialize_value = value_factory.method
checks = get_constraint_checks(constraints, list)
constructor: Optional[Callable[[list], Collection]] = None
if issubclass(cls, AbstractSet):
constructor = set
elif issubclass(cls, tuple):
constructor = tuple
def method(data: Any) -> Any:
if not isinstance(data, list):
raise bad_type(data, list)
elt_errors: Dict[ErrorKey, ValidationError] = {}
values: list = [None] * len(data)
index = 0 # don't use `enumerate` for performance
for elt in data:
try:
values[index] = deserialize_value(elt)
except ValidationError as err:
elt_errors[index] = err
index += 1
if checks:
errors = [err for check, attr, err in checks if check(data, attr)]
if errors or elt_errors:
raise ValidationError(errors, elt_errors)
elif elt_errors:
raise ValidationError([], elt_errors)
return constructor(values) if constructor else values
return method
return self._factory(factory, list)
def enum(self, cls: Type[Enum]) -> DeserializationMethodFactory:
return self.literal(list(cls))
def literal(self, values: Sequence[Any]) -> DeserializationMethodFactory:
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
value_map = dict(zip(literal_values(values), values))
types = list(set(map(type, value_map))) if self.coercer else []
error = f"not one of {list(value_map)}"
coercer = self.coercer
def method(data: Any) -> Any:
try:
return value_map[data]
except KeyError:
if coercer:
for cls in types:
try:
return value_map[coercer(cls, data)]
                        except (KeyError, ValidationError):  # not coercible to a valid literal
pass
raise ValidationError([error])
            # Unions with Literal can receive unhashable data
except TypeError:
raise bad_type(data, *types)
return method
return self._factory(factory)
def mapping(
self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType
) -> DeserializationMethodFactory:
key_factory, value_factory = self.visit(key_type), self.visit(value_type)
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
deserialize_key = key_factory.method
deserialize_value = value_factory.method
checks = get_constraint_checks(constraints, dict)
def method(data: Any) -> Any:
if not isinstance(data, dict):
raise bad_type(data, dict)
item_errors: Dict[ErrorKey, ValidationError] = {}
items = {}
for key, value in data.items():
assert isinstance(key, str)
try:
items[deserialize_key(key)] = deserialize_value(value)
except ValidationError as err:
item_errors[key] = err
if checks:
errors = [err for check, attr, err in checks if check(data, attr)]
if errors or item_errors:
raise ValidationError(errors, item_errors)
elif item_errors:
raise ValidationError([], item_errors)
return items
return method
return self._factory(factory, dict)
def object(
self, tp: Type, fields: Sequence[ObjectField]
) -> DeserializationMethodFactory:
field_factories = [
self.visit_with_conv(f.type, f.deserialization).merge(
get_constraints(f.schema), f.validators
)
for f in fields
]
def factory(
constraints: Optional[Constraints], validators: Sequence[Validator]
) -> DeserializationMethod:
cls = get_origin_or_type(tp)
alias_by_name = {field.name: self.aliaser(field.alias) for field in fields}
requiring: Dict[str, Set[str]] = defaultdict(set)
for f, reqs in get_dependent_required(cls).items():
for req in reqs:
requiring[req].add(alias_by_name[f])
normal_fields, flattened_fields, pattern_fields = [], [], []
additional_field = None
for field, field_factory in zip(fields, field_factories):
deserialize_field: DeserializationMethod = field_factory.method
fall_back_on_default = (
field.fall_back_on_default or self.fall_back_on_default
)
if field.flattened:
flattened_aliases = get_deserialization_flattened_aliases(
cls, field, self.default_conversion
)
flattened_fields.append(
(
field.name,
set(map(self.aliaser, flattened_aliases)),
deserialize_field,
fall_back_on_default,
)
)
elif field.pattern_properties is not None:
field_pattern = field.pattern_properties
if field_pattern is ...:
field_pattern = infer_pattern(
field.type, self.default_conversion
)
assert isinstance(field_pattern, Pattern)
pattern_fields.append(
(
field.name,
field_pattern,
deserialize_field,
fall_back_on_default,
)
)
elif field.additional_properties:
additional_field = (
field.name,
deserialize_field,
fall_back_on_default,
)
else:
normal_fields.append(
(
field.name,
self.aliaser(field.alias),
deserialize_field,
field.required,
requiring[field.name],
fall_back_on_default,
)
)
has_aggregate_field = (
flattened_fields or pattern_fields or (additional_field is not None)
)
post_init_modified = {field.name for field in fields if field.post_init}
checks = get_constraint_checks(constraints, dict)
aliaser = self.aliaser
additional_properties = self.additional_properties
all_aliases = set(alias_by_name.values())
init_defaults = [
(f.name, f.default_factory)
for f in fields
if f.kind == FieldKind.WRITE_ONLY
]
def method(data: Any) -> Any:
if not isinstance(data, dict):
raise bad_type(data, dict)
values: Dict[str, Any] = {}
fields_count = 0
errors = (
[err for check, attr, err in checks if check(data, attr)]
if checks
else []
)
field_errors: Dict[ErrorKey, ValidationError] = {}
for (
name,
alias,
deserialize_field,
required,
required_by,
fall_back_on_default,
) in normal_fields:
if required:
try:
value = data[alias]
except KeyError:
field_errors[alias] = ValidationError([MISSING_PROPERTY])
else:
fields_count += 1
try:
values[name] = deserialize_field(value)
except ValidationError as err:
field_errors[alias] = err
elif alias in data:
fields_count += 1
try:
values[name] = deserialize_field(data[alias])
except ValidationError as err:
if not fall_back_on_default:
field_errors[alias] = err
elif required_by and not required_by.isdisjoint(data):
requiring = sorted(required_by & data.keys())
msg = f"missing property (required by {requiring})"
field_errors[alias] = ValidationError([msg])
if has_aggregate_field:
remain = data.keys() - all_aliases
for (
name,
flattened_alias,
deserialize_field,
fall_back_on_default,
) in flattened_fields:
flattened = {
alias: data[alias]
for alias in flattened_alias
if alias in data
}
remain.difference_update(flattened)
try:
values[name] = deserialize_field(flattened)
except ValidationError as err:
if not fall_back_on_default:
errors.extend(err.messages)
field_errors.update(err.children)
for (
name,
pattern,
deserialize_field,
fall_back_on_default,
) in pattern_fields:
matched = {
key: data[key] for key in remain if pattern.match(key)
}
remain.difference_update(matched)
try:
values[name] = deserialize_field(matched)
except ValidationError as err:
if not fall_back_on_default:
errors.extend(err.messages)
field_errors.update(err.children)
if additional_field:
name, deserialize_field, fall_back_on_default = additional_field
additional = {key: data[key] for key in remain}
try:
values[name] = deserialize_field(additional)
except ValidationError as err:
if not fall_back_on_default:
errors.extend(err.messages)
field_errors.update(err.children)
elif remain and not additional_properties:
for key in remain:
field_errors[key] = ValidationError([UNEXPECTED_PROPERTY])
elif not additional_properties and len(data) != fields_count:
for key in data.keys() - all_aliases:
field_errors[key] = ValidationError([UNEXPECTED_PROPERTY])
validators2: Sequence[Validator]
if validators:
init: Dict[str, Any] = {}
for name, default_factory in init_defaults:
if name in values:
init[name] = values[name]
elif name not in field_errors:
assert default_factory is not None
init[name] = default_factory()
                # Discard validators whose dependencies are all defaulted
validators2 = [
v
for v in validators
if not v.dependencies.isdisjoint(values.keys())
]
if field_errors or errors:
error = ValidationError(errors, field_errors)
invalid_fields = field_errors.keys() | post_init_modified
try:
validate(
ValidatorMock(cls, values),
[
v
for v in validators2
if v.dependencies.isdisjoint(invalid_fields)
],
init,
aliaser=aliaser,
)
except ValidationError as err:
error = merge_errors(error, err)
raise error
elif field_errors or errors:
raise ValidationError(errors, field_errors)
else:
validators2, init = (), ... # type: ignore # only for linter
try:
res = cls(**values)
except (AssertionError, ValidationError):
raise
except TypeError as err:
if str(err).startswith("__init__() got"):
raise Unsupported(cls)
else:
raise ValidationError([str(err)])
except Exception as err:
raise ValidationError([str(err)])
if validators:
validate(res, validators2, init, aliaser=aliaser)
return res
return method
return self._factory(factory, dict, validation=False)
def primitive(self, cls: Type) -> DeserializationMethodFactory:
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
checks = get_constraint_checks(constraints, cls)
if cls is NoneType:
def method(data: Any) -> Any:
if data is not None:
raise bad_type(data, cls)
return data
elif cls is not float and not checks:
def method(data: Any) -> Any:
if not isinstance(data, cls):
raise bad_type(data, cls)
return data
elif cls is not float and len(checks) == 1:
((check, attr, err),) = checks
def method(data: Any) -> Any:
if not isinstance(data, cls):
raise bad_type(data, cls)
elif check(data, attr):
raise ValidationError([err])
return data
else:
is_float = cls is float
def method(data: Any) -> Any:
if not isinstance(data, cls):
if is_float and isinstance(data, int):
data = float(data)
else:
raise bad_type(data, cls)
if checks:
errors = [
err for check, attr, err in checks if check(data, attr)
]
if errors:
raise ValidationError(errors)
return data
return method
return self._factory(factory, cls)
def subprimitive(self, cls: Type, superclass: Type) -> DeserializationMethodFactory:
primitive_factory = self.primitive(superclass)
def factory(
constraints: Optional[Constraints], validators: Sequence[Validator]
) -> DeserializationMethod:
deserialize_primitive = primitive_factory.merge(
constraints, validators
).method
def method(data: Any) -> Any:
return superclass(deserialize_primitive(data))
return method
return replace(primitive_factory, factory=factory)
def tuple(self, types: Sequence[AnyType]) -> DeserializationMethodFactory:
elt_factories = [self.visit(tp) for tp in types]
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
expected_len = len(types)
(_, _, min_err), (_, _, max_err) = Constraints(
min_items=len(types), max_items=len(types)
).checks_by_type[list]
elt_methods = list(enumerate(fact.method for fact in elt_factories))
checks = get_constraint_checks(constraints, list)
def method(data: Any) -> Any:
if not isinstance(data, list):
raise bad_type(data, list)
if len(data) != expected_len:
raise ValidationError([min_err, max_err])
elt_errors: Dict[ErrorKey, ValidationError] = {}
elts: List[Any] = [None] * expected_len
for i, deserialize_elt in elt_methods:
try:
elts[i] = deserialize_elt(data[i])
except ValidationError as err:
elt_errors[i] = err
if checks:
errors = [err for check, attr, err in checks if check(data, attr)]
if errors or elt_errors:
raise ValidationError(errors, elt_errors)
elif elt_errors:
raise ValidationError([], elt_errors)
return tuple(elts)
return method
return self._factory(factory, list)
def union(self, alternatives: Sequence[AnyType]) -> DeserializationMethodFactory:
alt_factories = self._union_results(alternatives)
if len(alt_factories) == 1:
return alt_factories[0]
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
alt_methods = [fact.merge(constraints).method for fact in alt_factories]
# method_by_cls cannot replace alt_methods, because there could be several
# methods for one class
method_by_cls = dict(zip((f.cls for f in alt_factories), alt_methods))
if NoneType in alternatives and len(alt_methods) == 2:
deserialize_alt = next(
meth
for fact, meth in zip(alt_factories, alt_methods)
if fact.cls is not NoneType
)
coercer = self.coercer
def method(data: Any) -> Any:
if data is None:
return None
try:
return deserialize_alt(data)
except ValidationError as err:
if coercer and coercer(NoneType, data) is None:
return None
else:
raise merge_errors(err, bad_type(data, NoneType))
elif None not in method_by_cls and len(method_by_cls) == len(alt_factories):
classes = tuple(cls for cls in method_by_cls if cls is not None)
def method(data: Any) -> Any:
try:
return method_by_cls[data.__class__](data)
except KeyError:
raise bad_type(data, *classes) from None
except ValidationError as err:
other_classes = (
cls for cls in classes if cls is not data.__class__
)
raise merge_errors(err, bad_type(data, *other_classes))
else:
def method(data: Any) -> Any:
error = None
for deserialize_alt in alt_methods:
try:
return deserialize_alt(data)
except ValidationError as err:
error = merge_errors(error, err)
assert error is not None
raise error
return method
return self._factory(factory)
def _visit_conversion(
self,
tp: AnyType,
conversion: Deserialization,
dynamic: bool,
next_conversion: Optional[AnyConversion],
) -> DeserializationMethodFactory:
assert conversion
conv_factories = [
self.visit_with_conv(conv.source, sub_conversion(conv, next_conversion))
for conv in conversion
]
def factory(constraints: Optional[Constraints], _) -> DeserializationMethod:
conv_methods = [
((fact if dynamic else fact.merge(constraints)).method, conv.converter)
for conv, fact in zip(conversion, conv_factories)
]
method: DeserializationMethod
if len(conv_methods) == 1:
deserialize_alt, converter = conv_methods[0]
def method(data: Any) -> Any:
try:
return converter(deserialize_alt(data))
except (ValidationError, AssertionError):
raise
except Exception as err:
raise ValidationError([str(err)])
else:
def method(data: Any) -> Any:
error: Optional[ValidationError] = None
for deserialize_alt, converter in conv_methods:
try:
value = deserialize_alt(data)
except ValidationError as err:
error = merge_errors(error, err)
else:
try:
return converter(value)
except (ValidationError, AssertionError):
raise
except Exception as err:
raise ValidationError([str(err)])
assert error is not None
raise error
return method
return self._factory(factory, validation=not dynamic)
def visit_conversion(
self,
tp: AnyType,
conversion: Optional[Deserialization],
dynamic: bool,
next_conversion: Optional[AnyConversion] = None,
) -> DeserializationMethodFactory:
factory = super().visit_conversion(tp, conversion, dynamic, next_conversion)
if not dynamic:
factory = factory.merge(get_constraints(get_schema(tp)), get_validators(tp))
if get_args(tp):
factory = factory.merge(
get_constraints(get_schema(get_origin(tp))),
get_validators(get_origin(tp)),
)
return factory
@cache
def deserialization_method_factory(
tp: AnyType,
additional_properties: bool,
aliaser: Aliaser,
coercer: Optional[Coercer],
conversion: Optional[AnyConversion],
default_conversion: DefaultConversion,
fall_back_on_default: bool,
) -> DeserializationMethodFactory:
return DeserializationMethodVisitor(
additional_properties,
aliaser,
coercer,
default_conversion,
fall_back_on_default,
).visit_with_conv(tp, conversion)
@overload
def deserialization_method(
type: Type[T],
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> DeserializationMethod[T]:
...
@overload
def deserialization_method(
type: AnyType,
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> DeserializationMethod:
...
def deserialization_method(
type: AnyType,
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> DeserializationMethod:
from apischema import settings
coercer: Optional[Coercer] = None
if callable(coerce):
coercer = coerce
elif opt_or(coerce, settings.deserialization.coerce):
coercer = settings.deserialization.coercer
return (
deserialization_method_factory(
type,
opt_or(additional_properties, settings.additional_properties),
opt_or(aliaser, settings.aliaser),
coercer,
conversion,
opt_or(default_conversion, settings.deserialization.default_conversion),
opt_or(fall_back_on_default, settings.deserialization.fall_back_on_default),
)
.merge(get_constraints(schema), ())
.method
)
@overload
def deserialize(
type: Type[T],
data: Any,
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> T:
...
@overload
def deserialize(
type: AnyType,
data: Any,
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> Any:
...
@deprecate_kwargs(
{
"coercion": "coerce",
"conversions": "conversion",
"default_fallback": "fall_back_on_default",
}
)
def deserialize(
type: AnyType,
data: Any,
*,
additional_properties: bool = None,
aliaser: Aliaser = None,
coerce: Coerce = None,
conversion: AnyConversion = None,
default_conversion: DefaultConversion = None,
fall_back_on_default: bool = None,
schema: Schema = None,
) -> Any:
return deserialization_method(
type,
additional_properties=additional_properties,
aliaser=aliaser,
coerce=coerce,
conversion=conversion,
default_conversion=default_conversion,
fall_back_on_default=fall_back_on_default,
schema=schema,
)(data)
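# A minimal usage sketch (editorial) of the public entry points defined above.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class Point:
        x: int
        y: int = 0

    assert deserialize(Point, {"x": 1}) == Point(1)
    # with coercion enabled, string values are converted to the expected type
    assert deserialize(Point, {"x": "1", "y": "2"}, coerce=True) == Point(1, 2)
    # deserialization_method precomputes the (cached) method for reuse
    deserialize_point = deserialization_method(Point)
    assert deserialize_point({"x": 3, "y": 4}) == Point(3, 4)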


@@ -0,0 +1,55 @@
from typing import Any, Callable, Dict, Type, TypeVar, Union
from apischema.json_schema.types import bad_type
from apischema.types import NoneType
T = TypeVar("T")
Coercer = Callable[[Type[T], Any], T]
_bool_pairs = (
("0", "1"),
("f", "t"),
("n", "y"),
("no", "yes"),
("false", "true"),
("off", "on"),
("ko", "ok"),
)
STR_TO_BOOL: Dict[str, bool] = {}
for false, true in _bool_pairs:
for s, value in ((false, False), (true, True)):
STR_TO_BOOL[s.lower()] = value
STR_NONE_VALUES = {""}
def coerce(cls: Type[T], data: Any) -> T:
if cls is NoneType:
if data is None or data in STR_NONE_VALUES:
return None # type: ignore
else:
raise bad_type(data, cls)
elif isinstance(data, cls):
return data
elif cls is bool:
if isinstance(data, str):
return STR_TO_BOOL[data.lower()] # type: ignore
elif isinstance(data, int):
return bool(data) # type: ignore
else:
raise bad_type(data, cls)
elif cls in (int, float):
try:
return cls(data) # type: ignore
except ValueError:
raise bad_type(data, cls)
elif cls is str:
if isinstance(data, (int, float)) and not isinstance(data, bool):
return str(data) # type: ignore
else:
raise bad_type(data, cls)
else:
raise bad_type(data, cls)
Coerce = Union[bool, Coercer]
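# A few hedged examples of the default coercer's behavior.
if __name__ == "__main__":
    assert coerce(bool, "yes") is True and coerce(bool, "off") is False
    assert coerce(int, "42") == 42
    assert coerce(str, 3.14) == "3.14"  # bool, however, is never coerced to str
    assert coerce(NoneType, "") is None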


@@ -0,0 +1,46 @@
from typing import Iterator, Mapping, Sequence, Type
from apischema.conversions.conversions import DefaultConversion
from apischema.conversions.visitor import DeserializationVisitor
from apischema.objects import ObjectField
from apischema.objects.visitor import DeserializationObjectVisitor
from apischema.types import AnyType
from apischema.utils import get_origin_or_type
from apischema.visitor import Unsupported
class InitFlattenedAliasVisitor(
DeserializationObjectVisitor[Iterator[str]], DeserializationVisitor[Iterator[str]]
):
def mapping(
self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType
) -> Iterator[str]:
yield from ()
def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> Iterator[str]:
for field in fields:
if field.flattened:
yield from get_deserialization_flattened_aliases(
get_origin_or_type(tp), field, self.default_conversion
)
elif not field.is_aggregate:
yield field.alias
def _visited_union(self, results: Sequence[Iterator[str]]) -> Iterator[str]:
if len(results) != 1:
raise NotImplementedError
return results[0]
def get_deserialization_flattened_aliases(
cls: Type, field: ObjectField, default_conversion: DefaultConversion
) -> Iterator[str]:
assert field.flattened
try:
yield from InitFlattenedAliasVisitor(default_conversion).visit_with_conv(
field.type, field.deserialization
)
except (NotImplementedError, Unsupported):
raise TypeError(
f"Flattened field {cls.__name__}.{field.name} must have an object type"
) from None
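# A hedged usage sketch (editorial), assuming apischema's public object_fields
# helper and flatten metadata: collect the aliases a flattened field contributes.
if __name__ == "__main__":
    from dataclasses import dataclass, field

    from apischema import settings
    from apischema.metadata import flatten
    from apischema.objects import object_fields

    @dataclass
    class Inner:
        a: int
        b: str = ""

    @dataclass
    class Outer:
        inner: Inner = field(metadata=flatten)

    aliases = get_deserialization_flattened_aliases(
        Outer,
        object_fields(Outer)["inner"],
        settings.deserialization.default_conversion,
    )
    assert set(aliases) == {"a", "b"}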


@@ -0,0 +1,145 @@
__all__ = ["fields_set", "is_set", "set_fields", "unset_fields", "with_fields_set"]
from dataclasses import ( # type: ignore
Field,
_FIELD,
_FIELDS,
_FIELD_INITVAR,
is_dataclass,
)
from functools import wraps
from inspect import signature
from typing import AbstractSet, Any, Collection, Set, Type, TypeVar, cast
from apischema.objects.fields import get_field_name
from apischema.utils import PREFIX
FIELDS_SET_ATTR = f"{PREFIX}fields_set"
_ALREADY_SET = f"{PREFIX}already_set"
Cls = TypeVar("Cls", bound=Type)
_fields_set_classes: Set[type] = set()
def support_fields_set(cls: type) -> bool:
return any(base in _fields_set_classes for base in cls.__mro__)
def with_fields_set(cls: Cls) -> Cls:
from apischema.metadata.keys import DEFAULT_AS_SET_METADATA
init_fields = set()
post_init_fields = set()
if is_dataclass(cls):
for field in getattr(cls, _FIELDS).values():
assert isinstance(field, Field)
if field._field_type == _FIELD_INITVAR: # type: ignore
init_fields.add(field.name)
if field._field_type == _FIELD and not field.init: # type: ignore
post_init_fields.add(field.name)
if field.metadata.get(DEFAULT_AS_SET_METADATA):
post_init_fields.add(field.name)
params = list(signature(cls.__init__).parameters)[1:]
old_new = cls.__new__
old_init = cls.__init__
old_setattr = cls.__setattr__
def new_new(*args, **kwargs):
if old_new is object.__new__:
obj = object.__new__(args[0])
else:
obj = old_new(*args, **kwargs)
        # Initialize FIELDS_SET_ATTR so that inherited classes overriding __init__
        # don't make __setattr__ raise
obj.__dict__[FIELDS_SET_ATTR] = set()
return obj
def new_init(self, *args, **kwargs):
prev_fields_set = self.__dict__.get(FIELDS_SET_ATTR, set()).copy()
self.__dict__[FIELDS_SET_ATTR] = set()
try:
old_init(self, *args, **kwargs)
except TypeError as err:
if str(err) == no_dataclass_init_error:
raise RuntimeError(dataclass_before_error) from None
else:
raise
arg_fields = {*params[: len(args)], *kwargs} - init_fields
self.__dict__[FIELDS_SET_ATTR] = prev_fields_set | arg_fields | post_init_fields
def new_setattr(self, attr, value):
try:
self.__dict__[FIELDS_SET_ATTR].add(attr)
except KeyError:
raise RuntimeError(dataclass_before_error) from None
old_setattr(self, attr, value)
for attr, old, new in [
("__new__", old_new, new_new),
("__init__", old_init, new_init),
("__setattr__", old_setattr, new_setattr),
]:
if hasattr(old, _ALREADY_SET):
continue
setattr(new, _ALREADY_SET, True)
setattr(cls, attr, wraps(old)(new)) # type: ignore
_fields_set_classes.add(cls)
return cls
no_dataclass_init_error = (
"object.__init__() takes exactly one argument (the instance to initialize)"
)
dataclass_before_error = (
f"{with_fields_set.__name__} must be put before dataclass decorator"
)
T = TypeVar("T")
def _field_names(fields: Collection) -> AbstractSet[str]:
result: Set[str] = set()
for field in fields:
result.add(get_field_name(field))
return result
def _fields_set(obj: Any) -> Set[str]:
try:
return getattr(obj, FIELDS_SET_ATTR)
except AttributeError:
raise TypeError(
f"Type {obj.__class__} is not decorated" f" with {with_fields_set.__name__}"
)
def set_fields(obj: T, *fields: Any, overwrite=False) -> T:
if overwrite:
_fields_set(obj).clear()
_fields_set(obj).update(map(get_field_name, fields))
return obj
def unset_fields(obj: T, *fields: Any) -> T:
_fields_set(obj).difference_update(map(get_field_name, fields))
return obj
# This could just be a typed alias, but an explicit function is better handled
# by IDEs
def fields_set(obj: Any) -> AbstractSet[str]:
return _fields_set(obj)
class FieldIsSet:
def __init__(self, obj: Any):
self.fields_set = fields_set(obj)
def __getattribute__(self, name: str) -> bool:
return name in object.__getattribute__(self, "fields_set")
def is_set(obj: T) -> T:
return cast(T, FieldIsSet(obj))
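# A minimal usage sketch (editorial): tracking which fields were explicitly set.
if __name__ == "__main__":
    from dataclasses import dataclass

    @with_fields_set
    @dataclass
    class Data:
        a: int
        b: int = 0

    data = Data(1)
    assert fields_set(data) == {"a"}
    data.b = 2
    assert fields_set(data) == {"a", "b"}
    assert is_set(data).b
    unset_fields(data, "b")
    assert fields_set(data) == {"a"}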


@@ -0,0 +1,23 @@
__all__ = [
"ID",
"Mutation",
"Query",
"Subscription",
"graphql_schema",
"interface",
"relay",
"resolver",
]
try:
from .schema import ID, Query, Mutation, Subscription, graphql_schema
from .interfaces import interface
from .resolvers import resolver
from . import relay
except ImportError:
raise ImportError(
"GraphQL feature requires graphql-core library\n"
"Run `pip install apischema[graphql]` to install it"
)


@@ -0,0 +1,18 @@
from typing import Collection, Set, Type, TypeVar
_interfaces: Set[Type] = set()
Cls = TypeVar("Cls", bound=Type)
def interface(cls: Cls) -> Cls:
_interfaces.add(cls)
return cls
def is_interface(cls: Type) -> bool:
return cls in _interfaces
def get_interfaces(cls: Type) -> Collection[Type]:
return list(filter(is_interface, cls.__mro__[1:]))
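# A minimal usage sketch (editorial) of interface registration and lookup.
if __name__ == "__main__":
    @interface
    class Node:
        id: str = ""

    class User(Node):
        pass

    assert is_interface(Node) and not is_interface(User)
    assert get_interfaces(User) == [Node]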

Some files were not shown because too many files have changed in this diff.